"""
Attributes:
SNAP_WIDTH (int): Description
"""
import logging
import random
from PyQt5.QtCore import (
QLineF,
QPointF,
Qt,
QRectF
)
from PyQt5.QtWidgets import (
QGraphicsItem,
QGraphicsEllipseItem,
QGraphicsSimpleTextItem,
QGraphicsSceneMouseEvent
)
from cadnano.views.pathview import pathstyles
from cadnano.controllers import VirtualHelixItemController
from cadnano.views.abstractitems import AbstractVirtualHelixItem
from cadnano.gui.palette import (
getPenObj,
getBrushObj
)
from . import slicestyles as styles
from .sliceextras import (
WedgeGizmo,
WEDGE_RECT
)
from cadnano.views.sliceview.tools import (
CreateSliceToolT,
SelectSliceToolT
)
from . import (
SliceNucleicAcidPartItemT,
SliceVirtualHelixItemT
)
from cadnano.cntypes import (
KeyT,
ValueT
)
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
# set up default, hover, and active drawing styles
_RADIUS = styles.SLICE_HELIX_RADIUS
_RECT = QRectF(0, 0, 2 * _RADIUS, 2 * _RADIUS)
_FONT = styles.SLICE_NUM_FONT
_ZVALUE = styles.ZSLICEHELIX
_BRUSH_DEFAULT = getBrushObj(styles.SLICE_FILL)
_USE_TEXT_BRUSH = getBrushObj(styles.USE_TEXT_COLOR)
_HOVER_PEN = getPenObj('#ffffff', 128)
_HOVER_BRUSH = getBrushObj('#ffffff', alpha=5)
SNAP_WIDTH = 3
class SliceVirtualHelixItem(AbstractVirtualHelixItem, QGraphicsEllipseItem):
"""The VirtualHelixItem is an individual circle that gets drawn in the SliceView
as a child of the NucleicAcidPartItem. Taken as a group, many SliceHelix
instances make up the crossection of the NucleicAcidPart. Clicking on a SliceHelix
adds a VirtualHelix to the PlasmidPart. The SliceHelix then changes appearence
and paints its corresponding VirtualHelix number.
Attributes:
FILTER_NAME (str): Belongs to the filter class 'virtual_helix'.
is_active (bool): Does the item have focus.
old_pen (QPen): temp storage for pen for easy restoration on appearance change.
wedge_gizmos (dict): dictionary of `WedgeGizmo` objects.
"""
FILTER_NAME = 'virtual_helix'
def __init__(self, id_num: int, part_item: SliceNucleicAcidPartItemT):
"""
Args:
id_num: VirtualHelix ID number. See `NucleicAcidPart` for description and related methods.
part_item: the part item
"""
AbstractVirtualHelixItem.__init__(self, id_num, part_item)
QGraphicsEllipseItem.__init__(self, parent=part_item)
self.app_window = part_item.window()
self._controller = VirtualHelixItemController(self, self._model_part, do_wire_part=False, do_wire_strands=True)
self.hide()
self._viewroot = part_item._viewroot
model_part = self._model_part
x, y = model_part.locationQt(self._id_num, part_item.scaleFactor())
# set position to offset for radius
# self.setTransformOriginPoint(_RADIUS, _RADIUS)
self.setCenterPos(x, y)
self.wedge_gizmos = {}
self._added_wedge_gizmos = set()
# self._prexo_gizmos = []
self.setAcceptHoverEvents(True)
self.setZValue(_ZVALUE)
# handle the label specific stuff
self._label = self.createLabel()
self.setNumber()
self.old_pen = None
self.is_active = False
self.updateAppearance()
self.show()
self._right_mouse_move = False
# end def
### ACCESSORS ###
def setSnapOrigin(self, is_snap: bool):
"""Used to toggle an item as the snap origin. See `SelectSliceTool`.
Args:
is_snap (bool): True if this should be the snap origin, False otherwise.
"""
if is_snap:
op = self.pen()
if self.old_pen is None:
self.old_pen = op
self.setPen(getPenObj(op.color().name(), SNAP_WIDTH))
else:
self.setPen(self.old_pen)
self.old_pen = None
def activate(self):
"""Sets the VirtualHelixItem object as active (i.e. having focus due
to user input) and calls updateAppearance.
"""
self.is_active = True
self.updateAppearance()
def deactivate(self):
"""Sets the VirtualHelixItem object as not active (i.e. does not have
focus) and calls updateAppearance.
"""
self.is_active = False
self.updateAppearance()
def setCenterPos(self, x: float, y: float):
"""Moves this item a new position such that its center is located at
(x,y).
Args:
x: new x coordinate
y: new y coordinate
"""
# invert the y axis
part_item = self._part_item
parent_item = self.parentItem()
pos = QPointF(x - _RADIUS, y - _RADIUS)
if parent_item != part_item:
pos = parent_item.mapFromItem(part_item, pos)
self.setPos(pos)
# end def
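# Worked example (illustrative values): with _RADIUS == 10, setCenterPos(30, 50)
# places the item so the top-left corner of _RECT sits at (20, 40) relative to
# the part item, centering the ellipse at (30, 50).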
def getCenterScenePos(self) -> QPointF:
"""
Returns:
the scenePos of the virtualhelixitem center
"""
return self.scenePos() + QPointF(_RADIUS, _RADIUS)
# end def
def partCrossoverSpanAngle(self) -> float:
"""The angle span, centered at each base, within which crossovers
will be allowed by the interface. The span is drawn by the WedgeGizmo.
Returns:
The span angle (default is set in NucleicAcidPart init)
"""
return float(self._model_part.getProperty('crossover_span_angle'))
# end def
### SIGNALS ###
### SLOTS ###
def mousePressEvent(self, event: QGraphicsSceneMouseEvent):
"""Event handler for when the mouse button is pressed inside
this item. If a tool-specific mouse press method is defined, it will be
called for the currently active tool. Otherwise, the default
QGraphicsItem.mousePressEvent will be called.
Note:
Only applies the event if the clicked item is in the part
item's active filter set.
Args:
event: contains parameters that describe the mouse event.
"""
if self.FILTER_NAME not in self._part_item.getFilterSet():
self.app_window.showFilterHints(True, filter_name=self.FILTER_NAME)
return
if event.button() == Qt.RightButton:
return
part_item = self._part_item
tool = part_item._getActiveTool()
tool_method_name = tool.methodPrefix() + "MousePress"
if hasattr(self, tool_method_name):
getattr(self, tool_method_name)(tool, part_item, event)
else:
QGraphicsItem.mousePressEvent(self, event)
# end def
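# Dispatch note for the convention used above: each tool's methodPrefix()
# names the handler looked up on this item, e.g. 'selectTool' resolves to
# selectToolMousePress() and 'createTool' to createToolMousePress() (both
# defined below); prefixes without a matching handler fall through to
# QGraphicsItem.mousePressEvent.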
def mouseReleaseEvent(self, event: QGraphicsSceneMouseEvent):
"""Event handler for mouse release; hides any filter hints shown on press."""
self.app_window.showFilterHints(False)
def createToolMousePress(self, tool: CreateSliceToolT,
part_item: SliceNucleicAcidPartItemT,
event: QGraphicsSceneMouseEvent):
"""The event handler for when the mouse button is pressed inside this
item with the :class:`CreateSliceTool` active. Creates a strand spanning
the full length of this virtual helix on the forward or reverse strand
set (chosen by the helix's parity); holding Shift targets the opposite
strand set and assigns a random staple color.
Args:
tool: reference to call tool-specific methods
part_item: reference to the part item
event: contains parameters that describe the mouse event
"""
shift = event.modifiers() & Qt.ShiftModifier
fwd_ss, rev_ss = self.part().getStrandSets(self._id_num)
idx_high = fwd_ss.length() - 1
parity = self._id_num % 2
c = random.choice(pathstyles.STAP_COLORS) if shift else None
if parity:
if shift:
fwd_ss.createStrand(0, idx_high, color=c)
else:
rev_ss.createStrand(0, idx_high, color=c)
else:
if shift:
rev_ss.createStrand(0, idx_high, color=c)
else:
fwd_ss.createStrand(0, idx_high, color=c)
def selectToolMousePress(self, tool: SelectSliceToolT,
part_item: SliceNucleicAcidPartItemT,
event: QGraphicsSceneMouseEvent):
"""The event handler for when the mouse button is pressed inside this
item with the :class:`SelectSliceTool` active.
Args:
tool: reference to call tool-specific methods
part_item: reference to the part item
event: contains parameters that describe the mouse event
"""
part = self._model_part
part.setSelected(True)
tool.selectOrSnap(part_item, self, event)
# return QGraphicsItem.mousePressEvent(self, event)
# end def
def virtualHelixPropertyChangedSlot(self, keys: KeyT, values: ValueT):
"""The event handler for when the a model virtual helix propety has
changed. See ``partVirtualHelixPropertyChangedSignal``.
Calls :meth:`updateAppearance()`, which will refresh styles if needed.
Args:
keys: names of properties that changed
values: new values of the properties that changed
"""
# for key, val in zip(keys, values):
# pass
self.updateAppearance()
# end def
def virtualHelixRemovedSlot(self):
"""The event handler for when a virtual helix is removed from the model.
"""
self.destroyItem()
# end def
def destroyItem(self):
"""Disconnects signals, and sets internal references to label, part_item,
and model_part to None, and finally removes the item from the scene.
"""
self._controller.disconnectSignals()
self._controller = None
part_item = self._part_item
tool = part_item._getActiveTool()
if tool.methodPrefix() == "selectTool":
tool.hideLineItem()
self.scene().removeItem(self._label)
self._label = None
self._part_item = None
self._model_part = None
self._viewroot = None
self.scene().removeItem(self)
# end def
def updateAppearance(self):
"""Check item's current visibility, color and active state, and sets
pen, brush, text according to style defaults.
"""
is_visible, color = self._model_part.getVirtualHelixProperties(self._id_num, ['is_visible', 'color'])
if is_visible:
self.show()
else:
self.hide()
return
pwidth = styles.SLICE_HELIX_STROKE_WIDTH if self.old_pen is None else SNAP_WIDTH
if self.is_active:
self._USE_PEN = getPenObj(styles.ACTIVE_STROKE, pwidth)
else:
self._USE_PEN = getPenObj(color, pwidth)
self._TEXT_BRUSH = getBrushObj(styles.SLICE_TEXT_COLOR)
self._BRUSH = _BRUSH_DEFAULT
self._USE_BRUSH = getBrushObj(color, alpha=150)
self._label.setBrush(self._TEXT_BRUSH)
self.setBrush(self._BRUSH)
self.setPen(self._USE_PEN)
self.setRect(_RECT)
# end def
def updatePosition(self):
"""Queries the model virtual helix for latest x,y coodinates, and sets
them in the scene if necessary.
NOTE:
coordinates in the model are always in the part
"""
part_item = self._part_item
# sf = part_item.scaleFactor()
x, y = self._model_part.locationQt(self._id_num, part_item.scaleFactor())
new_pos = QPointF(x - _RADIUS, y - _RADIUS) # top left
tl_pos = part_item.mapFromScene(self.scenePos()) # top left
"""
better to compare QPointF's since it handles difference
tolerances for you with !=
"""
if new_pos != tl_pos:
parent_item = self.parentItem()
if parent_item != part_item:
new_pos = parent_item.mapFromItem(part_item, new_pos)
self.setPos(new_pos)
# end def
def createLabel(self) -> QGraphicsSimpleTextItem:
"""Creates a text label to display the ID number. Font and Z are set
in slicestyles.
Returns:
the label
"""
label = QGraphicsSimpleTextItem("%d" % self.idNum())
label.setFont(_FONT)
label.setZValue(_ZVALUE)
label.setParentItem(self)
return label
# end def
def beginAddWedgeGizmos(self):
"""Resets the list of :class:`WedgeGizmos` that will be processed by
endAddWedgeGizmos.
Called by :meth:`SliceNucleicAcidPartItem._refreshVirtualHelixItemGizmos`,
before :meth:`setWedgeGizmo` and :meth:`endAddWedgeGizmos`.
"""
self._added_wedge_gizmos.clear()
# end def
def endAddWedgeGizmos(self):
"""Removes :meth:`beginAddWedgeGizmos` that no longer point at a
neighbor virtual helix.
Called by :meth:`SliceNucleicAcidPartItem._refreshVirtualHelixItemGizmos`,
before :meth:`setWedgeGizmo` and :meth:`endAddWedgeGizmos`.
"""
remove_list = []
scene = self.scene()
wg_dict = self.wedge_gizmos
recently_added = self._added_wedge_gizmos
for neighbor_virtual_helix in wg_dict.keys():
if neighbor_virtual_helix not in recently_added:
remove_list.append(neighbor_virtual_helix)
for nvh in remove_list:
wg = wg_dict.get(nvh)
del wg_dict[nvh]
scene.removeItem(wg)
# end def
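# Refresh-protocol sketch, as described in the docstrings above (the iterable
# of neighbors is illustrative; in practice the part item drives this pass):
#
#     vhi.beginAddWedgeGizmos()
#     for nbr_id, nbr_item in neighbors:
#         vhi.setWedgeGizmo(nbr_id, nbr_item)
#     vhi.endAddWedgeGizmos()    # prunes gizmos not re-added this pass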
def setWedgeGizmo(self, neighbor_virtual_helix: int,
neighbor_virtual_helix_item: SliceVirtualHelixItemT):
"""Adds a :class:`WedgeGizmo` to oriented toward the specified neighbor vhi.
Called by :meth:`SliceNucleicAcidPartItem._refreshVirtualHelixItemGizmos`,
before :meth:`setWedgeGizmo` and :meth:`endAddWedgeGizmos`.
Args:
neighbor_virtual_helix: the id_num of neighboring virtual helix
neighbor_virtual_helix_item: the neighboring virtual helix item
"""
wg_dict = self.wedge_gizmos
nvhi = neighbor_virtual_helix_item
pos = self.scenePos()
line = QLineF(pos, nvhi.scenePos())
line.translate(_RADIUS, _RADIUS)
if line.length() > (_RADIUS*1.99):
color = '#5a8bff'
else:
color = '#cc0000'
line.setLength(_RADIUS)
if neighbor_virtual_helix in wg_dict:
wedge_item = wg_dict[neighbor_virtual_helix]
else:
wedge_item = WedgeGizmo(_RADIUS, WEDGE_RECT, self)
wg_dict[neighbor_virtual_helix] = wedge_item
wedge_item.showWedge(line.angle(), color, outline_only=False)
self._added_wedge_gizmos.add(neighbor_virtual_helix)
# end def
def setNumber(self):
"""Updates the associated :class:`QGraphicsSimpleTextItem` label text
to match the id_num. Adjusts the label position so it is centered
regardless of number of digits in the label.
"""
num = self.idNum()
label = self._label
if num is not None:
label.setText("%d" % num)
else:
return
y_val = _RADIUS / 3
if num < 10:
label.setPos(_RADIUS / 1.5, y_val)
elif num < 100:
label.setPos(_RADIUS / 3, y_val)
else: # _number >= 100
label.setPos(0, y_val)
b_rect = label.boundingRect()
posx = b_rect.width()/2
posy = b_rect.height()/2
label.setPos(_RADIUS-posx, _RADIUS-posy)
# end def
# end class
{
"content_hash": "d049043cc9a00e3ee6dc8f8f54f7ae4a",
"timestamp": "",
"source": "github",
"line_count": 448,
"max_line_length": 119,
"avg_line_length": 33.921875,
"alnum_prop": 0.6120286898730013,
"repo_name": "scholer/cadnano2.5",
"id": "dcfcdd27eaa7dc56f042ba5aba4286a7084301ed",
"size": "15221",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "cadnano/views/sliceview/virtualhelixitem.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "2617"
},
{
"name": "Python",
"bytes": "1624263"
},
{
"name": "QMake",
"bytes": "3719"
}
],
"symlink_target": ""
}
__author__ = """Chris Tabor (dxdstudio@gmail.com)"""
if __name__ == '__main__':
from os import getcwd
from os import sys
sys.path.append(getcwd())
from MOAL.helpers.display import Section
from MOAL.helpers.display import print_h4
from MOAL.helpers.display import annotate
from MOAL.helpers.display import print_simple
from MOAL.helpers.datamaker import random_dna
DEBUG = True if __name__ == '__main__' else False
@annotate
def dictify(arg):
"""Convert arg to a dict.
Args:
arg (mixed) - the thing to convert to a dictionary.
"""
return {'arg': arg}
@annotate
def listify(arg):
"""Convert arg to a list.
Args:
arg (mixed) - the thing to convert to a list.
"""
return [arg]
def fooify(thing):
return 'FOO_{}'.format(thing)
def bazify(thing):
return '{}_BAZ'.format(thing)
def prefix(thing, prefix='_:'):
return '{}{}'.format(prefix, thing)
def delimit(thing, delimiter=','):
return '{}{}'.format(thing, delimiter)
def compose_hybrid(arg, **kwargs):
f = kwargs.get('f')
g = kwargs.get('g')
return f(g(arg))
def compose_hybrid_hof(**kwargs):
f = kwargs.get('f')
g = kwargs.get('g')
def _compose(*args, **kwargs):
return f(g(*args, **kwargs))
return _compose
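# For example: compose_hybrid_hof(f=fooify, g=bazify)('x') applies bazify first
# and fooify second, returning 'FOO_x_BAZ'.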
def id(thing):
"""Identity function.
Args:
thing (mixed) - the thing to return.
"""
return thing
"""Closer to canonical functions."""
def f(a):
return a ** a
def g(b):
return b * 2
def f_g(a, b, arg):
return a(b(arg))
if DEBUG:
with Section('Category Theory basics'):
"""Challenge ideas taken from bartoszmilewski.com/2014/11/04
/category-the-essence-of-composition/ "Challenges" section."""
print_h4('Identity function')
for thing in ['cat', 1, 'dog', 2, range(0, 3), 0.03]:
assert thing == id(thing)
print(thing, id(thing))
print_h4('Function composition')
res = compose_hybrid('composition is neat!', f=dictify, g=listify)
print(res)
print_h4('Random funcs')
print(listify('foo'))
print(dictify('foo'))
print_h4('Higher order function composition')
f2 = compose_hybrid_hof(f=listify, g=dictify)
print(f2('composition yay!'))
print_h4('Function composition on a "stream" or incoming set')
res = [compose_hybrid(str(x), f=fooify, g=bazify) for x in range(4)]
print(res)
print_h4('Messing around...')
res = ''.join(res)
for x in range(4):
res += compose_hybrid(x, f=fooify, g=bazify)
print(res)
res = ''
for x in range(10):
# Just to make things interesting...
res += compose_hybrid(random_dna(), f=delimit, g=prefix)
print(res)
composed = f_g(f, g, 4)
print_simple('Traditional composed example:', composed, newline=False)
{
"content_hash": "daf14134391cb19456725474a30ec27f",
"timestamp": "",
"source": "github",
"line_count": 125,
"max_line_length": 78,
"avg_line_length": 23.456,
"alnum_prop": 0.5890177353342428,
"repo_name": "christabor/MoAL",
"id": "a86abd8ace147679331418176e4dafccb9514ac5",
"size": "2957",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "MOAL/maths/category_theory/cat_basics.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "1102"
},
{
"name": "Clojure",
"bytes": "1089"
},
{
"name": "Gherkin",
"bytes": "793"
},
{
"name": "HTML",
"bytes": "3579"
},
{
"name": "JavaScript",
"bytes": "1647"
},
{
"name": "Makefile",
"bytes": "1436"
},
{
"name": "PLSQL",
"bytes": "415"
},
{
"name": "Python",
"bytes": "692840"
},
{
"name": "Shell",
"bytes": "4420"
},
{
"name": "TSQL",
"bytes": "1090"
}
],
"symlink_target": ""
}
"""Support for Zigbee Home Automation devices."""
import asyncio
import logging
import voluptuous as vol
from zhaquirks import setup as setup_quirks
from zigpy.config import CONF_DEVICE, CONF_DEVICE_PATH
from homeassistant import config_entries, const as ha_const
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.device_registry import CONNECTION_ZIGBEE
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.typing import HomeAssistantType
from . import api
from .core import ZHAGateway
from .core.const import (
BAUD_RATES,
CONF_BAUDRATE,
CONF_DATABASE,
CONF_DEVICE_CONFIG,
CONF_ENABLE_QUIRKS,
CONF_RADIO_TYPE,
CONF_USB_PATH,
CONF_ZIGPY,
DATA_ZHA,
DATA_ZHA_CONFIG,
DATA_ZHA_DISPATCHERS,
DATA_ZHA_GATEWAY,
DATA_ZHA_PLATFORM_LOADED,
DOMAIN,
PLATFORMS,
SIGNAL_ADD_ENTITIES,
RadioType,
)
from .core.discovery import GROUP_PROBE
DEVICE_CONFIG_SCHEMA_ENTRY = vol.Schema({vol.Optional(ha_const.CONF_TYPE): cv.string})
ZHA_CONFIG_SCHEMA = {
vol.Optional(CONF_BAUDRATE): cv.positive_int,
vol.Optional(CONF_DATABASE): cv.string,
vol.Optional(CONF_DEVICE_CONFIG, default={}): vol.Schema(
{cv.string: DEVICE_CONFIG_SCHEMA_ENTRY}
),
vol.Optional(CONF_ENABLE_QUIRKS, default=True): cv.boolean,
vol.Optional(CONF_ZIGPY): dict,
vol.Optional(CONF_RADIO_TYPE): cv.enum(RadioType),
vol.Optional(CONF_USB_PATH): cv.string,
}
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
vol.All(
cv.deprecated(CONF_USB_PATH),
cv.deprecated(CONF_BAUDRATE),
cv.deprecated(CONF_RADIO_TYPE),
ZHA_CONFIG_SCHEMA,
),
),
},
extra=vol.ALLOW_EXTRA,
)
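# A minimal sketch of a dict that validates against ZHA_CONFIG_SCHEMA above
# (the database path value is an illustrative assumption, not a default):
#
#     example_conf = vol.Schema(ZHA_CONFIG_SCHEMA)(
#         {CONF_ENABLE_QUIRKS: True, CONF_DATABASE: "/config/zigbee.db"}
#     )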
# Zigbee definitions
CENTICELSIUS = "C-100"
# Internal definitions
_LOGGER = logging.getLogger(__name__)
async def async_setup(hass, config):
"""Set up ZHA from config."""
hass.data[DATA_ZHA] = {}
if DOMAIN in config:
conf = config[DOMAIN]
hass.data[DATA_ZHA][DATA_ZHA_CONFIG] = conf
return True
async def async_setup_entry(hass, config_entry):
"""Set up ZHA.
Will automatically load components to support devices found on the network.
"""
zha_data = hass.data.setdefault(DATA_ZHA, {})
config = zha_data.get(DATA_ZHA_CONFIG, {})
for platform in PLATFORMS:
zha_data.setdefault(platform, [])
if config.get(CONF_ENABLE_QUIRKS, True):
setup_quirks(config)
zha_gateway = ZHAGateway(hass, config, config_entry)
await zha_gateway.async_initialize()
zha_data[DATA_ZHA_DISPATCHERS] = []
zha_data[DATA_ZHA_PLATFORM_LOADED] = []
for platform in PLATFORMS:
coro = hass.config_entries.async_forward_entry_setup(config_entry, platform)
zha_data[DATA_ZHA_PLATFORM_LOADED].append(hass.async_create_task(coro))
device_registry = await hass.helpers.device_registry.async_get_registry()
device_registry.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(CONNECTION_ZIGBEE, str(zha_gateway.application_controller.ieee))},
identifiers={(DOMAIN, str(zha_gateway.application_controller.ieee))},
name="Zigbee Coordinator",
manufacturer="ZHA",
model=zha_gateway.radio_description,
)
api.async_load_api(hass)
async def async_zha_shutdown(event):
"""Handle shutdown tasks."""
await zha_data[DATA_ZHA_GATEWAY].shutdown()
await zha_data[DATA_ZHA_GATEWAY].async_update_device_storage()
hass.bus.async_listen_once(ha_const.EVENT_HOMEASSISTANT_STOP, async_zha_shutdown)
asyncio.create_task(async_load_entities(hass))
return True
async def async_unload_entry(hass, config_entry):
"""Unload ZHA config entry."""
await hass.data[DATA_ZHA][DATA_ZHA_GATEWAY].shutdown()
GROUP_PROBE.cleanup()
api.async_unload_api(hass)
dispatchers = hass.data[DATA_ZHA].get(DATA_ZHA_DISPATCHERS, [])
for unsub_dispatcher in dispatchers:
unsub_dispatcher()
for platform in PLATFORMS:
await hass.config_entries.async_forward_entry_unload(config_entry, platform)
return True
async def async_load_entities(hass: HomeAssistantType) -> None:
"""Load entities after integration was setup."""
await hass.data[DATA_ZHA][DATA_ZHA_GATEWAY].async_initialize_devices_and_entities()
to_setup = hass.data[DATA_ZHA][DATA_ZHA_PLATFORM_LOADED]
results = await asyncio.gather(*to_setup, return_exceptions=True)
for res in results:
if isinstance(res, Exception):
_LOGGER.warning("Couldn't setup zha platform: %s", res)
async_dispatcher_send(hass, SIGNAL_ADD_ENTITIES)
async def async_migrate_entry(
hass: HomeAssistantType, config_entry: config_entries.ConfigEntry
):
"""Migrate old entry."""
_LOGGER.debug("Migrating from version %s", config_entry.version)
if config_entry.version == 1:
data = {
CONF_RADIO_TYPE: config_entry.data[CONF_RADIO_TYPE],
CONF_DEVICE: {CONF_DEVICE_PATH: config_entry.data[CONF_USB_PATH]},
}
baudrate = hass.data[DATA_ZHA].get(DATA_ZHA_CONFIG, {}).get(CONF_BAUDRATE)
if data[CONF_RADIO_TYPE] != RadioType.deconz and baudrate in BAUD_RATES:
data[CONF_DEVICE][CONF_BAUDRATE] = baudrate
config_entry.version = 2
hass.config_entries.async_update_entry(config_entry, data=data)
_LOGGER.info("Migration to version %s successful", config_entry.version)
return True
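# Migration sketch for the version 1 -> 2 step above (values illustrative):
# a v1 entry storing {CONF_RADIO_TYPE: ..., CONF_USB_PATH: "/dev/ttyUSB0"}
# becomes {CONF_RADIO_TYPE: ..., CONF_DEVICE: {CONF_DEVICE_PATH: "/dev/ttyUSB0"}},
# with any YAML-configured baudrate folded into CONF_DEVICE for non-deconz radios.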
{
"content_hash": "b2fd90eb79f85c447040853c7db3160c",
"timestamp": "",
"source": "github",
"line_count": 177,
"max_line_length": 88,
"avg_line_length": 31.593220338983052,
"alnum_prop": 0.6797210300429185,
"repo_name": "adrienbrault/home-assistant",
"id": "707e0292c4516659ce946f0463d44167f3cb3bd5",
"size": "5592",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "homeassistant/components/zha/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1795"
},
{
"name": "Python",
"bytes": "32021043"
},
{
"name": "Shell",
"bytes": "4900"
}
],
"symlink_target": ""
}
import salt.config
_MINION_CONFIG_FILE = '/etc/salt/minion'
minion_config = salt.config.minion_config(_MINION_CONFIG_FILE)
print('Master: {0}'.format(minion_config.get('master')))
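# Usage sketch (assumes a readable /etc/salt/minion that sets 'master';
# the hostname shown is illustrative):
#
#     $ python show-master.py
#     Master: salt.example.com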
{
"content_hash": "3c8dca48fade3e7b7ec9cb15b08db22d",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 62,
"avg_line_length": 26,
"alnum_prop": 0.7362637362637363,
"repo_name": "craig5/salt-essentials-utils",
"id": "07a4f9cd7d139faebdf9bd21c2e010f39208a46c",
"size": "205",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "example-data/extend-2/5-runner/scripts/show-master.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "3460"
},
{
"name": "Makefile",
"bytes": "1683"
},
{
"name": "Python",
"bytes": "67480"
},
{
"name": "Ruby",
"bytes": "1213"
},
{
"name": "SaltStack",
"bytes": "61788"
},
{
"name": "Scheme",
"bytes": "499"
},
{
"name": "Shell",
"bytes": "20303"
}
],
"symlink_target": ""
}
from __future__ import unicode_literals
import datetime
import itertools
import os
import re
from importlib import import_module
from django.apps import apps
from django.conf import settings
from django.contrib.admin.models import LogEntry
from django.contrib.auth import REDIRECT_FIELD_NAME, SESSION_KEY
from django.contrib.auth.forms import (
AuthenticationForm, PasswordChangeForm, SetPasswordForm,
)
from django.contrib.auth.models import User
from django.contrib.auth.tests.custom_user import CustomUser
from django.contrib.auth.views import login as login_view, redirect_to_login
from django.contrib.sessions.middleware import SessionMiddleware
from django.contrib.sites.requests import RequestSite
from django.core import mail
from django.db import connection
from django.http import HttpRequest, QueryDict
from django.middleware.csrf import CsrfViewMiddleware, get_token
from django.test import TestCase, override_settings
from django.test.utils import patch_logger
from django.urls import NoReverseMatch, reverse, reverse_lazy
from django.utils.encoding import force_text
from django.utils.http import urlquote
from django.utils.six.moves.urllib.parse import ParseResult, urlparse
from django.utils.translation import LANGUAGE_SESSION_KEY
from .models import UUIDUser
from .settings import AUTH_TEMPLATES
@override_settings(
LANGUAGES=[
('en', 'English'),
],
LANGUAGE_CODE='en',
TEMPLATES=AUTH_TEMPLATES,
USE_TZ=False,
PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF='auth_tests.urls',
)
class AuthViewsTestCase(TestCase):
"""
Helper base class for all the following test cases.
"""
@classmethod
def setUpTestData(cls):
cls.u1 = User.objects.create(
password='sha1$6efc0$f93efe9fd7542f25a7be94871ea45aa95de57161',
last_login=datetime.datetime(2006, 12, 17, 7, 3, 31), is_superuser=False, username='testclient',
first_name='Test', last_name='Client', email='testclient@example.com', is_staff=False, is_active=True,
date_joined=datetime.datetime(2006, 12, 17, 7, 3, 31)
)
cls.u2 = User.objects.create(
password='sha1$6efc0$f93efe9fd7542f25a7be94871ea45aa95de57161',
last_login=datetime.datetime(2006, 12, 17, 7, 3, 31), is_superuser=False, username='inactive',
first_name='Inactive', last_name='User', email='testclient2@example.com', is_staff=False, is_active=False,
date_joined=datetime.datetime(2006, 12, 17, 7, 3, 31)
)
cls.u3 = User.objects.create(
password='sha1$6efc0$f93efe9fd7542f25a7be94871ea45aa95de57161',
last_login=datetime.datetime(2006, 12, 17, 7, 3, 31), is_superuser=False, username='staff',
first_name='Staff', last_name='Member', email='staffmember@example.com', is_staff=True, is_active=True,
date_joined=datetime.datetime(2006, 12, 17, 7, 3, 31)
)
cls.u4 = User.objects.create(
password='', last_login=datetime.datetime(2006, 12, 17, 7, 3, 31), is_superuser=False,
username='empty_password', first_name='Empty', last_name='Password', email='empty_password@example.com',
is_staff=False, is_active=True, date_joined=datetime.datetime(2006, 12, 17, 7, 3, 31)
)
cls.u5 = User.objects.create(
password='$', last_login=datetime.datetime(2006, 12, 17, 7, 3, 31), is_superuser=False,
username='unmanageable_password', first_name='Unmanageable', last_name='Password',
email='unmanageable_password@example.com', is_staff=False, is_active=True,
date_joined=datetime.datetime(2006, 12, 17, 7, 3, 31)
)
cls.u6 = User.objects.create(
password='foo$bar', last_login=datetime.datetime(2006, 12, 17, 7, 3, 31), is_superuser=False,
username='unknown_password', first_name='Unknown', last_name='Password',
email='unknown_password@example.com', is_staff=False, is_active=True,
date_joined=datetime.datetime(2006, 12, 17, 7, 3, 31)
)
def login(self, username='testclient', password='password'):
response = self.client.post('/login/', {
'username': username,
'password': password,
})
self.assertIn(SESSION_KEY, self.client.session)
return response
def logout(self):
response = self.client.get('/admin/logout/')
self.assertEqual(response.status_code, 200)
self.assertNotIn(SESSION_KEY, self.client.session)
def assertFormError(self, response, error):
"""Assert that error is found in response.context['form'] errors"""
form_errors = list(itertools.chain(*response.context['form'].errors.values()))
self.assertIn(force_text(error), form_errors)
def assertURLEqual(self, url, expected, parse_qs=False):
"""
Given two URLs, make sure all their components (the ones given by
urlparse) are equal, only comparing components that are present in both
URLs.
If `parse_qs` is True, then the querystrings are parsed with QueryDict.
This is useful if you don't want the order of parameters to matter.
Otherwise, the query strings are compared as-is.
"""
fields = ParseResult._fields
for attr, x, y in zip(fields, urlparse(url), urlparse(expected)):
if parse_qs and attr == 'query':
x, y = QueryDict(x), QueryDict(y)
if x and y and x != y:
self.fail("%r != %r (%s doesn't match)" % (url, expected, attr))
@override_settings(ROOT_URLCONF='django.contrib.auth.urls')
class AuthViewNamedURLTests(AuthViewsTestCase):
def test_named_urls(self):
"Named URLs should be reversible"
expected_named_urls = [
('login', [], {}),
('logout', [], {}),
('password_change', [], {}),
('password_change_done', [], {}),
('password_reset', [], {}),
('password_reset_done', [], {}),
('password_reset_confirm', [], {
'uidb64': 'aaaaaaa',
'token': '1111-aaaaa',
}),
('password_reset_complete', [], {}),
]
for name, args, kwargs in expected_named_urls:
try:
reverse(name, args=args, kwargs=kwargs)
except NoReverseMatch:
self.fail("Reversal of url named '%s' failed with NoReverseMatch" % name)
class PasswordResetTest(AuthViewsTestCase):
def test_email_not_found(self):
"""If the provided email is not registered, don't raise any error but
also don't send any email."""
response = self.client.get('/password_reset/')
self.assertEqual(response.status_code, 200)
response = self.client.post('/password_reset/', {'email': 'not_a_real_email@email.com'})
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 0)
def test_email_found(self):
"Email is sent if a valid email address is provided for password reset"
response = self.client.post('/password_reset/', {'email': 'staffmember@example.com'})
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
self.assertIn("http://", mail.outbox[0].body)
self.assertEqual(settings.DEFAULT_FROM_EMAIL, mail.outbox[0].from_email)
# optional multipart text/html email has been added. Make sure original,
# default functionality is 100% the same
self.assertFalse(mail.outbox[0].message().is_multipart())
def test_extra_email_context(self):
"""
extra_email_context should be available in the email template context.
"""
response = self.client.post(
'/password_reset_extra_email_context/',
{'email': 'staffmember@example.com'},
)
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
self.assertIn('Email email context: "Hello!"', mail.outbox[0].body)
def test_html_mail_template(self):
"""
A multipart email with text/plain and text/html is sent
if the html_email_template parameter is passed to the view
"""
response = self.client.post('/password_reset/html_email_template/', {'email': 'staffmember@example.com'})
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
message = mail.outbox[0].message()
self.assertEqual(len(message.get_payload()), 2)
self.assertTrue(message.is_multipart())
self.assertEqual(message.get_payload(0).get_content_type(), 'text/plain')
self.assertEqual(message.get_payload(1).get_content_type(), 'text/html')
self.assertNotIn('<html>', message.get_payload(0).get_payload())
self.assertIn('<html>', message.get_payload(1).get_payload())
def test_email_found_custom_from(self):
"Email is sent if a valid email address is provided for password reset when a custom from_email is provided."
response = self.client.post('/password_reset_from_email/', {'email': 'staffmember@example.com'})
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual("staffmember@example.com", mail.outbox[0].from_email)
# Skip any 500 handler action (like sending more mail...)
@override_settings(DEBUG_PROPAGATE_EXCEPTIONS=True)
def test_poisoned_http_host(self):
"Poisoned HTTP_HOST headers can't be used for reset emails"
# This attack is based on the way browsers handle URLs. The colon
# should be used to separate the port, but if the URL contains an @,
# the colon is interpreted as part of a username for login purposes,
# making 'evil.com' the request domain. Since HTTP_HOST is used to
# produce a meaningful reset URL, we need to be certain that the
# HTTP_HOST header isn't poisoned. This is done as a check when get_host()
# is invoked, but we check here as a practical consequence.
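# For illustration of that parsing quirk (standard library behaviour, not part
# of this test): urlsplit('//www.example:dr.frankenstein@evil.tld').hostname
# returns 'evil.tld', because everything before '@' is treated as userinfo
# rather than host:port.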
with patch_logger('django.security.DisallowedHost', 'error') as logger_calls:
response = self.client.post(
'/password_reset/',
{'email': 'staffmember@example.com'},
HTTP_HOST='www.example:dr.frankenstein@evil.tld'
)
self.assertEqual(response.status_code, 400)
self.assertEqual(len(mail.outbox), 0)
self.assertEqual(len(logger_calls), 1)
# Skip any 500 handler action (like sending more mail...)
@override_settings(DEBUG_PROPAGATE_EXCEPTIONS=True)
def test_poisoned_http_host_admin_site(self):
"Poisoned HTTP_HOST headers can't be used for reset emails on admin views"
with patch_logger('django.security.DisallowedHost', 'error') as logger_calls:
response = self.client.post(
'/admin_password_reset/',
{'email': 'staffmember@example.com'},
HTTP_HOST='www.example:dr.frankenstein@evil.tld'
)
self.assertEqual(response.status_code, 400)
self.assertEqual(len(mail.outbox), 0)
self.assertEqual(len(logger_calls), 1)
def _test_confirm_start(self):
# Start by creating the email
self.client.post('/password_reset/', {'email': 'staffmember@example.com'})
self.assertEqual(len(mail.outbox), 1)
return self._read_signup_email(mail.outbox[0])
def _read_signup_email(self, email):
urlmatch = re.search(r"https?://[^/]*(/.*reset/\S*)", email.body)
self.assertIsNotNone(urlmatch, "No URL found in sent email")
return urlmatch.group(), urlmatch.groups()[0]
def test_confirm_valid(self):
url, path = self._test_confirm_start()
response = self.client.get(path)
# redirect to a 'complete' page:
self.assertContains(response, "Please enter your new password")
def test_confirm_invalid(self):
url, path = self._test_confirm_start()
# Let's munge the token in the path, but keep the same length,
# in case the URLconf will reject a different length.
path = path[:-5] + ("0" * 4) + path[-1]
response = self.client.get(path)
self.assertContains(response, "The password reset link was invalid")
def test_confirm_invalid_user(self):
# Ensure that we get a 200 response for a non-existent user, not a 404
response = self.client.get('/reset/123456/1-1/')
self.assertContains(response, "The password reset link was invalid")
def test_confirm_overflow_user(self):
# Ensure that we get a 200 response for a base36 user id that overflows int
response = self.client.get('/reset/zzzzzzzzzzzzz/1-1/')
self.assertContains(response, "The password reset link was invalid")
def test_confirm_invalid_post(self):
# Same as test_confirm_invalid, but trying
# to do a POST instead.
url, path = self._test_confirm_start()
path = path[:-5] + ("0" * 4) + path[-1]
self.client.post(path, {
'new_password1': 'anewpassword',
'new_password2': ' anewpassword',
})
# Check the password has not been changed
u = User.objects.get(email='staffmember@example.com')
self.assertTrue(not u.check_password("anewpassword"))
def test_confirm_complete(self):
url, path = self._test_confirm_start()
response = self.client.post(path, {'new_password1': 'anewpassword',
'new_password2': 'anewpassword'})
# Check the password has been changed
u = User.objects.get(email='staffmember@example.com')
self.assertTrue(u.check_password("anewpassword"))
# Check we can't use the link again
response = self.client.get(path)
self.assertContains(response, "The password reset link was invalid")
def test_confirm_different_passwords(self):
url, path = self._test_confirm_start()
response = self.client.post(path, {'new_password1': 'anewpassword',
'new_password2': 'x'})
self.assertFormError(response, SetPasswordForm.error_messages['password_mismatch'])
def test_reset_redirect_default(self):
response = self.client.post('/password_reset/',
{'email': 'staffmember@example.com'})
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/password_reset/done/')
def test_reset_custom_redirect(self):
response = self.client.post('/password_reset/custom_redirect/',
{'email': 'staffmember@example.com'})
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/custom/')
def test_reset_custom_redirect_named(self):
response = self.client.post('/password_reset/custom_redirect/named/',
{'email': 'staffmember@example.com'})
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/password_reset/')
def test_confirm_redirect_default(self):
url, path = self._test_confirm_start()
response = self.client.post(path, {'new_password1': 'anewpassword',
'new_password2': 'anewpassword'})
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/reset/done/')
def test_confirm_redirect_custom(self):
url, path = self._test_confirm_start()
path = path.replace('/reset/', '/reset/custom/')
response = self.client.post(path, {'new_password1': 'anewpassword',
'new_password2': 'anewpassword'})
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/custom/')
def test_confirm_redirect_custom_named(self):
url, path = self._test_confirm_start()
path = path.replace('/reset/', '/reset/custom/named/')
response = self.client.post(path, {'new_password1': 'anewpassword',
'new_password2': 'anewpassword'})
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/password_reset/')
def test_confirm_display_user_from_form(self):
url, path = self._test_confirm_start()
response = self.client.get(path)
# #16919 -- The ``password_reset_confirm`` view should pass the user
# object to the ``SetPasswordForm``, even on GET requests.
# For this test, we render ``{{ form.user }}`` in the template
# ``registration/password_reset_confirm.html`` so that we can test this.
username = User.objects.get(email='staffmember@example.com').username
self.assertContains(response, "Hello, %s." % username)
# However, the view should NOT pass any user object on a form if the
# password reset link was invalid.
response = self.client.get('/reset/zzzzzzzzzzzzz/1-1/')
self.assertContains(response, "Hello, .")
@override_settings(AUTH_USER_MODEL='auth.CustomUser')
class CustomUserPasswordResetTest(AuthViewsTestCase):
user_email = 'staffmember@example.com'
@classmethod
def setUpTestData(cls):
cls.u1 = CustomUser.custom_objects.create(
password='sha1$6efc0$f93efe9fd7542f25a7be94871ea45aa95de57161',
last_login=datetime.datetime(2006, 12, 17, 7, 3, 31), email='staffmember@example.com', is_active=True,
is_admin=False, date_of_birth=datetime.date(1976, 11, 8)
)
def _test_confirm_start(self):
# Start by creating the email
response = self.client.post('/password_reset/', {'email': self.user_email})
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
return self._read_signup_email(mail.outbox[0])
def _read_signup_email(self, email):
urlmatch = re.search(r"https?://[^/]*(/.*reset/\S*)", email.body)
self.assertIsNotNone(urlmatch, "No URL found in sent email")
return urlmatch.group(), urlmatch.groups()[0]
def test_confirm_valid_custom_user(self):
url, path = self._test_confirm_start()
response = self.client.get(path)
# redirect to a 'complete' page:
self.assertContains(response, "Please enter your new password")
# then submit a new password
response = self.client.post(path, {
'new_password1': 'anewpassword',
'new_password2': 'anewpassword',
})
self.assertRedirects(response, '/reset/done/')
@override_settings(AUTH_USER_MODEL='auth_tests.UUIDUser')
class UUIDUserPasswordResetTest(CustomUserPasswordResetTest):
def _test_confirm_start(self):
# instead of fixture
UUIDUser.objects.create_user(
email=self.user_email,
username='foo',
password='foo',
)
return super(UUIDUserPasswordResetTest, self)._test_confirm_start()
class ChangePasswordTest(AuthViewsTestCase):
def fail_login(self, password='password'):
response = self.client.post('/login/', {
'username': 'testclient',
'password': password,
})
self.assertFormError(response, AuthenticationForm.error_messages['invalid_login'] % {
'username': User._meta.get_field('username').verbose_name
})
def logout(self):
self.client.get('/logout/')
def test_password_change_fails_with_invalid_old_password(self):
self.login()
response = self.client.post('/password_change/', {
'old_password': 'donuts',
'new_password1': 'password1',
'new_password2': 'password1',
})
self.assertFormError(response, PasswordChangeForm.error_messages['password_incorrect'])
def test_password_change_fails_with_mismatched_passwords(self):
self.login()
response = self.client.post('/password_change/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'donuts',
})
self.assertFormError(response, SetPasswordForm.error_messages['password_mismatch'])
def test_password_change_succeeds(self):
self.login()
self.client.post('/password_change/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'password1',
})
self.fail_login()
self.login(password='password1')
def test_password_change_done_succeeds(self):
self.login()
response = self.client.post('/password_change/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'password1',
})
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/password_change/done/')
@override_settings(LOGIN_URL='/login/')
def test_password_change_done_fails(self):
response = self.client.get('/password_change/done/')
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/login/?next=/password_change/done/')
def test_password_change_redirect_default(self):
self.login()
response = self.client.post('/password_change/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'password1',
})
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/password_change/done/')
def test_password_change_redirect_custom(self):
self.login()
response = self.client.post('/password_change/custom/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'password1',
})
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/custom/')
def test_password_change_redirect_custom_named(self):
self.login()
response = self.client.post('/password_change/custom/named/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'password1',
})
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/password_reset/')
class SessionAuthenticationTests(AuthViewsTestCase):
def test_user_password_change_updates_session(self):
"""
#21649 - Ensure contrib.auth.views.password_change updates the user's
session auth hash after a password change so the session isn't logged out.
"""
self.login()
response = self.client.post('/password_change/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'password1',
})
# if the hash isn't updated, retrieving the redirection page will fail.
self.assertRedirects(response, '/password_change/done/')
class LoginTest(AuthViewsTestCase):
def test_current_site_in_context_after_login(self):
response = self.client.get(reverse('login'))
self.assertEqual(response.status_code, 200)
if apps.is_installed('django.contrib.sites'):
Site = apps.get_model('sites.Site')
site = Site.objects.get_current()
self.assertEqual(response.context['site'], site)
self.assertEqual(response.context['site_name'], site.name)
else:
self.assertIsInstance(response.context['site'], RequestSite)
self.assertIsInstance(response.context['form'], AuthenticationForm)
def test_security_check(self, password='password'):
login_url = reverse('login')
# Those URLs should not pass the security check
for bad_url in ('http://example.com',
'http:///example.com',
'https://example.com',
'ftp://example.com',
'///example.com',
'//example.com',
'javascript:alert("XSS")'):
nasty_url = '%(url)s?%(next)s=%(bad_url)s' % {
'url': login_url,
'next': REDIRECT_FIELD_NAME,
'bad_url': urlquote(bad_url),
}
response = self.client.post(nasty_url, {
'username': 'testclient',
'password': password,
})
self.assertEqual(response.status_code, 302)
self.assertNotIn(bad_url, response.url,
"%s should be blocked" % bad_url)
# These URLs *should* still pass the security check
for good_url in ('/view/?param=http://example.com',
'/view/?param=https://example.com',
'/view?param=ftp://example.com',
'view/?param=//example.com',
'https://testserver/',
'HTTPS://testserver/',
'//testserver/',
'/url%20with%20spaces/'): # see ticket #12534
safe_url = '%(url)s?%(next)s=%(good_url)s' % {
'url': login_url,
'next': REDIRECT_FIELD_NAME,
'good_url': urlquote(good_url),
}
response = self.client.post(safe_url, {
'username': 'testclient',
'password': password,
})
self.assertEqual(response.status_code, 302)
self.assertIn(good_url, response.url, "%s should be allowed" % good_url)
def test_login_form_contains_request(self):
# 15198
self.client.post('/custom_requestauth_login/', {
'username': 'testclient',
'password': 'password',
}, follow=True)
# the custom authentication form used by this login asserts
# that a request is passed to the form successfully.
def test_login_csrf_rotate(self, password='password'):
"""
Makes sure that a login rotates the currently-used CSRF token.
"""
# Do a GET to establish a CSRF token
# TestClient isn't used here as we're testing middleware, essentially.
req = HttpRequest()
CsrfViewMiddleware().process_view(req, login_view, (), {})
# get_token() triggers CSRF token inclusion in the response
get_token(req)
resp = login_view(req)
resp2 = CsrfViewMiddleware().process_response(req, resp)
csrf_cookie = resp2.cookies.get(settings.CSRF_COOKIE_NAME, None)
token1 = csrf_cookie.coded_value
# Prepare the POST request
req = HttpRequest()
req.COOKIES[settings.CSRF_COOKIE_NAME] = token1
req.method = "POST"
req.POST = {'username': 'testclient', 'password': password, 'csrfmiddlewaretoken': token1}
# Use POST request to log in
SessionMiddleware().process_request(req)
CsrfViewMiddleware().process_view(req, login_view, (), {})
req.META["SERVER_NAME"] = "testserver" # Required to have redirect work in login view
req.META["SERVER_PORT"] = 80
resp = login_view(req)
resp2 = CsrfViewMiddleware().process_response(req, resp)
csrf_cookie = resp2.cookies.get(settings.CSRF_COOKIE_NAME, None)
token2 = csrf_cookie.coded_value
# Check the CSRF token switched
self.assertNotEqual(token1, token2)
def test_session_key_flushed_on_login(self):
"""
To avoid reusing another user's session, ensure a new, empty session is
created if the existing session corresponds to a different authenticated
user.
"""
self.login()
original_session_key = self.client.session.session_key
self.login(username='staff')
self.assertNotEqual(original_session_key, self.client.session.session_key)
def test_session_key_flushed_on_login_after_password_change(self):
"""
As above, but same user logging in after a password change.
"""
self.login()
original_session_key = self.client.session.session_key
# If no password change, session key should not be flushed.
self.login()
self.assertEqual(original_session_key, self.client.session.session_key)
user = User.objects.get(username='testclient')
user.set_password('foobar')
user.save()
self.login(password='foobar')
self.assertNotEqual(original_session_key, self.client.session.session_key)
def test_login_session_without_hash_session_key(self):
"""
Session without django.contrib.auth.HASH_SESSION_KEY should login
without an exception.
"""
user = User.objects.get(username='testclient')
engine = import_module(settings.SESSION_ENGINE)
session = engine.SessionStore()
session[SESSION_KEY] = user.id
session.save()
original_session_key = session.session_key
self.client.cookies[settings.SESSION_COOKIE_NAME] = original_session_key
self.login()
self.assertNotEqual(original_session_key, self.client.session.session_key)
class LoginURLSettings(AuthViewsTestCase):
"""Tests for settings.LOGIN_URL."""
def assertLoginURLEquals(self, url, parse_qs=False):
response = self.client.get('/login_required/')
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, url, parse_qs=parse_qs)
@override_settings(LOGIN_URL='/login/')
def test_standard_login_url(self):
self.assertLoginURLEquals('/login/?next=/login_required/')
@override_settings(LOGIN_URL='login')
def test_named_login_url(self):
self.assertLoginURLEquals('/login/?next=/login_required/')
@override_settings(LOGIN_URL='http://remote.example.com/login')
def test_remote_login_url(self):
quoted_next = urlquote('http://testserver/login_required/')
expected = 'http://remote.example.com/login?next=%s' % quoted_next
self.assertLoginURLEquals(expected)
@override_settings(LOGIN_URL='https:///login/')
def test_https_login_url(self):
quoted_next = urlquote('http://testserver/login_required/')
expected = 'https:///login/?next=%s' % quoted_next
self.assertLoginURLEquals(expected)
@override_settings(LOGIN_URL='/login/?pretty=1')
def test_login_url_with_querystring(self):
self.assertLoginURLEquals('/login/?pretty=1&next=/login_required/', parse_qs=True)
@override_settings(LOGIN_URL='http://remote.example.com/login/?next=/default/')
def test_remote_login_url_with_next_querystring(self):
quoted_next = urlquote('http://testserver/login_required/')
expected = 'http://remote.example.com/login/?next=%s' % quoted_next
self.assertLoginURLEquals(expected)
@override_settings(LOGIN_URL=reverse_lazy('login'))
def test_lazy_login_url(self):
self.assertLoginURLEquals('/login/?next=/login_required/')
class LoginRedirectUrlTest(AuthViewsTestCase):
"""Tests for settings.LOGIN_REDIRECT_URL."""
def assertLoginRedirectURLEqual(self, url):
response = self.login()
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, url)
def test_default(self):
self.assertLoginRedirectURLEqual('/accounts/profile/')
@override_settings(LOGIN_REDIRECT_URL='/custom/')
def test_custom(self):
self.assertLoginRedirectURLEqual('/custom/')
@override_settings(LOGIN_REDIRECT_URL='password_reset')
def test_named(self):
self.assertLoginRedirectURLEqual('/password_reset/')
@override_settings(LOGIN_REDIRECT_URL='http://remote.example.com/welcome/')
def test_remote(self):
self.assertLoginRedirectURLEqual('http://remote.example.com/welcome/')
class RedirectToLoginTests(AuthViewsTestCase):
"""Tests for the redirect_to_login view"""
@override_settings(LOGIN_URL=reverse_lazy('login'))
def test_redirect_to_login_with_lazy(self):
login_redirect_response = redirect_to_login(next='/else/where/')
expected = '/login/?next=/else/where/'
self.assertEqual(expected, login_redirect_response.url)
@override_settings(LOGIN_URL=reverse_lazy('login'))
def test_redirect_to_login_with_lazy_and_unicode(self):
login_redirect_response = redirect_to_login(next='/else/where/झ/')
expected = '/login/?next=/else/where/%E0%A4%9D/'
self.assertEqual(expected, login_redirect_response.url)
class LogoutTest(AuthViewsTestCase):
def confirm_logged_out(self):
self.assertNotIn(SESSION_KEY, self.client.session)
def test_logout_default(self):
"Logout without next_page option renders the default template"
self.login()
response = self.client.get('/logout/')
self.assertContains(response, 'Logged out')
self.confirm_logged_out()
def test_14377(self):
# Bug 14377
self.login()
response = self.client.get('/logout/')
self.assertIn('site', response.context)
def test_logout_doesnt_cache(self):
"""
The logout() view should send "no-cache" headers for reasons described
in #25490.
"""
response = self.client.get('/logout/')
self.assertIn('no-store', response['Cache-Control'])
def test_logout_with_overridden_redirect_url(self):
# Bug 11223
self.login()
response = self.client.get('/logout/next_page/')
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/somewhere/')
response = self.client.get('/logout/next_page/?next=/login/')
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/login/')
self.confirm_logged_out()
def test_logout_with_next_page_specified(self):
"Logout with next_page option given redirects to specified resource"
self.login()
response = self.client.get('/logout/next_page/')
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/somewhere/')
self.confirm_logged_out()
def test_logout_with_redirect_argument(self):
"Logout with query string redirects to specified resource"
self.login()
response = self.client.get('/logout/?next=/login/')
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/login/')
self.confirm_logged_out()
def test_logout_with_custom_redirect_argument(self):
"Logout with custom query string redirects to specified resource"
self.login()
response = self.client.get('/logout/custom_query/?follow=/somewhere/')
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/somewhere/')
self.confirm_logged_out()
def test_logout_with_named_redirect(self):
"Logout resolves names or URLs passed as next_page."
self.login()
response = self.client.get('/logout/next_page/named/')
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/password_reset/')
self.confirm_logged_out()
def test_security_check(self, password='password'):
logout_url = reverse('logout')
# Those URLs should not pass the security check
for bad_url in ('http://example.com',
'http:///example.com',
'https://example.com',
'ftp://example.com',
'///example.com',
'//example.com',
'javascript:alert("XSS")'):
nasty_url = '%(url)s?%(next)s=%(bad_url)s' % {
'url': logout_url,
'next': REDIRECT_FIELD_NAME,
'bad_url': urlquote(bad_url),
}
self.login()
response = self.client.get(nasty_url)
self.assertEqual(response.status_code, 302)
self.assertNotIn(bad_url, response.url,
"%s should be blocked" % bad_url)
self.confirm_logged_out()
# These URLs *should* still pass the security check
for good_url in ('/view/?param=http://example.com',
'/view/?param=https://example.com',
'/view?param=ftp://example.com',
'view/?param=//example.com',
'https://testserver/',
'HTTPS://testserver/',
'//testserver/',
'/url%20with%20spaces/'): # see ticket #12534
safe_url = '%(url)s?%(next)s=%(good_url)s' % {
'url': logout_url,
'next': REDIRECT_FIELD_NAME,
'good_url': urlquote(good_url),
}
self.login()
response = self.client.get(safe_url)
self.assertEqual(response.status_code, 302)
self.assertIn(good_url, response.url, "%s should be allowed" % good_url)
self.confirm_logged_out()
def test_logout_preserve_language(self):
"""Check that language stored in session is preserved after logout"""
# Create a new session with language
engine = import_module(settings.SESSION_ENGINE)
session = engine.SessionStore()
session[LANGUAGE_SESSION_KEY] = 'pl'
session.save()
self.client.cookies[settings.SESSION_COOKIE_NAME] = session.session_key
self.client.get('/logout/')
self.assertEqual(self.client.session[LANGUAGE_SESSION_KEY], 'pl')
# Redirect in test_user_change_password will fail if session auth hash
# isn't updated after password change (#21649)
@override_settings(
PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF='auth_tests.urls_admin',
)
class ChangelistTests(AuthViewsTestCase):
def setUp(self):
# Make me a superuser before logging in.
User.objects.filter(username='testclient').update(is_staff=True, is_superuser=True)
self.login()
self.admin = User.objects.get(pk=self.u1.pk)
def get_user_data(self, user):
return {
'username': user.username,
'password': user.password,
'email': user.email,
'is_active': user.is_active,
'is_staff': user.is_staff,
'is_superuser': user.is_superuser,
'last_login_0': user.last_login.strftime('%Y-%m-%d'),
'last_login_1': user.last_login.strftime('%H:%M:%S'),
'initial-last_login_0': user.last_login.strftime('%Y-%m-%d'),
'initial-last_login_1': user.last_login.strftime('%H:%M:%S'),
'date_joined_0': user.date_joined.strftime('%Y-%m-%d'),
'date_joined_1': user.date_joined.strftime('%H:%M:%S'),
'initial-date_joined_0': user.date_joined.strftime('%Y-%m-%d'),
'initial-date_joined_1': user.date_joined.strftime('%H:%M:%S'),
'first_name': user.first_name,
'last_name': user.last_name,
}
# #20078 - users shouldn't be allowed to guess password hashes via
# repeated password__startswith queries.
def test_changelist_disallows_password_lookups(self):
# A lookup that tries to filter on password isn't OK
with patch_logger('django.security.DisallowedModelAdminLookup', 'error') as logger_calls:
response = self.client.get(reverse('auth_test_admin:auth_user_changelist') + '?password__startswith=sha1$')
self.assertEqual(response.status_code, 400)
self.assertEqual(len(logger_calls), 1)
def test_user_change_email(self):
data = self.get_user_data(self.admin)
data['email'] = 'new_' + data['email']
response = self.client.post(
reverse('auth_test_admin:auth_user_change', args=(self.admin.pk,)),
data
)
self.assertRedirects(response, reverse('auth_test_admin:auth_user_changelist'))
row = LogEntry.objects.latest('id')
self.assertEqual(row.get_change_message(), 'Changed email.')
def test_user_not_change(self):
response = self.client.post(
reverse('auth_test_admin:auth_user_change', args=(self.admin.pk,)),
self.get_user_data(self.admin)
)
self.assertRedirects(response, reverse('auth_test_admin:auth_user_changelist'))
row = LogEntry.objects.latest('id')
self.assertEqual(row.get_change_message(), 'No fields changed.')
def test_user_change_password(self):
user_change_url = reverse('auth_test_admin:auth_user_change', args=(self.admin.pk,))
password_change_url = reverse('auth_test_admin:auth_user_password_change', args=(self.admin.pk,))
response = self.client.get(user_change_url)
# Test the link inside password field help_text.
rel_link = re.search(
r'you can change the password using <a href="([^"]*)">this form</a>',
force_text(response.content)
).groups()[0]
self.assertEqual(
os.path.normpath(user_change_url + rel_link),
os.path.normpath(password_change_url)
)
response = self.client.post(
password_change_url,
{
'password1': 'password1',
'password2': 'password1',
}
)
self.assertRedirects(response, user_change_url)
row = LogEntry.objects.latest('id')
self.assertEqual(row.get_change_message(), 'Changed password.')
self.logout()
self.login(password='password1')
def test_user_change_different_user_password(self):
u = User.objects.get(email='staffmember@example.com')
response = self.client.post(
reverse('auth_test_admin:auth_user_password_change', args=(u.pk,)),
{
'password1': 'password1',
'password2': 'password1',
}
)
self.assertRedirects(response, reverse('auth_test_admin:auth_user_change', args=(u.pk,)))
row = LogEntry.objects.latest('id')
self.assertEqual(row.user_id, self.admin.pk)
self.assertEqual(row.object_id, str(u.pk))
self.assertEqual(row.get_change_message(), 'Changed password.')
def test_password_change_bad_url(self):
response = self.client.get(reverse('auth_test_admin:auth_user_password_change', args=('foobar',)))
self.assertEqual(response.status_code, 404)
@override_settings(
AUTH_USER_MODEL='auth_tests.UUIDUser',
ROOT_URLCONF='auth_tests.urls_custom_user_admin',
)
class UUIDUserTests(TestCase):
def test_admin_password_change(self):
u = UUIDUser.objects.create_superuser(username='uuid', email='foo@bar.com', password='test')
self.assertTrue(self.client.login(username='uuid', password='test'))
user_change_url = reverse('custom_user_admin:auth_tests_uuiduser_change', args=(u.pk,))
response = self.client.get(user_change_url)
self.assertEqual(response.status_code, 200)
password_change_url = reverse('custom_user_admin:auth_user_password_change', args=(u.pk,))
response = self.client.get(password_change_url)
self.assertEqual(response.status_code, 200)
# A LogEntry is created with pk=1 which breaks a FK constraint on MySQL
with connection.constraint_checks_disabled():
response = self.client.post(password_change_url, {
'password1': 'password1',
'password2': 'password1',
})
self.assertRedirects(response, user_change_url)
row = LogEntry.objects.latest('id')
self.assertEqual(row.user_id, 1) # hardcoded in CustomUserAdmin.log_change()
self.assertEqual(row.object_id, str(u.pk))
self.assertEqual(row.get_change_message(), 'Changed password.')
|
{
"content_hash": "22dc4547415a933dd42714ba032f0d1c",
"timestamp": "",
"source": "github",
"line_count": 1020,
"max_line_length": 119,
"avg_line_length": 43.430392156862744,
"alnum_prop": 0.6206460642452426,
"repo_name": "AltSchool/django",
"id": "f3593d46c5be3f3b97f463f725fd24558fab9a5c",
"size": "44325",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "tests/auth_tests/test_views.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "52334"
},
{
"name": "HTML",
"bytes": "170440"
},
{
"name": "JavaScript",
"bytes": "256027"
},
{
"name": "Makefile",
"bytes": "125"
},
{
"name": "Python",
"bytes": "11448592"
},
{
"name": "Shell",
"bytes": "809"
},
{
"name": "Smarty",
"bytes": "130"
}
],
"symlink_target": ""
}
|
"""
Sample application that demonstrates various aspects of App Engine's request
handling.
"""
import os
import time
import webapp2
# [START request_timer]
class TimerHandler(webapp2.RequestHandler):
def get(self):
from google.appengine.runtime import DeadlineExceededError
try:
time.sleep(70)
self.response.write('Completed.')
except DeadlineExceededError:
self.response.clear()
self.response.set_status(500)
self.response.out.write(
'The request did not complete in time.')
# [END request_timer]
# [START environment]
class PrintEnvironmentHandler(webapp2.RequestHandler):
def get(self):
self.response.headers['Content-Type'] = 'text/plain'
for key, value in os.environ.iteritems():
self.response.out.write(
"{} = {}\n".format(key, value))
# [END environment]
# [START request_ids]
class RequestIdHandler(webapp2.RequestHandler):
def get(self):
self.response.headers['Content-Type'] = 'text/plain'
request_id = os.environ.get('REQUEST_LOG_ID')
self.response.write(
'REQUEST_LOG_ID={}'.format(request_id))
# [END request_ids]
app = webapp2.WSGIApplication([
('/timer', TimerHandler),
('/environment', PrintEnvironmentHandler),
('/requestid', RequestIdHandler)
], debug=True)
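# Hedged usage note (not part of the original sample): with an app.yaml that
# routes all URLs to this module, the handlers above are reachable locally at
# /timer, /environment and /requestid, e.g. after starting the dev server with
#   dev_appserver.py app.yaml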
|
{
"content_hash": "8fb15eeb433fe0ab2c7596353f0b53c7",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 76,
"avg_line_length": 26.75,
"alnum_prop": 0.6441409058231489,
"repo_name": "clarko1/Cramd",
"id": "5064b5b937e762cc892e623c09c0ca0c163c2215",
"size": "1988",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "appengine/standard/requests/main.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "2924"
},
{
"name": "HTML",
"bytes": "23592"
},
{
"name": "JavaScript",
"bytes": "11222"
},
{
"name": "Makefile",
"bytes": "881"
},
{
"name": "Protocol Buffer",
"bytes": "8810"
},
{
"name": "Python",
"bytes": "1055640"
},
{
"name": "Shell",
"bytes": "8344"
}
],
"symlink_target": ""
}
|
"""Extracts features for different models."""
import functools
import tensorflow as tf
from deeplab.core import resnet_v1_beta
from deeplab.core import xception
from tensorflow.contrib.slim.nets import resnet_utils
from nets.mobilenet import mobilenet_v2
slim = tf.contrib.slim
# Default end point for MobileNetv2.
_MOBILENET_V2_FINAL_ENDPOINT = 'layer_18'
def _mobilenet_v2(net,
depth_multiplier,
output_stride,
reuse=None,
scope=None,
final_endpoint=None):
"""Auxiliary function to add support for 'reuse' to mobilenet_v2.
Args:
net: Input tensor of shape [batch_size, height, width, channels].
depth_multiplier: Float multiplier for the depth (number of channels)
for all convolution ops. The value must be greater than zero. Typical
usage will be to set this value in (0, 1) to reduce the number of
parameters or computation cost of the model.
output_stride: An integer that specifies the requested ratio of input to
output spatial resolution. If not None, then we invoke atrous convolution
if necessary to prevent the network from reducing the spatial resolution
of the activation maps. Allowed values are 8 (accurate fully convolutional
mode), 16 (fast fully convolutional mode), 32 (classification mode).
reuse: Reuse model variables.
scope: Optional variable scope.
final_endpoint: The endpoint to construct the network up to.
Returns:
Features extracted by MobileNetv2.
"""
with tf.variable_scope(
scope, 'MobilenetV2', [net], reuse=reuse) as scope:
return mobilenet_v2.mobilenet_base(
net,
conv_defs=mobilenet_v2.V2_DEF,
depth_multiplier=depth_multiplier,
min_depth=8 if depth_multiplier == 1.0 else 1,
divisible_by=8 if depth_multiplier == 1.0 else 1,
final_endpoint=final_endpoint or _MOBILENET_V2_FINAL_ENDPOINT,
output_stride=output_stride,
scope=scope)
# A map from network name to network function.
networks_map = {
'mobilenet_v2': _mobilenet_v2,
'resnet_v1_50': resnet_v1_beta.resnet_v1_50,
'resnet_v1_50_beta': resnet_v1_beta.resnet_v1_50_beta,
'resnet_v1_101': resnet_v1_beta.resnet_v1_101,
'resnet_v1_101_beta': resnet_v1_beta.resnet_v1_101_beta,
'xception_41': xception.xception_41,
'xception_65': xception.xception_65,
'xception_71': xception.xception_71,
}
# A map from network name to network arg scope.
arg_scopes_map = {
'mobilenet_v2': mobilenet_v2.training_scope,
'resnet_v1_50': resnet_utils.resnet_arg_scope,
'resnet_v1_50_beta': resnet_utils.resnet_arg_scope,
'resnet_v1_101': resnet_utils.resnet_arg_scope,
'resnet_v1_101_beta': resnet_utils.resnet_arg_scope,
'xception_41': xception.xception_arg_scope,
'xception_65': xception.xception_arg_scope,
'xception_71': xception.xception_arg_scope,
}
# Names for end point features.
DECODER_END_POINTS = 'decoder_end_points'
# A dictionary from network name to a map of end point features.
networks_to_feature_maps = {
'mobilenet_v2': {
DECODER_END_POINTS: ['layer_4/depthwise_output'],
},
'resnet_v1_50': {
DECODER_END_POINTS: ['block1/unit_2/bottleneck_v1/conv3'],
},
'resnet_v1_50_beta': {
DECODER_END_POINTS: ['block1/unit_2/bottleneck_v1/conv3'],
},
'resnet_v1_101': {
DECODER_END_POINTS: ['block1/unit_2/bottleneck_v1/conv3'],
},
'resnet_v1_101_beta': {
DECODER_END_POINTS: ['block1/unit_2/bottleneck_v1/conv3'],
},
'xception_41': {
DECODER_END_POINTS: [
'entry_flow/block2/unit_1/xception_module/'
'separable_conv2_pointwise',
],
},
'xception_65': {
DECODER_END_POINTS: [
'entry_flow/block2/unit_1/xception_module/'
'separable_conv2_pointwise',
],
},
'xception_71': {
DECODER_END_POINTS: [
'entry_flow/block3/unit_1/xception_module/'
'separable_conv2_pointwise',
],
},
}
# A map from feature extractor name to the network name scope used in the
# ImageNet pretrained versions of these models.
name_scope = {
'mobilenet_v2': 'MobilenetV2',
'resnet_v1_50': 'resnet_v1_50',
'resnet_v1_50_beta': 'resnet_v1_50',
'resnet_v1_101': 'resnet_v1_101',
'resnet_v1_101_beta': 'resnet_v1_101',
'xception_41': 'xception_41',
'xception_65': 'xception_65',
'xception_71': 'xception_71',
}
# Mean pixel value.
_MEAN_RGB = [123.15, 115.90, 103.06]
def _preprocess_subtract_imagenet_mean(inputs):
"""Subtract Imagenet mean RGB value."""
mean_rgb = tf.reshape(_MEAN_RGB, [1, 1, 1, 3])
return inputs - mean_rgb
def _preprocess_zero_mean_unit_range(inputs):
"""Map image values from [0, 255] to [-1, 1]."""
return (2.0 / 255.0) * tf.to_float(inputs) - 1.0
_PREPROCESS_FN = {
'mobilenet_v2': _preprocess_zero_mean_unit_range,
'resnet_v1_50': _preprocess_subtract_imagenet_mean,
'resnet_v1_50_beta': _preprocess_zero_mean_unit_range,
'resnet_v1_101': _preprocess_subtract_imagenet_mean,
'resnet_v1_101_beta': _preprocess_zero_mean_unit_range,
'xception_41': _preprocess_zero_mean_unit_range,
'xception_65': _preprocess_zero_mean_unit_range,
'xception_71': _preprocess_zero_mean_unit_range,
}
def mean_pixel(model_variant=None):
"""Gets mean pixel value.
  This function returns a different mean pixel value, depending on the input
  model_variant, which adopts a different preprocessing function. We currently
  handle the following preprocessing functions:
(1) _preprocess_subtract_imagenet_mean. We simply return mean pixel value.
(2) _preprocess_zero_mean_unit_range. We return [127.5, 127.5, 127.5].
The return values are used in a way that the padded regions after
pre-processing will contain value 0.
Args:
model_variant: Model variant (string) for feature extraction. For
backwards compatibility, model_variant=None returns _MEAN_RGB.
Returns:
Mean pixel value.
"""
if model_variant in ['resnet_v1_50',
'resnet_v1_101'] or model_variant is None:
return _MEAN_RGB
else:
return [127.5, 127.5, 127.5]
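# Hedged illustration (not part of the original file): with
# _preprocess_zero_mean_unit_range a padding value of 127.5 maps to
# (2.0 / 255.0) * 127.5 - 1.0 == 0.0, which is why mean_pixel() returns
# [127.5, 127.5, 127.5] for the variants that use that preprocessing.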
def extract_features(images,
output_stride=8,
multi_grid=None,
depth_multiplier=1.0,
final_endpoint=None,
model_variant=None,
weight_decay=0.0001,
reuse=None,
is_training=False,
fine_tune_batch_norm=False,
regularize_depthwise=False,
preprocess_images=True,
num_classes=None,
global_pool=False):
"""Extracts features by the particular model_variant.
Args:
images: A tensor of size [batch, height, width, channels].
output_stride: The ratio of input to output spatial resolution.
multi_grid: Employ a hierarchy of different atrous rates within network.
depth_multiplier: Float multiplier for the depth (number of channels)
for all convolution ops used in MobileNet.
final_endpoint: The MobileNet endpoint to construct the network up to.
model_variant: Model variant for feature extraction.
weight_decay: The weight decay for model variables.
reuse: Reuse the model variables or not.
is_training: Is training or not.
fine_tune_batch_norm: Fine-tune the batch norm parameters or not.
regularize_depthwise: Whether or not apply L2-norm regularization on the
depthwise convolution weights.
preprocess_images: Performs preprocessing on images or not. Defaults to
True. Set to False if preprocessing will be done by other functions. We
      support two types of preprocessing: (1) Mean pixel subtraction and (2)
Pixel values normalization to be [-1, 1].
num_classes: Number of classes for image classification task. Defaults
to None for dense prediction tasks.
global_pool: Global pooling for image classification task. Defaults to
False, since dense prediction tasks do not use this.
Returns:
features: A tensor of size [batch, feature_height, feature_width,
feature_channels], where feature_height/feature_width are determined
by the images height/width and output_stride.
end_points: A dictionary from components of the network to the corresponding
activation.
Raises:
ValueError: Unrecognized model variant.
"""
if 'resnet' in model_variant:
arg_scope = arg_scopes_map[model_variant](
weight_decay=weight_decay,
batch_norm_decay=0.95,
batch_norm_epsilon=1e-5,
batch_norm_scale=True)
features, end_points = get_network(
model_variant, preprocess_images, arg_scope)(
inputs=images,
num_classes=num_classes,
is_training=(is_training and fine_tune_batch_norm),
global_pool=global_pool,
output_stride=output_stride,
multi_grid=multi_grid,
reuse=reuse,
scope=name_scope[model_variant])
elif 'xception' in model_variant:
arg_scope = arg_scopes_map[model_variant](
weight_decay=weight_decay,
batch_norm_decay=0.9997,
batch_norm_epsilon=1e-3,
batch_norm_scale=True,
regularize_depthwise=regularize_depthwise)
features, end_points = get_network(
model_variant, preprocess_images, arg_scope)(
inputs=images,
num_classes=num_classes,
is_training=(is_training and fine_tune_batch_norm),
global_pool=global_pool,
output_stride=output_stride,
regularize_depthwise=regularize_depthwise,
multi_grid=multi_grid,
reuse=reuse,
scope=name_scope[model_variant])
elif 'mobilenet' in model_variant:
arg_scope = arg_scopes_map[model_variant](
is_training=(is_training and fine_tune_batch_norm),
weight_decay=weight_decay)
features, end_points = get_network(
model_variant, preprocess_images, arg_scope)(
inputs=images,
depth_multiplier=depth_multiplier,
output_stride=output_stride,
reuse=reuse,
scope=name_scope[model_variant],
final_endpoint=final_endpoint)
else:
raise ValueError('Unknown model variant %s.' % model_variant)
return features, end_points
def get_network(network_name, preprocess_images, arg_scope=None):
"""Gets the network.
Args:
network_name: Network name.
preprocess_images: Preprocesses the images or not.
arg_scope: Optional, arg_scope to build the network. If not provided the
default arg_scope of the network would be used.
Returns:
A network function that is used to extract features.
Raises:
ValueError: network is not supported.
"""
if network_name not in networks_map:
raise ValueError('Unsupported network %s.' % network_name)
arg_scope = arg_scope or arg_scopes_map[network_name]()
def _identity_function(inputs):
return inputs
if preprocess_images:
preprocess_function = _PREPROCESS_FN[network_name]
else:
preprocess_function = _identity_function
func = networks_map[network_name]
@functools.wraps(func)
def network_fn(inputs, *args, **kwargs):
with slim.arg_scope(arg_scope):
return func(preprocess_function(inputs), *args, **kwargs)
return network_fn
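# --- Hedged usage sketch (not part of the original module) ---
# Minimal illustration of calling extract_features for dense prediction with
# the mobilenet_v2 variant; the placeholder input shape is an assumption.
if __name__ == '__main__':
  _images = tf.placeholder(tf.float32, shape=[1, 513, 513, 3])
  _features, _end_points = extract_features(
      _images,
      output_stride=16,
      model_variant='mobilenet_v2',
      is_training=False)
  print(_features.get_shape())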
|
{
"content_hash": "b9592616d17e88069880cae6c93d32f9",
"timestamp": "",
"source": "github",
"line_count": 315,
"max_line_length": 80,
"avg_line_length": 36.62539682539683,
"alnum_prop": 0.6584033977637167,
"repo_name": "cshallue/models",
"id": "da89dfe938d66e0844fca7205de0e2fd791054bd",
"size": "12226",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "research/deeplab/core/feature_extractor.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "1523636"
},
{
"name": "Dockerfile",
"bytes": "9821"
},
{
"name": "GLSL",
"bytes": "976"
},
{
"name": "HTML",
"bytes": "147010"
},
{
"name": "JavaScript",
"bytes": "33208"
},
{
"name": "Jupyter Notebook",
"bytes": "2829707"
},
{
"name": "Makefile",
"bytes": "4933"
},
{
"name": "Python",
"bytes": "13149300"
},
{
"name": "Shell",
"bytes": "146035"
}
],
"symlink_target": ""
}
|
import GafferUI
import GafferVDB
GafferUI.Metadata.registerNode(
GafferVDB.PointsGridToPoints,
'description',
"""Converts a points grid in a VDB object to a points primitive.""",
plugs={
'grid' : [
'description',
"""
Name of the points grid in the VDB to create a points primitive from.
"""
],
"names" : [
"description",
"""
The names of the primitive variables to be extracted from VDB points grid.
Names should be separated by spaces, and Gaffer's
standard wildcard characters may be used.
"""
],
"invertNames" : [
"description",
"""
When on, the primitive variables matched by names
are not extracted, and the non-matching primitive
variables are extracted instead.
"""
],
}
)
|
{
"content_hash": "bcbc293e89aee55fa2cacba3ee5536d3",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 77,
"avg_line_length": 21.4,
"alnum_prop": 0.671562082777036,
"repo_name": "GafferHQ/gaffer",
"id": "d946ae082ed77f005eb1463c7b0a78785a8d7837",
"size": "2564",
"binary": false,
"copies": "10",
"ref": "refs/heads/main",
"path": "python/GafferVDBUI/PointsGridToPointsUI.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "5790"
},
{
"name": "C",
"bytes": "61993"
},
{
"name": "C++",
"bytes": "9572701"
},
{
"name": "CMake",
"bytes": "85201"
},
{
"name": "GLSL",
"bytes": "6208"
},
{
"name": "Python",
"bytes": "10280178"
},
{
"name": "Ruby",
"bytes": "419"
},
{
"name": "Shell",
"bytes": "14580"
}
],
"symlink_target": ""
}
|
"""
Tasks module for use within the integration tests.
"""
from invoke import task
@task
def print_foo(c):
print("foo")
@task
def print_name(c, name):
print(name)
@task
def print_config(c):
print(c.foo)
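# Hedged usage note (not part of the original tasks module): these tasks are
# normally run through the `invoke` CLI from the directory containing this
# file, for example:
#   invoke print-foo
#   invoke print-name --name world
#   invoke print-config   # expects `foo` to be defined in the invoke config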
|
{
"content_hash": "c35ed7d55addd9725d55526ede497324",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 50,
"avg_line_length": 12.222222222222221,
"alnum_prop": 0.6545454545454545,
"repo_name": "mkusz/invoke",
"id": "70ce5afd074c49bf6a7493fb89a0bb8e7af38cc6",
"size": "220",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "integration/tasks.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "PowerShell",
"bytes": "876"
},
{
"name": "Python",
"bytes": "553234"
},
{
"name": "Shell",
"bytes": "2763"
}
],
"symlink_target": ""
}
|
from scrapy.spider import BaseSpider
from scrapy.selector import HtmlXPathSelector
from scrapy.http import Request
from product_spiders.items import Product, ProductLoader
from product_spiders.utils import extract_price2uk
from decimal import Decimal
import logging
class RefrinclimaItSpider(BaseSpider):
name = "refrinclima.it"
allowed_domains = ["refrinclima.it"]
start_urls = (
'http://www.refrinclima.it/',
)
def parse(self, response):
hxs = HtmlXPathSelector(response)
categories = hxs.select("//div[@class='main-menu']/div[@class='menu']/ul/li//a/@href").extract()
for category in categories:
yield Request(category, callback=self.parse)
pages = hxs.select("//div[@class='pagination']/ul[@class='pagination']/li/a/@href").extract()
for page in pages:
yield Request(page, callback=self.parse)
products = hxs.select("//ul[@id='product_list']/li")
for product in products:
url = product.select("div/h5/a/@href").extract()[0]
yield Request(url, callback=self.parse_item)
def parse_item(self, response):
url = response.url
hxs = HtmlXPathSelector(response)
name = hxs.select("//div[@id='primary_block']/div[@id='pb-left-column']/h2/text()").extract()
if not name:
logging.error("NO NAME! %s" % url)
return
name = name[0]
price = hxs.select("//p[@class='price']/span[@class='our_price_display']/span/text()").extract()
if not price:
logging.error("NO PRICE! %s" % url)
return
price = price[0]
price = Decimal(extract_price2uk(price))
eco_tax = hxs.select("//p[@class='price-ecotax']/span/text()").extract()
if eco_tax:
eco_tax[0] = eco_tax[0].encode('ascii', 'ignore')
print "Found eco tax %s" % eco_tax[0]
price -= Decimal(extract_price2uk(eco_tax[0]))
l = ProductLoader(item=Product(), response=response)
l.add_value('identifier', str(name))
l.add_value('name', name)
l.add_value('url', url)
l.add_value('price', unicode(price))
yield l.load_item()
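# Hedged usage note (not part of the original spider): inside a Scrapy project
# that registers this module, the crawl is normally started with
#   scrapy crawl refrinclima.it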
|
{
"content_hash": "534b5ecea6599c21fa6ff26c178e2354",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 104,
"avg_line_length": 35.0625,
"alnum_prop": 0.5984848484848485,
"repo_name": "0--key/lib",
"id": "1efc3a75f5c071479e80fb035dcd450ed6e545a8",
"size": "2244",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "portfolio/Python/scrapy/rosarioweb/refrinclima_it.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "28210"
},
{
"name": "Emacs Lisp",
"bytes": "76390"
},
{
"name": "HTML",
"bytes": "1136671"
},
{
"name": "JavaScript",
"bytes": "27718"
},
{
"name": "PHP",
"bytes": "378537"
},
{
"name": "Python",
"bytes": "1892998"
},
{
"name": "Shell",
"bytes": "4030"
}
],
"symlink_target": ""
}
|
import smtplib
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
from email.header import Header
from email.generator import Generator
from random import randint
from time import sleep
from credentials import GMAIL_MAILADDR, GMAIL_PASSWORD, GMAIL_SENDER_NAME
GMAIL_SENDER = (GMAIL_SENDER_NAME, GMAIL_MAILADDR)
from credentials import PYSS_MAILADDR, PYSS_PASSWORD, PYSS_SENDER_NAME
PYSS_SENDER = (PYSS_SENDER_NAME, PYSS_MAILADDR)
try:
from email import Charset as charset
from cStringIO import StringIO
except:
from io import StringIO
from email import charset
# Default encoding mode set to Quoted Printable. Acts globally!
charset.add_charset('utf-8', charset.QP, charset.QP, 'utf-8')
class EMailClient(object):
_smtpserver = ''
_serverport = ''
def __init__(self, username, password, smtpserver='', serverport=''):
self.username = username
if not self._smtpserver:
self._smtpserver = smtpserver
if not self._serverport:
self._serverport = serverport
self.session = self.new_session()
try:
self.session.login(username, password)
except:
print('Error trying to login with user {}.'.format(username))
raise
self.message = None
def new_session(self):
try:
session = smtplib.SMTP(self._smtpserver, self._serverport)
session.ehlo ()
session.starttls()
except:
raise
else:
return session
@staticmethod
def _get_email_contact_list(contacts):
if contacts is not None:
return ["{}".format(c) for c in contacts]
def _add_addr_list(self, contacts, field='To'):
self.msg[field] = ','.join(self._get_email_contact_list(contacts))
def _add_from_addr(self, from_addr):
self.msg['From'] = "{}".format(from_addr)
def set_msg_header(self, from_addr, to_addr_list, subject, cc_addr_list=[]):
        # 'alternative' MIME type: HTML and plain text bundled in one e-mail message
self.msg = MIMEMultipart('alternative')
self.msg['Subject'] = "{}".format(Header(subject, 'utf-8'))
# Only descriptive part of recipient and sender shall be encoded, not the email address
self._add_from_addr(from_addr)
self._add_addr_list(to_addr_list, field='To')
if cc_addr_list:
self._add_addr_list(cc_addr_list, field='Cc')
def set_msg_body_html(self, html):
# Attach html parts
htmlpart = MIMEText(html, 'html', 'UTF-8')
self.msg.attach(htmlpart)
def set_msg_body_text(self, text):
# Attach text parts
textpart = MIMEText(text, 'plain', 'UTF-8')
self.msg.attach(textpart)
def send_email(self):
try:
            # Create a generator and flatten message object to 'file'
str_io = StringIO()
g = Generator(str_io, False)
g.flatten(self.msg)
# str_io.getvalue() contains ready to sent message
            # Optionally, send it using python's smtplib
problems = self.session.sendmail("", self.msg['To'], str_io.getvalue())
        except smtplib.SMTPException:
            # 'problems' is only bound when sendmail() returns, so it can't be printed here
            print("Error: unable to send email")
            raise
else:
print("Successfully sent email")
class GMailClient(EMailClient):
_smtpserver = 'smtp.gmail.com'
_serverport = 587
class PySSMailClient(EMailClient):
_smtpserver = 'mail.pyss.org'
_serverport = 465
class EmailContact(object):
def __init__(self, name, email):
self.name = name
self.email = email
def __repr__(self):
return '"{}" <{}>'.format(Header(self.name, 'utf-8'), self.email)
def __str__(self):
return self.__repr__()
def get_ep15_sponsor_msg_es(sender, recipients, greeting='Hola'):
message = """
{greeting} {recipient_name},
Soy {sender_name} y les escribo en nombre del comité organizador del EuroPython 2015.
Estamos organizando la conferencia Europython 2015 que se realizará en Bilbao del 20 al 26 de Julio en el Palacio Euskalduna.
La conferencia se encuentra dentro del área de las nuevas tecnologías e innovación y está orientada a Python, un lenguaje de programación. Se trata de la mayor conferencia europea en la temática y en la pasada edición (https://ep2014.europython.eu/) reunió a más de 1200 personas en Berlin. La conferencia cuenta con más de 120 charlas de gran calidad, trainings, sprints y eventos dedicados al reclutamiento entre otras cosas.
EuroPython es una oportunidad única para reunir a los mejores programadores de Python, empresas y estudiantes europeos. Creemos que puede de ser de utilidad para promover su empresa y/o dar un empuje a la comunidad open source para que siga mejorando y creciendo.
Este año 2015, en Bilbao, esperamos poder atraer a mucha más gente y realizar el mejor Europython de la historia. Una oportunidad que creemos no deben desaprovechar teniendo en cuenta que es la primera vez que se celebra en España después de 13 ediciones.
Por favor le pido que compruebe el siguiente enlace a la hoja de servicios de patrocinios y nos gustaría saber si vosotros estaríais interesados en ayudarnos a financiar el evento.
https://www.dropbox.com/s/0l1hcs8lstlhd37/SponsorEuroPython2015-2015-02-26.pdf?dl=1
Muchas gracias por su atención.
Me quedo a su disposición para cualquier duda o aclaración.
Recibe un cordial saludo.
Atentamente,
{sender_name}
""".format(sender_name=sender.name, recipient_name=recipients[0].name, greeting=greeting)
subject = 'Patrocine el EuroPython 2015 en Bilbao del 20 al 26 de Julio'
return subject, message
def get_ep15_sponsor_msg_en(sender, recipients, greeting='Hello '):
message = """
{greeting}{recipient_name},
My name is {sender_name} and I'm contacting you on behalf of the EuroPython 2015 Organization team.
We are organizing the next EuroPython at the Euskalduna Congress Center (ECC) in Bilbao from the 20th to the 26th of July. EuroPython is the largest European conference focused on the Python programming language and related top notch technologies. Last year it attracted more than 1200 attendees with more than 120 high quality talks, trainings, sprints and many side events. It is a unique opportunity to connect with the best Python developers, managers and students in Europe, promote your company or just help our open source community grow and improve.
This year in Bilbao we are working hard to attract even more attendees and make the best EuroPython ever! This is definitely an opportunity not to be missed!
Please, check out our sponsors brochure: https://www.dropbox.com/s/0l1hcs8lstlhd37/SponsorEuroPython2015-2015-02-26.pdf?dl=1
If you are interested please contact us through our email sponsoring@europython.eu.
Thank you very much for your attention.
I'm looking forward to your reply.
Best regards,
{sender_name}
""".format(sender_name=sender.name, recipient_name=recipients[0].name, greeting=greeting)
subject = 'Sponsor EuroPython 2015 in Bilbao, Spain in July 20th-26th'
return subject, message
def get_ep15_sponsor_msg_en_with_fabio_comment(sender, recipients, greeting='Hello'):
message = """
{greeting} {recipient_name},
My name is {sender_name} and I'm contacting you on behalf of the EuroPython 2015 Organization team.
We are organizing the next EuroPython at the Euskalduna Congress Center (ECC) in Bilbao from the 20th to the 26th of July. EuroPython is the largest European conference focused on the Python programming language and related top notch technologies. Last year it attracted more than 1200 attendees with more than 120 high quality talks, trainings, sprints and many side events. It is a unique opportunity to connect with the best Python developers, managers and students in Europe, promote your company or just help our open source community grow and improve.
This year in Bilbao we are working hard to attract even more attendees and make the best EuroPython ever! This is definitely an opportunity not to be missed!
Please, check out our sponsors brochure: https://www.dropbox.com/s/0l1hcs8lstlhd37/SponsorEuroPython2015-2015-02-26.pdf?dl=1
If you are interested please contact us through our email sponsoring@europython.eu.
Last year, Fabio Pliger (who is in copy) co-managed sponsorships for EuroPython 2014. As you probably already know each other feel free to contact him or myself if you are interested in sponsoring this year.
Thank you very much for your attention.
I'm looking forward to your reply.
Best regards,
{sender_name}
""".format(sender_name=sender.name, recipient_name=recipients[0].name, greeting=greeting)
subject = 'Sponsor EuroPython 2015 in Bilbao, Spain in July 20th-26th'
return subject, message
def get_ep15_submitters_check_duration_message(sender, recipients, greeting='Hello'):
message = """
{greeting} {recipient_name},
My name is {sender_name} and I'm contacting you on behalf of the EuroPython 2015 Organization team.
I'm sorry if you get this email more than once.
We kindly ask you to please check if the duration of the submissions you have sent to the conference are correctly set.
You can do that in your personal page of our website: https://ep2015.europython.eu/en/
If there is anything wrong, please send an email to helpdesk@europython.eu.
Thank you very much for your attention.
We wish you good luck with the voting results.
Best regards,
{sender_name}
""".format(sender_name=sender.name, recipient_name=recipients[0].name, greeting=greeting)
subject = 'EuroPython 2015: please check your talk submissions duration.'
return subject, message
def get_ep15_edusummit_invitacion(sender_name, recipient_name):
message = """
Buenos días {recipient_name},
Somos {sender_name} y os escribimos como representantes de la Asociación Python San Sebastián, organizadora del EuroPython de este año (https://ep2015.europython.eu), que se celebra entre los días 20-26 de Julio en el Euskalduna de Bilbao.
En este contexto hemos decidido organizar, con la colaboración de la fundación Raspberry Pi (https://www.raspberrypi.org), una sesión especial orientada a las herramientas educativas que creemos que puede enriquecerse de la asistencia de profesores y profesoras. Esta sesión, que tendrá lugar el día 24, 25 y 26 de Julio, constará de varias conferencias y talleres donde se plantearán proyectos y soluciones aplicables en las aulas. Así pues, os invitamos a visitar nuestra página (https://ep2015.europython.eu/en/events/educational-summit), donde podréis encontrar más información, así como precios reducidos para el personal docente interesado en acudir al evento. En caso de tener alguna duda también responderemos cualquier pregunta enviada a helpdesk@europython.eu.
Python es un lenguaje de programación expresamente creado para ser legible y de uso sencillo, por lo que siempre ha estado íntimamente ligado con la educación. Una muestra del estrecho lazo entre estos dos campos es la fundación Raspberry Pi, pionera en la creación de herramientas sencillas y de bajo coste aplicables en entornos docentes.
Finalmente creemos que la asistencia al EuroPython, siendo este el mayor congreso sobre python de Europa, puede resultar una experiencia beneficiosa para todos aquellos interesados en las nuevas tecnologías.
Esperando veros pronto,
{sender_name}
""".format(sender_name=sender_name, recipient_name=recipient_name)
subject = 'EuroPython 2015: Educational Summit.'
return subject, message
def get_ep15_billing_sponsor_form_filling_request(sender):
message = """
Hi,
I hope I find you well. In order to write the sponsorship agreement we need some information, could you please fill the following form:
https://docs.google.com/forms/d/1VYZEgGmAOmFWDF3jiJ1QSc5GWssVKfkmmq9LO1_T-Ck/viewform
We can then send you the filled agreement as soon as possible.
Thank you very much. Have a great day.
Cheers,
Alex
{sender_name}
""".format(sender_name=sender.name)
subject = 'EuroPython 2015: Billing information for agreement and invoice'
return subject, message
def get_gmail_client():
username = GMAIL_MAILADDR
password = GMAIL_PASSWORD
return GMailClient(username, password)
def get_pyss_client():
username = PYSS_MAILADDR
password = PYSS_PASSWORD
return PySSMailClient(username, password)
def send_mail_msg(email_client, sender, recipients, subject, message, random_sleep=True):
email_client.set_msg_header (sender, recipients, subject)
email_client.set_msg_body_text (message)
if random_sleep:
sleep(randint(10, 100))
email_client.send_email()
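# Hedged usage sketch (illustrative only, not part of the original script);
# the recipient below is a placeholder address:
#   sender = EmailContact(*GMAIL_SENDER)
#   recipients = [EmailContact('Jane Doe', 'jane@example.com')]
#   subject, message = get_ep15_billing_sponsor_form_filling_request(sender)
#   send_mail_msg(get_gmail_client(), sender, recipients, subject, message,
#                 random_sleep=False)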
def send_gmail_msg(sender, recipients, subject, message, random_sleep=True):
gmail = get_gmail_client()
send_mail_msg(gmail, sender, recipients, subject, message, random_sleep)
def send_email_to_spanish_companies(sender):
import pandas
empresas = pandas.read_csv('empresas_espana.csv', delimiter=';').T
for idx in empresas:
empresa = empresas[idx]
recipients = [EmailContact(empresa['name'], empresa['email'])]
subject, message = get_ep15_sponsor_msg_es(sender, recipients)
print(recipients)
send_gmail_msg(sender, recipients, subject, message)
def send_email_to_known_spanish_companies(sender):
import pandas as pd
empresas = pd.read_csv('empresas_espana_conocidas.csv', delimiter=';').T
for idx in empresas:
empresa = empresas[idx]
if not empresa['email']:
continue
greeting = 'Hola'
if 'contact' in empresa and not pd.isnull(empresa['contact']):
greeting = 'Estimado/a'
recipients = [EmailContact(empresa['name'], empresa['email'])]
subject, message = get_ep15_sponsor_msg_es(sender, recipients, greeting=greeting)
print(recipients)
#print(message)
send_gmail_msg(sender, recipients, subject, message)
def send_email_to_known_international_companies(sender):
import pandas as pd
empresas = pd.read_csv('rest_known_international_companies.csv', delimiter=';').T
for idx in empresas:
empresa = empresas[idx]
if not empresa['email']:
continue
greeting = 'Hello'
if 'contact' in empresa and not pd.isnull(empresa['contact']):
greeting = 'Dear'
contact_name = empresa.get('contact', '')
else:
contact_name = empresa.get('name', '')
recipients = [EmailContact(contact_name, empresa['email'])]
subject, message = get_ep15_sponsor_msg_en(sender, recipients, greeting=greeting)
print(recipients)
send_gmail_msg(sender, recipients, subject, message)
def send_email_to_intel_international_companies(sender):
with open('intel_clean_emails.txt') as f:
emails = f.readlines()
emails = [email.strip() for email in emails]
for email in emails:
greeting = 'Hello'
recipients = [EmailContact('', email)]
subject, message = get_ep15_sponsor_msg_en(sender, recipients, greeting=greeting)
print(recipients)
#print(message)
send_gmail_msg(sender, recipients, subject, message)
def send_email_to_known_international_fabio_companies(sender):
import pandas as pd
empresas = pd.read_csv('known_international_fabio_companies.csv', delimiter=';').T
for idx in empresas:
empresa = empresas[idx]
if not empresa['email']:
continue
greeting = 'Hello'
if 'contact' in empresa and not pd.isnull(empresa['contact']):
greeting = 'Dear'
contact_name = empresa.get('contact', '')
else:
contact_name = empresa.get('name', '')
recipients = [EmailContact(contact_name, empresa['email']),
EmailContact('Fabio Pliger', 'fabio.pliger@gmail.com')]
subject, message = get_ep15_sponsor_msg_en_with_fabio_comment(sender, recipients, greeting=greeting)
print(recipients)
#print(message)
send_gmail_msg(sender, recipients, subject, message)
def send_email_to_paper_submitters(sender):
import json
authors = json.load(open('papers.json'))
for author in authors:
if not author['email']:
continue
greeting = 'Hello'
if 'name' in author:
greeting = 'Dear'
contact_name = author.get('name', '')
else:
contact_name = ''
recipients = [EmailContact(contact_name, author['email']),]
subject, message = get_ep15_submitters_check_duration_message(sender, recipients, greeting=greeting)
print(recipients)
#print(message)
send_gmail_msg(sender, recipients, subject, message)
def send_email_to_edutrack_teachers(email_client, sender):
schools = [l.strip().split(',') for l in open('Directorio_centros_no_universitarios_nom_em.xlsx - page 2.csv')]
for school in schools:
contact_name = school[0]
contact_email = school[1]
recipients = [EmailContact(contact_name, contact_email),]
subject, message = get_ep15_edusummit_invitacion("Leire Ozaeta y Alexandre Savio", contact_name)
print(recipients)
#print(message)
send_mail_msg(email_client, sender, recipients, subject, message)
def send_fillform_request_email_to_sponsors(email_client, sender):
rows = read_gspread('1j1Z-cNAjUppkYrt_UwcPOb_FzUSqWWb_VBbuFJDgH4I', '')
    for sponsor in rows:
        # the 'name' and 'email' column names are assumed from the spreadsheet layout
        contact_name = sponsor.get('name', '')
        contact_email = sponsor.get('email', '')
        if not contact_email:
            continue
        recipients = [EmailContact(contact_name, contact_email),]
        subject, message = get_ep15_billing_sponsor_form_filling_request(sender)
        print(recipients)
        #print(message)
        send_mail_msg(email_client, sender, recipients, subject, message)
def get_google_auth():
import json
import gspread
from oauth2client.client import SignedJwtAssertionCredentials
from credentials import GOOGLE_OAUTH_JSON_FILE
json_key = json.load(open(GOOGLE_OAUTH_JSON_FILE))
scope = ["https://spreadsheets.google.com/feeds"]
# authenticate
credentials = SignedJwtAssertionCredentials(json_key["client_email"],
json_key["private_key"].encode("utf-8"), scope)
return gspread.authorize(credentials)
def read_gspread(doc_key, worksheet_name=''):
gc = get_google_auth()
    #Go to Google Sheets and share your spreadsheet with the email address in json_key['client_email'].
    #Otherwise you'll get a SpreadsheetNotFound exception when trying to open it.
spread = gc.open_by_key(doc_key)
try:
if worksheet_name:
wks = spread.worksheet(worksheet_name)
else:
wks = spread.sheet1
except:
wks = spread.sheet1
all_rows = wks.get_all_values()
keys = all_rows[0]
data = [dict(zip(keys, values)) for values in all_rows[1:]]
return data
if __name__ == '__main__':
sender = EmailContact(*GMAIL_SENDER)
client = get_gmail_client()
#send_email_to_spanish_companies (sender)
#send_email_to_known_spanish_companies (sender)
#send_email_to_known_international_companies (sender)
#send_email_to_known_international_fabio_companies (sender)
#send_email_to_paper_submitters (sender)
#send_email_to_edutrack_teachers (client, sender)
send_email_to_intel_international_companies (sender)
|
{
"content_hash": "c9f469d815a527e3b2bf9c100ad6606a",
"timestamp": "",
"source": "github",
"line_count": 543,
"max_line_length": 770,
"avg_line_length": 36.55064456721915,
"alnum_prop": 0.6932030029727415,
"repo_name": "PythonSanSebastian/pyper_the_bot",
"id": "6d68c1e7421c5f7e190f636d41d783fcf20814b7",
"size": "19944",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "implants/send_emails.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "1645"
},
{
"name": "Python",
"bytes": "125591"
},
{
"name": "Shell",
"bytes": "1867"
},
{
"name": "TeX",
"bytes": "10667"
}
],
"symlink_target": ""
}
|
import getpass
import importlib.util
import importlib.machinery
import itertools
import json
import os
import io
import subprocess
import sys
import time
from contextlib import contextmanager, redirect_stdout
from typing import Callable
import yaml
from datetime import datetime, timedelta
import consts
SELF_ABS_PATH, SELF_FULL_DIR, SELF_SUB_DIR = consts.get_self_path_dir(__file__)
class AttrDict(dict):
def __init__(self, *args, **kwargs):
if args and isinstance(args[0], dict):
for k, v in args[0].items():
self[k] = AttrDict(v) if isinstance(v, dict) else v
else:
super(AttrDict, self).__init__(*args, **kwargs)
def __getattr__(self, name):
return self[name]
def __setattr__(self, name, value):
self[name] = value
def update(self, *args, **kwargs):
for arg in args:
for k, v in arg.items():
if k in self and isinstance(v, dict):
self[k].update(v)
else:
self[k] = v
super(AttrDict, self).update(**kwargs)
@property
def __dict__(self):
return dict((k, v.__dict__ if isinstance(v, type(self)) else v) for k, v in self.items())
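# Hedged example (not in the original module): AttrDict allows attribute-style
# access to nested dicts, e.g.
#   cfg = AttrDict({'db': {'host': 'localhost', 'port': 5432}})
#   cfg.db.host   -> 'localhost'
#   cfg.update({'db': {'port': 5433}})   # merges nested keys in place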
def system_out(*cmd):
return subprocess.run(
' '.join(cmd), shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True
).stdout
@contextmanager
def redirected_stdout_context(*cli) -> io.StringIO:
sys.argv = []
for c in cli:
if c:
sys.argv += c.split(' ')
with io.StringIO() as buf, redirect_stdout(buf):
yield buf
def fmtprint(values, formats=None, sep=' '):
    # guard the default None so zip_longest always receives an iterable
    formats = formats or []
    for val, fmt in itertools.zip_longest(values, formats, fillvalue='{}'):
print(fmt.format(val), end=sep)
print()
def measure_column_widths(rows) -> list:
widths = []
for row in rows:
for i, field in enumerate(row):
try:
widths[i] = max(widths[i], len(field))
except IndexError:
widths.insert(i, len(field))
return widths
def print_table(titles: iter, rows: iter):
taskiter1, taskiter2 = itertools.tee(rows)
widths = (max(w1, w2) for w1, w2 in zip(measure_column_widths(taskiter1), measure_column_widths([titles])))
formats = [str('{:%d}' % width) for width in widths]
fmtprint(titles, formats, sep=' ')
for _task in taskiter2:
fmtprint(_task, formats, sep=' ')
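# Hedged usage sketch (not part of the original module):
#   print_table(['NAME', 'STATE'], [['task-1', 'idle'], ['task-2', 'running']])
# pads every column to the widest cell of that column, titles included.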
def active_user_name() -> str:
return getpass.getuser()
def dump(items: iter, toyaml: bool = False, tojson: bool = False, squash: bool = False, entry=lambda item: item):
entries = [entry(i) for i in items]
if squash and len(entries) == 1:
entries = entries[0]
if toyaml:
print(yaml.dump(entries, default_flow_style=False))
if tojson:
print(json.dumps(entries, indent=4))
@contextmanager
def chdir_context(new_dir: str):
old_dir = os.getcwd()
os.chdir(new_dir)
try:
yield
finally:
os.chdir(old_dir)
def load_module(file_name: str) -> object:
file_name = os.path.join(SELF_FULL_DIR, file_name) if not file_name.startswith('/') else file_name
module_name = os.path.splitext(os.path.basename(file_name))[0]
loader = importlib.machinery.SourceFileLoader(module_name, file_name)
spec = importlib.util.spec_from_loader(loader.name, loader)
assert spec, 'failed loading spec from: ' + file_name
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
return module
def wait_until(predicate: Callable, timeout: timedelta, interval: int = 1):
start = datetime.now()
while datetime.now() - start < timeout and not predicate():
time.sleep(interval)
if datetime.now() - start >= timeout:
raise TimeoutError('timed-out after {}'.format(timeout))
if __name__ == '__main__':
m = load_module('autolite')
|
{
"content_hash": "0bae28d208cde96dedcc8e490928cdef",
"timestamp": "",
"source": "github",
"line_count": 156,
"max_line_length": 113,
"avg_line_length": 25.358974358974358,
"alnum_prop": 0.6180485338725986,
"repo_name": "avitalyahel/autolite",
"id": "f37f711a94399b2d90445f08cfe49dec81443ed0",
"size": "3956",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "common.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "111"
},
{
"name": "Python",
"bytes": "84555"
},
{
"name": "Shell",
"bytes": "1148"
}
],
"symlink_target": ""
}
|
"""
Provides support for common props operations; the mixed-in class must implement get_uuid (currently IData).
"""
import types
import datetime
import copy
from david.lib.cache import lc
from david.lib.store import redis_store as db
class PropsMixin(object):
def get_uuid(self):
if not self.id:
raise RuntimeError('PropsMixin must have an id')
return '%s:%s' % (self.__tablename__, self.id)
@property
def _props_name(self):
return '__%s/props_cached' % self.get_uuid()
@property
def _props_db_key(self):
return '%s/props' % self.get_uuid()
def _get_props(self):
return db.get(self._props_db_key) or {}
#lc_name = self._props_name
#props = lc.get(lc_name)
#if props is None:
#props = db.get(self._props_db_key) or {}
#lc.set(lc_name, props)
#return props
def _set_props(self, props):
db.set(self._props_db_key, props)
#lc.delete(self._props_name)
def _destory_props(self):
db.delete(self._props_db_key)
#lc.delete(self._props_name)
get_props = _get_props
set_props = _set_props
props = property(_get_props, _set_props)
def set_props_item(self, key, value):
props = self.props
props[key] = value
self.props = props
def delete_props_item(self, key):
props = self.props
props.pop(key, None)
self.props = props
def get_props_item(self, key, default=None):
return self.props.get(key, default)
def update_props(self, **data):
props = self.props
props.update(data)
self.props = props
class PropsItem(object):
def __init__(self, name, default=None, output_filter=None):
self.name = name
self.default = default
self.output_filter = output_filter
self.mutable_default_value = isinstance(
self.default,
(
set,
bytearray,
types.DictType,
types.ListType,
)
)
def __get__(self, obj, objtype):
if obj is None:
return
r = obj.get_props_item(self.name, None)
if r is None:
if self.mutable_default_value:
return copy.deepcopy(self.default)
return self.default
elif self.output_filter:
return self.output_filter(r)
else:
return r
def __set__(self, obj, value):
obj.set_props_item(self.name, value)
def __delete__(self, obj):
obj.delete_props_item(self.name)
datetime_outputfilter = lambda v: datetime.datetime.strptime(v, '%Y-%m-%d %H:%M:%S') if v else None
date_outputfilter = lambda v: datetime.datetime.strptime(v, '%Y-%m-%d').date() if v else None
class DatetimePropsItem(PropsItem):
def __init__(self, name, default=None):
super(DatetimePropsItem, self).__init__(
name, default, datetime_outputfilter)
class DatePropsItem(PropsItem):
def __init__(self, name, default=None):
super(DatePropsItem, self).__init__(name, default, date_outputfilter)
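# --- Hedged usage sketch (illustrative only; `Note` is a hypothetical model) ---
# A class mixing in PropsMixin stores its props dict in redis under
# "<tablename>:<id>/props", and PropsItem exposes single keys as attributes:
#
#   class Note(PropsMixin):
#       __tablename__ = 'note'
#       pinned = PropsItem('pinned', default=False)
#       def __init__(self, id):
#           self.id = id
#
#   note = Note(1)
#   note.pinned = True               # set_props_item('pinned', True)
#   note.get_props_item('pinned')    # -> True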
|
{
"content_hash": "5f5075c1ff93150a4ccd9efe2fe0a14d",
"timestamp": "",
"source": "github",
"line_count": 116,
"max_line_length": 99,
"avg_line_length": 27.017241379310345,
"alnum_prop": 0.5701978302488833,
"repo_name": "ktmud/david",
"id": "7d5a58882337122830229d6e8326230c8edc03e3",
"size": "3199",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "david/lib/mixins/props.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "83881"
},
{
"name": "JavaScript",
"bytes": "281633"
},
{
"name": "PHP",
"bytes": "2274"
},
{
"name": "Python",
"bytes": "82385"
}
],
"symlink_target": ""
}
|
from trac.core import Component, implements, ExtensionPoint
from trac.util.text import empty
from trac.config import Option, BoolOption, ListOption
from trac.ticket.api import ITicketChangeListener
from trac.attachment import IAttachmentChangeListener
import datetime
import os
import re
from itertools import chain
import pytz
import time
import proton
from api import ICeleryTask
# TODO support IMilestoneChangeListener
# TODO admin ui to configure project_identifier
class QueueFeeder(Component):
tasks = ExtensionPoint(ICeleryTask)
def send_events(self, events):
for event in events:
event['project'] = os.path.basename(self.env.path)
for task in self.tasks:
task.run(event)
class TicketListener(Component):
implements(ITicketChangeListener,
IAttachmentChangeListener)
def ticket_created(self, ticket):
_time = proton.timestamp(time.time() * 1000)
event = {"category": "created",
"time": _time,
"ticket": ticket.id,
"author": ticket['reporter']}
event['state'] = dict((k, self._transform_value(k, ticket[k])) for k in ticket.values)
QueueFeeder(self.env).send_events([event])
def ticket_changed(self, ticket, comment, author, old_values):
_time = proton.timestamp(time.time() * 1000)
event = {"category": "changed",
"time": _time,
"comment": comment,
"ticket": ticket.id,
"author": author}
event['change'] = dict((k, self._transform_value(k, ticket[k])) for k in old_values)
event['state'] = dict((k, self._transform_value(k, ticket[k])) for k in ticket.values)
QueueFeeder(self.env).send_events([event])
def ticket_deleted(self, ticket):
_time = proton.timestamp(time.time() * 1000)
event = {"category": "deleted",
"time": _time,
"ticket": ticket.id}
QueueFeeder(self.env).send_events([event])
def ticket_comment_modified(self, ticket, cdate, author, comment, old_comment):
_time = proton.timestamp(time.time() * 1000)
event = {"category": "changed",
"time": _time,
"ticket": ticket.id,
"author": author,
"cdate": cdate,
'change': {"comment": comment}}
QueueFeeder(self.env).send_events([event])
def ticket_change_deleted(self, ticket, cdate, changes):
# we don't support this, as the authors of this plugin don't
# support deleting changes in our downstream product
pass
def attachment_added(self, attachment):
_time = proton.timestamp(time.time() * 1000)
if attachment.parent_realm != "ticket":
return
event = {"category": "attachment-added",
"time": _time,
"ticket": attachment.parent_realm,
"author": attachment.author,
"filename": attachment.filename,
"description": attachment.description,
"size": attachment.size}
QueueFeeder(self.env).send_events([event])
def attachment_deleted(self, attachment):
        _time = proton.timestamp(time.time() * 1000)
if attachment.parent_realm != "ticket":
return
event = {"category": "attachment-deleted",
"time": _time,
"ticket": attachment.parent_realm,
"author": attachment.author,
"filename": attachment.filename}
QueueFeeder(self.env).send_events([event])
def attachment_version_deleted(self, attachment, old_version):
"""Called when a particular version of an attachment is deleted."""
self.attachment_deleted(attachment)
def attachment_reparented(self, attachment, old_parent_realm, old_parent_id):
"""Called when an attachment is reparented."""
self.attachment_added(attachment)
def _transform_value(self, field, value):
if value is empty:
return None
if field in ("cc", "keywords"):
# note, Trac uses '[;,\s]+' (see trac/ticket/model.py)
# but CGI's fork doesn't include the whitespace
return [x.strip() for x in re.split(r'[;,]+', value)]
# TODO deal with integer, date, float fields (CGI extensions)
# e.g., we have to convert value as string to value as float/integer, by looking
# at the field datatype configuration
# TODO ensure that 'changetime' is in UTC?
if isinstance(value, datetime.datetime):
# celery uses kombu, which uses pickle by default, which
# fails to unpickle trac.util.datefmt.FixedOffset
#value = value.astimezone(pytz.utc)
# but then also proton then fails to seralize for sending to Service Bus...
# TODO http://stackoverflow.com/a/7852891 says this is a bad idea
value = proton.timestamp(time.mktime(value.timetuple()) * 1000)
return value
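# --- Hedged sketch (illustrative only, not part of the original plugin) ---
# QueueFeeder fans each event out to every ICeleryTask implementation via
# task.run(event); a minimal hypothetical consumer could look like:
#
#   from trac.core import Component, implements
#   from api import ICeleryTask
#
#   class LoggingTask(Component):
#       implements(ICeleryTask)
#
#       def run(self, event):
#           self.log.info("ticket event: %r", event)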
|
{
"content_hash": "1297de059147658e7e7bd04f2555689a",
"timestamp": "",
"source": "github",
"line_count": 126,
"max_line_length": 94,
"avg_line_length": 40.82539682539682,
"alnum_prop": 0.5958398133748056,
"repo_name": "CGI-define-and-primeportal/trac-plugin-feedrabbitmq",
"id": "10a8dd1acec6913cb11a0ea281846d2b3e7073a1",
"size": "5144",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tracfeedrabbitmq/listeners.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "1867"
},
{
"name": "Python",
"bytes": "19449"
}
],
"symlink_target": ""
}
|
"""SCons.Tool.link
Tool-specific initialization for the generic Posix linker.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001 - 2014 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/link.py 2014/09/27 12:51:43 garyo"
import re
import SCons.Defaults
import SCons.Tool
import SCons.Util
import SCons.Warnings
from SCons.Tool.FortranCommon import isfortran
from SCons.Tool.DCommon import isD
cplusplus = __import__('c++', globals(), locals(), [])
issued_mixed_link_warning = False
def smart_link(source, target, env, for_signature):
has_cplusplus = cplusplus.iscplusplus(source)
has_fortran = isfortran(env, source)
has_d = isD(env, source)
if has_cplusplus and has_fortran and not has_d:
global issued_mixed_link_warning
if not issued_mixed_link_warning:
msg = "Using $CXX to link Fortran and C++ code together.\n\t" + \
"This may generate a buggy executable if the '%s'\n\t" + \
"compiler does not know how to deal with Fortran runtimes."
SCons.Warnings.warn(SCons.Warnings.FortranCxxMixWarning,
msg % env.subst('$CXX'))
issued_mixed_link_warning = True
return '$CXX'
elif has_d:
env['LINKCOM'] = env['DLINKCOM']
env['SHLINKCOM'] = env['SHDLINKCOM']
return '$DC'
elif has_fortran:
return '$FORTRAN'
elif has_cplusplus:
return '$CXX'
return '$CC'
def shlib_emitter(target, source, env):
Verbose = False
platform = env.subst('$PLATFORM')
for tgt in target:
tgt.attributes.shared = 1
try:
# target[0] comes in as libtest.so. Add the version extensions
version = env.subst('$SHLIBVERSION')
if version:
version_names = shlib_emitter_names(target, source, env)
# change the name of the target to include the version number
target[0].name = version_names[0]
for name in version_names:
env.SideEffect(name, target[0])
env.Clean(target[0], name)
if Verbose:
print "shlib_emitter: add side effect - ",name
except KeyError:
version = None
return (target, source)
def shlib_emitter_names(target, source, env):
"""Return list of file names that are side effects for a versioned library build. The first name in the list is the new name for the target"""
Verbose = False
platform = env.subst('$PLATFORM')
version_names = []
try:
# target[0] comes in as libtest.so. Add the version extensions
version = env.subst('$SHLIBVERSION')
if version.count(".") != 2:
# We need a version of the form x.y.z to proceed
raise ValueError
if version:
if platform == 'posix':
versionparts = version.split('.')
name = target[0].name
# generate library name with the version number
version_name = target[0].name + '.' + version
if Verbose:
print "shlib_emitter_names: target is ", version_name
print "shlib_emitter_names: side effect: ", name
# add version_name to list of names to be a Side effect
version_names.append(version_name)
if Verbose:
print "shlib_emitter_names: versionparts ",versionparts
for ver in versionparts[0:-1]:
name = name + '.' + ver
if Verbose:
print "shlib_emitter_names: side effect: ", name
# add name to list of names to be a Side effect
version_names.append(name)
elif platform == 'darwin':
shlib_suffix = env.subst('$SHLIBSUFFIX')
name = target[0].name
# generate library name with the version number
suffix_re = re.escape(shlib_suffix)
version_name = re.sub(suffix_re, '.' + version + shlib_suffix, name)
if Verbose:
print "shlib_emitter_names: target is ", version_name
print "shlib_emitter_names: side effect: ", name
# add version_name to list of names to be a Side effect
version_names.append(version_name)
elif platform == 'cygwin':
shlib_suffix = env.subst('$SHLIBSUFFIX')
name = target[0].name
# generate library name with the version number
suffix_re = re.escape(shlib_suffix)
version_name = re.sub(suffix_re, '-' + re.sub('\.', '-', version) + shlib_suffix, name)
if Verbose:
print "shlib_emitter_names: target is ", version_name
print "shlib_emitter_names: side effect: ", name
# add version_name to list of names to be a Side effect
version_names.append(version_name)
except KeyError:
version = None
return version_names
def generate(env):
"""Add Builders and construction variables for gnulink to an Environment."""
SCons.Tool.createSharedLibBuilder(env)
SCons.Tool.createProgBuilder(env)
env['SHLINK'] = '$LINK'
env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -shared')
env['SHLINKCOM'] = '$SHLINK -o $TARGET $SHLINKFLAGS $__RPATH $SOURCES $_LIBDIRFLAGS $_LIBFLAGS'
# don't set up the emitter, cause AppendUnique will generate a list
# starting with None :-(
env.Append(SHLIBEMITTER = [shlib_emitter])
env['SMARTLINK'] = smart_link
env['LINK'] = "$SMARTLINK"
env['LINKFLAGS'] = SCons.Util.CLVar('')
# __RPATH is only set to something ($_RPATH typically) on platforms that support it.
env['LINKCOM'] = '$LINK -o $TARGET $LINKFLAGS $__RPATH $SOURCES $_LIBDIRFLAGS $_LIBFLAGS'
env['LIBDIRPREFIX']='-L'
env['LIBDIRSUFFIX']=''
env['_LIBFLAGS']='${_stripixes(LIBLINKPREFIX, LIBS, LIBLINKSUFFIX, LIBPREFIXES, LIBSUFFIXES, __env__)}'
env['LIBLINKPREFIX']='-l'
env['LIBLINKSUFFIX']=''
if env['PLATFORM'] == 'hpux':
env['SHLIBSUFFIX'] = '.sl'
elif env['PLATFORM'] == 'aix':
env['SHLIBSUFFIX'] = '.a'
# For most platforms, a loadable module is the same as a shared
# library. Platforms which are different can override these, but
# setting them the same means that LoadableModule works everywhere.
SCons.Tool.createLoadableModuleBuilder(env)
env['LDMODULE'] = '$SHLINK'
# don't set up the emitter, cause AppendUnique will generate a list
# starting with None :-(
env.Append(LDMODULEEMITTER='$SHLIBEMITTER')
env['LDMODULEPREFIX'] = '$SHLIBPREFIX'
env['LDMODULESUFFIX'] = '$SHLIBSUFFIX'
env['LDMODULEFLAGS'] = '$SHLINKFLAGS'
env['LDMODULECOM'] = '$LDMODULE -o $TARGET $LDMODULEFLAGS $__RPATH $SOURCES $_LIBDIRFLAGS $_LIBFLAGS'
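# Hypothetical usage sketch (an SConstruct fragment, not part of this
# module; the target and source names are made up):
#   env = Environment(tools=['default', 'gnulink'])
#   env.SharedLibrary(target='test', source=['test.c'], SHLIBVERSION='1.2.3')
# With this tool loaded, the SHLIBVERSION override causes shlib_emitter()
# above to rename the target and register the versioned names as side
# effects of the build.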
def exists(env):
# This module isn't really a Tool on its own, it's common logic for
# other linkers.
return None
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
{
"content_hash": "fd3e5cc1d422434ce17a6a72d2e43a74",
"timestamp": "",
"source": "github",
"line_count": 204,
"max_line_length": 146,
"avg_line_length": 41.18627450980392,
"alnum_prop": 0.6227088788383718,
"repo_name": "smandy/d_c_experiment",
"id": "3871e8d46f7c5800c140834ac57f96426120d60a",
"size": "8402",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "scons-local-2.3.4/SCons/Tool/link.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "931"
},
{
"name": "C++",
"bytes": "860"
},
{
"name": "D",
"bytes": "1306"
},
{
"name": "DTrace",
"bytes": "204"
},
{
"name": "Python",
"bytes": "1916841"
}
],
"symlink_target": ""
}
|
"""
********************************************************************************
* Name: utilities.py
* Author: Nathan Swain
* Created On: 2014
* Copyright: (c) Brigham Young University 2014
* License: BSD 2-Clause
********************************************************************************
"""
import logging
import os
from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned
from django.utils._os import safe_join
from tethys_apps.harvester import SingletonHarvester
tethys_log = logging.getLogger('tethys.' + __name__)
def get_tethys_src_dir():
"""
Get/derive the TETHYS_SRC variable.
Returns:
str: path to TETHYS_SRC.
"""
default = os.path.dirname(os.path.dirname(__file__))
return os.environ.get('TETHYS_SRC', default)
def get_tethys_home_dir():
"""
Get/derive the TETHYS_HOME variable.
Returns:
str: path to TETHYS_HOME.
"""
env_tethys_home = os.environ.get('TETHYS_HOME')
# Return environment value if set
if env_tethys_home:
return env_tethys_home
# Initialize to default TETHYS_HOME
tethys_home = os.path.expanduser('~/.tethys')
try:
conda_env_name = os.environ.get('CONDA_DEFAULT_ENV')
if conda_env_name != 'tethys':
tethys_home = os.path.join(tethys_home, conda_env_name)
except Exception:
tethys_log.warning(f'Running Tethys outside of active Conda environment detected. Using default '
f'TETHYS_HOME "{tethys_home}". Set TETHYS_HOME environment to override.')
return tethys_home
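# Illustrative behaviour sketch (paths and environment names are examples):
# with TETHYS_HOME unset and CONDA_DEFAULT_ENV='my_env', the function above
# returns the expanded '~/.tethys/my_env'; inside an environment literally
# named 'tethys' it returns plain '~/.tethys'; an explicit TETHYS_HOME
# environment variable always wins.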
def get_directories_in_tethys(directory_names, with_app_name=False):
"""
    Locate given directories in tethys apps and extensions.
Args:
directory_names: directory to get path to.
with_app_name: include the app name if True.
Returns:
list: list of paths to directories in apps and extensions.
"""
potential_dirs = []
# Determine the directories of tethys extensions
harvester = SingletonHarvester()
for _, app_module in harvester.app_modules.items():
try:
app_module = __import__(app_module, fromlist=[''])
potential_dirs.append(app_module.__path__[0])
except (ImportError, AttributeError, IndexError):
pass
for _, extension_module in harvester.extension_modules.items():
try:
extension_module = __import__(extension_module, fromlist=[''])
potential_dirs.append(extension_module.__path__[0])
except (ImportError, AttributeError, IndexError):
pass
# Check each directory combination
match_dirs = []
for potential_dir in potential_dirs:
for directory_name in directory_names:
# Only check directories
if os.path.isdir(potential_dir):
match_dir = safe_join(potential_dir, directory_name)
if match_dir not in match_dirs and os.path.isdir(match_dir):
if not with_app_name:
match_dirs.append(match_dir)
else:
match_dirs.append((os.path.basename(potential_dir), match_dir))
return match_dirs
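# Illustrative sketch (directory and app names are hypothetical): a call
# such as
#   get_directories_in_tethys(['templates'], with_app_name=True)
# walks every installed app and extension package and returns entries like
#   ('my_first_app', '/.../tethysapp/my_first_app/templates')
# for each package that actually contains a 'templates' directory.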
def get_active_app(request=None, url=None, get_class=None):
"""
Get the active TethysApp object based on the request or URL.
"""
from tethys_apps.models import TethysApp
apps_root = 'apps'
if request is not None:
the_url = request.path
elif url is not None:
the_url = url
else:
return None
url_parts = the_url.split('/')
app = None
# Find the app key
if apps_root in url_parts:
# The app root_url is the path item following (+1) the apps_root item
app_root_url_index = url_parts.index(apps_root) + 1
app_root_url = url_parts[app_root_url_index]
if app_root_url:
try:
# Get the app from the database
app = TethysApp.objects.get(root_url=app_root_url)
except ObjectDoesNotExist:
tethys_log.warning('Could not locate app with root url "{0}".'.format(app_root_url))
except MultipleObjectsReturned:
tethys_log.warning('Multiple apps found with root url "{0}".'.format(app_root_url))
if get_class:
app = get_app_class(app)
return app
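# Illustrative sketch (the URL is hypothetical): a request for
# '/apps/my-first-app/home/' splits into ['', 'apps', 'my-first-app', ...],
# so the app root url is taken to be 'my-first-app' and the TethysApp with
# root_url='my-first-app' is looked up (optionally swapped for its app
# class when get_class is truthy).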
def get_app_class(app):
for app_s in SingletonHarvester().apps:
if app_s.package == app.package:
return app_s
def get_app_settings(app):
"""
Get settings related to app
Args:
app(str): name of app
Returns:
dict (linked_settings, unlinked_settings): Dictionary with two keys: linked_settings(list) - list of linked settings, unlinked_settings(list) - list of unlinked settings # noqa: E501
"""
from tethys_cli.cli_colors import write_error
from tethys_apps.models import (TethysApp, TethysExtension, PersistentStoreConnectionSetting,
PersistentStoreDatabaseSetting, SpatialDatasetServiceSetting,
DatasetServiceSetting, WebProcessingServiceSetting,
CustomSetting)
try:
app = TethysApp.objects.get(package=app)
app_settings = []
for setting in PersistentStoreConnectionSetting.objects.filter(tethys_app=app):
app_settings.append(setting)
for setting in PersistentStoreDatabaseSetting.objects.filter(tethys_app=app):
app_settings.append(setting)
for setting in SpatialDatasetServiceSetting.objects.filter(tethys_app=app):
app_settings.append(setting)
for setting in DatasetServiceSetting.objects.filter(tethys_app=app):
app_settings.append(setting)
for setting in WebProcessingServiceSetting.objects.filter(tethys_app=app):
app_settings.append(setting)
for setting in CustomSetting.objects.filter(tethys_app=app):
app_settings.append(setting)
unlinked_settings = []
linked_settings = []
for setting in app_settings:
if (hasattr(setting, 'spatial_dataset_service') and setting.spatial_dataset_service) \
or (hasattr(setting, 'persistent_store_service') and setting.persistent_store_service) \
or (hasattr(setting, 'dataset_service') and setting.dataset_service) \
or (hasattr(setting, 'web_processing_service') and setting.web_processing_service) \
or (hasattr(setting, 'value') and setting.value != ''):
linked_settings.append(setting)
else:
unlinked_settings.append(setting)
return {
'linked_settings': linked_settings,
'unlinked_settings': unlinked_settings
}
except ObjectDoesNotExist:
try:
# Fail silently if the object is an Extension
TethysExtension.objects.get(package=app)
except ObjectDoesNotExist:
# Write an error if the object is not a TethysApp or Extension
write_error('The app or extension you specified ("{0}") does not exist. Command aborted.'.format(app))
except Exception as e:
write_error(str(e))
write_error('Something went wrong. Please try again.')
def create_ps_database_setting(app_package, name, description='', required=False, initializer='', initialized=False,
spatial=False, dynamic=False):
from tethys_cli.cli_colors import pretty_output, FG_RED, FG_GREEN
from tethys_apps.models import PersistentStoreDatabaseSetting
from tethys_apps.models import TethysApp
try:
app = TethysApp.objects.get(package=app_package)
except ObjectDoesNotExist:
with pretty_output(FG_RED) as p:
p.write('A Tethys App with the name "{}" does not exist. Aborted.'.format(app_package))
return False
try:
setting = PersistentStoreDatabaseSetting.objects.get(name=name)
if setting:
with pretty_output(FG_RED) as p:
p.write('A PersistentStoreDatabaseSetting with name "{}" already exists. Aborted.'.format(name))
return False
except ObjectDoesNotExist:
pass
try:
ps_database_setting = PersistentStoreDatabaseSetting(
tethys_app=app,
name=name,
description=description,
required=required,
initializer=initializer,
initialized=initialized,
spatial=spatial,
dynamic=dynamic
)
ps_database_setting.save()
with pretty_output(FG_GREEN) as p:
p.write('PersistentStoreDatabaseSetting named "{}" for app "{}" created successfully!'.format(name,
app_package))
return True
except Exception as e:
print(e)
with pretty_output(FG_RED) as p:
p.write('The above error was encountered. Aborted.')
return False
def remove_ps_database_setting(app_package, name, force=False):
from tethys_apps.models import TethysApp
from tethys_cli.cli_colors import pretty_output, FG_RED, FG_GREEN
from tethys_apps.models import PersistentStoreDatabaseSetting
try:
app = TethysApp.objects.get(package=app_package)
except ObjectDoesNotExist:
with pretty_output(FG_RED) as p:
p.write('A Tethys App with the name "{}" does not exist. Aborted.'.format(app_package))
return False
try:
setting = PersistentStoreDatabaseSetting.objects.get(tethys_app=app, name=name)
except ObjectDoesNotExist:
with pretty_output(FG_RED) as p:
            p.write('A PersistentStoreDatabaseSetting with the name "{}" for app "{}" does not exist. Aborted.'
.format(name, app_package))
return False
if not force:
proceed = input('Are you sure you want to delete the '
'PersistentStoreDatabaseSetting named "{}"? [y/n]: '.format(name))
while proceed not in ['y', 'n', 'Y', 'N']:
proceed = input('Please enter either "y" or "n": ')
if proceed in ['y', 'Y']:
setting.delete()
with pretty_output(FG_GREEN) as p:
p.write('Successfully removed PersistentStoreDatabaseSetting with name "{0}"!'.format(name))
return True
else:
with pretty_output(FG_RED) as p:
p.write('Aborted. PersistentStoreDatabaseSetting not removed.')
else:
setting.delete()
with pretty_output(FG_GREEN) as p:
p.write('Successfully removed PersistentStoreDatabaseSetting with name "{0}"!'.format(name))
return True
def link_service_to_app_setting(service_type, service_uid, app_package, setting_type, setting_uid):
"""
Links a Tethys Service to a TethysAppSetting.
:param service_type: The type of service being linked to an app.
Must be either 'spatial' or 'persistent' or 'dataset' or 'wps'.
:param service_uid: The name or id of the service being linked to an app.
:param app_package: The package name of the app whose setting is being linked to a service.
:param setting_type: The type of setting being linked to a service. Must be one of the following: 'ps_database',
'ps_connection', or 'ds_spatial'.
:param setting_uid: The name or id of the setting being linked to a service.
:return: True if successful, False otherwise.
"""
import django
django.setup()
from tethys_cli.cli_colors import pretty_output, FG_GREEN, FG_RED
from tethys_sdk.app_settings import (SpatialDatasetServiceSetting, PersistentStoreConnectionSetting,
PersistentStoreDatabaseSetting, DatasetServiceSetting,
WebProcessingServiceSetting)
from tethys_apps.models import TethysApp
setting_type_to_link_model_dict = {
'ps_database': {
'setting_model': PersistentStoreDatabaseSetting,
'service_field': 'persistent_store_service'
},
'ps_connection': {
'setting_model': PersistentStoreConnectionSetting,
'service_field': 'persistent_store_service'
},
'ds_spatial': {
'setting_model': SpatialDatasetServiceSetting,
'service_field': 'spatial_dataset_service'
},
'ds_dataset': {
'setting_model': DatasetServiceSetting,
'service_field': 'dataset_service'
},
'wps': {
'setting_model': WebProcessingServiceSetting,
'service_field': 'web_processing_service'
}
}
service_model = get_service_model_from_type(service_type)
try:
try:
service_uid = int(service_uid)
service = service_model.objects.get(pk=service_uid)
except ValueError:
service = service_model.objects.get(name=service_uid)
except ObjectDoesNotExist:
with pretty_output(FG_RED) as p:
p.write(f'A {service_model.__class__.__name__} with ID/Name "{service_uid}" does not exist.')
return False
try:
app = TethysApp.objects.get(package=app_package)
except ObjectDoesNotExist:
with pretty_output(FG_RED) as p:
p.write(f'A Tethys App with the name "{app_package}" does not exist. Aborted.')
return False
try:
linked_setting_model_dict = setting_type_to_link_model_dict[setting_type]
except KeyError:
with pretty_output(FG_RED) as p:
p.write(f'The setting_type you specified ("{setting_type}") does not exist.'
'\nChoose from: "ps_database|ps_connection|ds_spatial"')
return False
linked_setting_model = linked_setting_model_dict['setting_model']
linked_service_field = linked_setting_model_dict['service_field']
try:
try:
setting_uid = int(setting_uid)
setting = linked_setting_model.objects.get(
tethys_app=app, pk=setting_uid)
except ValueError:
setting = linked_setting_model.objects.get(
tethys_app=app, name=setting_uid)
setattr(setting, linked_service_field, service)
setting.save()
with pretty_output(FG_GREEN) as p:
p.write(f'{service.__class__.__name__}:"{service.name}" was successfully linked '
f'to {setting.__class__.__name__}:"{setting.name}" of the "{app_package}" Tethys App')
return True
except ObjectDoesNotExist:
with pretty_output(FG_RED) as p:
p.write(
f'A {linked_setting_model.__name__} with ID/Name "{setting_uid}" does not exist.')
return False
def get_service_model_from_type(service_type):
from tethys_services.models import (
SpatialDatasetService, DatasetService, PersistentStoreService, WebProcessingService)
service_type_to_model_dict = {
"spatial": SpatialDatasetService,
"dataset": DatasetService,
"persistent": PersistentStoreService,
'wps': WebProcessingService
}
return service_type_to_model_dict[service_type]
def user_can_access_app(user, app):
from django.conf import settings
if getattr(settings, 'ENABLE_OPEN_PORTAL', False):
return True
elif getattr(settings, "ENABLE_RESTRICTED_APP_ACCESS", False):
return user.has_perm(f'{app.package}:access_app', app)
else:
return True
|
{
"content_hash": "682e25007362f9aed8b120505aeea2d4",
"timestamp": "",
"source": "github",
"line_count": 413,
"max_line_length": 191,
"avg_line_length": 37.96125907990315,
"alnum_prop": 0.614300293404771,
"repo_name": "CI-WATER/tethys",
"id": "ae0f98268a304ba94ce64513947547e581ef978f",
"size": "15678",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tethys_apps/utilities.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "175789"
},
{
"name": "HTML",
"bytes": "149728"
},
{
"name": "JavaScript",
"bytes": "360375"
},
{
"name": "Python",
"bytes": "592551"
}
],
"symlink_target": ""
}
|
"""
Tests for the Crazy Egg template tags and filters.
"""
from django.http import HttpRequest
from django.template import Context
from django.test.utils import override_settings
from analytical.templatetags.crazy_egg import CrazyEggNode
from analytical.tests.utils import TagTestCase
from analytical.utils import AnalyticalException
@override_settings(CRAZY_EGG_ACCOUNT_NUMBER='12345678')
class CrazyEggTagTestCase(TagTestCase):
"""
Tests for the ``crazy_egg`` template tag.
"""
def test_tag(self):
r = self.render_tag('crazy_egg', 'crazy_egg')
self.assertTrue('/1234/5678.js' in r, r)
def test_node(self):
r = CrazyEggNode().render(Context())
self.assertTrue('/1234/5678.js' in r, r)
@override_settings(CRAZY_EGG_ACCOUNT_NUMBER=None)
def test_no_account_number(self):
self.assertRaises(AnalyticalException, CrazyEggNode)
@override_settings(CRAZY_EGG_ACCOUNT_NUMBER='123abc')
def test_wrong_account_number(self):
self.assertRaises(AnalyticalException, CrazyEggNode)
def test_uservars(self):
context = Context({'crazy_egg_var1': 'foo', 'crazy_egg_var2': 'bar'})
r = CrazyEggNode().render(context)
self.assertTrue("CE2.set(1, 'foo');" in r, r)
self.assertTrue("CE2.set(2, 'bar');" in r, r)
@override_settings(ANALYTICAL_INTERNAL_IPS=['1.1.1.1'])
def test_render_internal_ip(self):
req = HttpRequest()
req.META['REMOTE_ADDR'] = '1.1.1.1'
context = Context({'request': req})
r = CrazyEggNode().render(context)
self.assertTrue(r.startswith(
'<!-- Crazy Egg disabled on internal IP address'), r)
self.assertTrue(r.endswith('-->'), r)
|
{
"content_hash": "9f4bab4a5c9d790346f655deef0caf16",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 77,
"avg_line_length": 34.62,
"alnum_prop": 0.6632004621606008,
"repo_name": "machtfit/django-analytical",
"id": "66e6d56d8a2de27fb359247224be7d235a683401",
"size": "1731",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "analytical/tests/test_tag_crazy_egg.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "151233"
}
],
"symlink_target": ""
}
|
DIFF_TEST_DATA = '''diff --git a/WebCore/layout/style/StyleFlexibleBoxData.h b/WebCore/layout/style/StyleFlexibleBoxData.h
index f5d5e74..3b6aa92 100644
--- a/WebCore/layout/style/StyleFlexibleBoxData.h
+++ b/WebCore/layout/style/StyleFlexibleBoxData.h
@@ -47,7 +47,6 @@ public:
unsigned align : 3; // EBoxAlignment
unsigned pack: 3; // EBoxAlignment
- unsigned orient: 1; // EBoxOrient
unsigned lines : 1; // EBoxLines
private:
diff --git a/WebCore/layout/style/StyleRareInheritedData.cpp b/WebCore/layout/style/StyleRareInheritedData.cpp
index ce21720..324929e 100644
--- a/WebCore/layout/style/StyleRareInheritedData.cpp
+++ b/WebCore/layout/style/StyleRareInheritedData.cpp
@@ -39,6 +39,7 @@ StyleRareInheritedData::StyleRareInheritedData()
, textSizeAdjust(LayoutStyle::initialTextSizeAdjust())
, resize(LayoutStyle::initialResize())
, userSelect(LayoutStyle::initialUserSelect())
+ , boxOrient(LayoutStyle::initialBoxOrient())
{
}
@@ -58,6 +59,7 @@ StyleRareInheritedData::StyleRareInheritedData(const StyleRareInheritedData& o)
, textSizeAdjust(o.textSizeAdjust)
, resize(o.resize)
, userSelect(o.userSelect)
+ , boxOrient(o.boxOrient)
{
}
@@ -81,7 +83,8 @@ bool StyleRareInheritedData::operator==(const StyleRareInheritedData& o) const
&& khtmlLineBreak == o.khtmlLineBreak
&& textSizeAdjust == o.textSizeAdjust
&& resize == o.resize
- && userSelect == o.userSelect;
+ && userSelect == o.userSelect
+ && boxOrient == o.boxOrient;
}
bool StyleRareInheritedData::shadowDataEquivalent(const StyleRareInheritedData& o) const
diff --git a/LayoutTests/platform/mac/fast/flexbox/box-orient-button-expected.checksum b/LayoutTests/platform/mac/fast/flexbox/box-orient-button-expected.checksum
new file mode 100644
index 0000000..6db26bd
--- /dev/null
+++ b/LayoutTests/platform/mac/fast/flexbox/box-orient-button-expected.checksum
@@ -0,0 +1 @@
+61a373ee739673a9dcd7bac62b9f182e
\ No newline at end of file
'''
|
{
"content_hash": "44b5eec2408a1d67ceb15ec217d80f98",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 162,
"avg_line_length": 39.78431372549019,
"alnum_prop": 0.7274519467718088,
"repo_name": "sgraham/nope",
"id": "6d25d43ec9e6ac05f3f767fa505ce2066ecc891b",
"size": "3637",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "third_party/WebKit/Tools/Scripts/webkitpy/common/checkout/diff_test_data.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "AppleScript",
"bytes": "6973"
},
{
"name": "Arduino",
"bytes": "464"
},
{
"name": "Assembly",
"bytes": "39967"
},
{
"name": "C",
"bytes": "4061434"
},
{
"name": "C++",
"bytes": "279546186"
},
{
"name": "CMake",
"bytes": "27212"
},
{
"name": "CSS",
"bytes": "919339"
},
{
"name": "Emacs Lisp",
"bytes": "988"
},
{
"name": "Go",
"bytes": "13628"
},
{
"name": "Groff",
"bytes": "5283"
},
{
"name": "HTML",
"bytes": "15989749"
},
{
"name": "Java",
"bytes": "7541683"
},
{
"name": "JavaScript",
"bytes": "32372588"
},
{
"name": "Lua",
"bytes": "16189"
},
{
"name": "Makefile",
"bytes": "40513"
},
{
"name": "Objective-C",
"bytes": "1584184"
},
{
"name": "Objective-C++",
"bytes": "8249988"
},
{
"name": "PHP",
"bytes": "97817"
},
{
"name": "PLpgSQL",
"bytes": "169060"
},
{
"name": "Perl",
"bytes": "63937"
},
{
"name": "Protocol Buffer",
"bytes": "427339"
},
{
"name": "Python",
"bytes": "8346306"
},
{
"name": "Scheme",
"bytes": "10604"
},
{
"name": "Shell",
"bytes": "844553"
},
{
"name": "Standard ML",
"bytes": "4965"
},
{
"name": "VimL",
"bytes": "4075"
},
{
"name": "XSLT",
"bytes": "418"
},
{
"name": "nesC",
"bytes": "18347"
}
],
"symlink_target": ""
}
|
from django.conf.urls import url
from registration import views
import django.contrib.auth.views as auth_views
urlpatterns = [
url(r'^register/$', views.register, name='register'),
url(r'^login/$', views.user_login, name='login'),
url(r'^logout/$', views.user_logout, name='logout'),
url(r'^profile/$', views.ProfileUpdate.as_view(), name='profile'),
url(r'^change_password/$', auth_views.PasswordChangeView.as_view(), {'post_change_redirect': 'profile'}, name='password_change'),
]
|
{
"content_hash": "2b73eea9f9f73e913647d1ce546c9f3d",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 133,
"avg_line_length": 45.72727272727273,
"alnum_prop": 0.6918489065606361,
"repo_name": "sdrogers/ms2ldaviz",
"id": "c9d50497a5c35c836161f8b0d2f0c8695d917f80",
"size": "503",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ms2ldaviz/registration/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "155389"
},
{
"name": "Dockerfile",
"bytes": "324"
},
{
"name": "HTML",
"bytes": "281089"
},
{
"name": "JavaScript",
"bytes": "564464"
},
{
"name": "Jupyter Notebook",
"bytes": "22354299"
},
{
"name": "Python",
"bytes": "897444"
},
{
"name": "Shell",
"bytes": "561"
}
],
"symlink_target": ""
}
|
""" Provides the configuration for the weather module. """
from configparser import ConfigParser
import os
import sys
import errno
class Config():
"""
    Reads the default configuration from the config file. If the file
    doesn't exist, it is created.
:param config_file: Location of config file
"""
def __init__(self, config_file=None):
if not config_file:
config_file = "~/.config/weather/config"
# Get the absolute file path
self.config_file = os.path.expanduser(config_file)
if not os.path.isfile(self.config_file):
self.create_config()
self.parse_config()
def parse_config(self, profile='default'):
""" Reads the config file and imports settings. """
config = ConfigParser()
config.read(self.config_file)
self.location = config[profile]['location']
self.units = config[profile]['units']
self.date_format = config[profile]['date_format']
if config[profile]['api_key'] != 'api_key':
self.api_key = config[profile]['api_key']
else:
            # If an environment variable exists for api_key, use it.
try:
self.api_key = os.environ['WEATHER_API_KEY']
except KeyError:
pass
try:
if self.api_key == 'api_key':
raise AttributeError(
'API_KEY not set in config file or environment.'
)
except AttributeError as err:
print(err)
sys.exit()
def create_config(self):
""" Creates the config file. """
config = ConfigParser()
config['default'] = {'api_key': 'api_key',
'location': '27607',
'date_format': 'date',
'units': 'english'}
# Create directory if it doesn't exist
self.create_dir()
# Write config file
with open(self.config_file, 'w+') as configfile:
config.write(configfile)
def create_dir(self):
""" Creates defaults directory if it doesn't exist. """
directory = os.path.expanduser(os.path.dirname(self.config_file))
try:
os.makedirs(directory)
except OSError as exc:
if exc.errno == errno.EEXIST and os.path.isdir(directory):
pass
else:
raise
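# Illustrative sketch (not executed by this module): with the defaults
# above, create_config() writes roughly the following to
# ~/.config/weather/config:
#   [default]
#   api_key = api_key
#   location = 27607
#   date_format = date
#   units = english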
|
{
"content_hash": "124b63ef62a2984299499823e3cc165e",
"timestamp": "",
"source": "github",
"line_count": 78,
"max_line_length": 76,
"avg_line_length": 31.256410256410255,
"alnum_prop": 0.5467596390484003,
"repo_name": "paris3200/wunderapi",
"id": "91a75698ec577fdf041f5ae115cc81fd74fc00a1",
"size": "2438",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "weather/config.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "15991"
}
],
"symlink_target": ""
}
|
import pygame
from toast.math import lerp
class Gradient(object):
@staticmethod
def createVerticalGradient(dimension, *stops):
surf = pygame.Surface(dimension)
width, height = dimension
num_stops = len(stops)
for h in range(height):
step = float(h) / (height)
            # Calculate the array index
i = min(int(step * (num_stops - 1)), num_stops - 1)
# Adjust the step to properly blend between the gradient stops.
grad_step = step * (num_stops - 1) - i
r = lerp(stops[i][0], stops[i+1][0], grad_step)
g = lerp(stops[i][1], stops[i+1][1], grad_step)
b = lerp(stops[i][2], stops[i+1][2], grad_step)
pygame.draw.line(surf, (r,g,b), (0, h), (width, h))
return surf
@staticmethod
def createHorizontalGradient(dimension, *stops):
surf = pygame.Surface(dimension)
width, height = dimension
num_stops = len(stops)
for w in range(width):
step = float(w) / (width)
            # Calculate the array index
i = min(int(step * (num_stops - 1)), num_stops - 1)
# Adjust the step to properly blend between the gradient stops.
grad_step = step * (num_stops - 1) - i
r = lerp(stops[i][0], stops[i+1][0], grad_step)
g = lerp(stops[i][1], stops[i+1][1], grad_step)
b = lerp(stops[i][2], stops[i+1][2], grad_step)
pygame.draw.line(surf, (r,g,b), (w, 0), (w, height))
return surf
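# Hypothetical usage sketch (dimensions and colours are examples):
#   surf = Gradient.createVerticalGradient((64, 128),
#                                          (255, 0, 0),   # top stop: red
#                                          (0, 0, 255))   # bottom stop: blue
# returns a 64x128 pygame Surface that blends linearly from red at the top
# to blue at the bottom; createHorizontalGradient works the same way along
# the x axis.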
|
{
"content_hash": "2fdef42518f279d05a5c73f1e80b06d4",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 75,
"avg_line_length": 33.72549019607843,
"alnum_prop": 0.488953488372093,
"repo_name": "JSkelly/Toast",
"id": "3f6cedc717421a2345053a96bf16159f6df40023",
"size": "1720",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "toast/gradient.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "132965"
}
],
"symlink_target": ""
}
|
import sys
from uxie.utils import lazy_func
from inspect import cleandoc
def test_lazy_func(tmpdir):
package = tmpdir.mkdir('package')
package.join('__init__.py').write('')
package.join('func.py').write(cleandoc('''
def func():
return 10
'''))
package.join('test.py').write(cleandoc('''
from uxie.utils import lazy_func
import sys
def test():
func = lazy_func('.func.func')
old_code = func.__code__
result = func()
assert result == 10
del sys.modules['package.test']
del sys.modules['package']
result = func()
assert result == 10
'''))
old_path = sys.path
sys.path = [str(tmpdir)] + old_path
__import__('package.test')
sys.path = old_path
sys.modules['package.test'].test()
|
{
"content_hash": "6071b94d6422214dff4c43ee338190f2",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 46,
"avg_line_length": 23.405405405405407,
"alnum_prop": 0.5392609699769053,
"repo_name": "baverman/uxie",
"id": "9b5580f0a02327dbb8b59c71961bf2871ac8da47",
"size": "866",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_utils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "66585"
}
],
"symlink_target": ""
}
|
import glob
import json
import math
import os
import re
import time
from base64 import b64decode
from subprocess import check_call, CalledProcessError
import six
from charmhelpers.fetch import (
apt_install,
filter_installed_packages,
)
from charmhelpers.core.hookenv import (
config,
is_relation_made,
local_unit,
log,
relation_get,
relation_ids,
related_units,
relation_set,
unit_get,
unit_private_ip,
charm_name,
DEBUG,
INFO,
WARNING,
ERROR,
status_set,
)
from charmhelpers.core.sysctl import create as sysctl_create
from charmhelpers.core.strutils import bool_from_string
from charmhelpers.contrib.openstack.exceptions import OSContextError
from charmhelpers.core.host import (
get_bond_master,
is_phy_iface,
list_nics,
get_nic_hwaddr,
mkdir,
write_file,
pwgen,
lsb_release,
CompareHostReleases,
is_container,
)
from charmhelpers.contrib.hahelpers.cluster import (
determine_apache_port,
determine_api_port,
https,
is_clustered,
)
from charmhelpers.contrib.hahelpers.apache import (
get_cert,
get_ca_cert,
install_ca_cert,
)
from charmhelpers.contrib.openstack.neutron import (
neutron_plugin_attribute,
parse_data_port_mappings,
)
from charmhelpers.contrib.openstack.ip import (
resolve_address,
INTERNAL,
)
from charmhelpers.contrib.network.ip import (
get_address_in_network,
get_ipv4_addr,
get_ipv6_addr,
get_netmask_for_address,
format_ipv6_addr,
is_address_in_network,
is_bridge_member,
is_ipv6_disabled,
)
from charmhelpers.contrib.openstack.utils import (
config_flags_parser,
get_host_ip,
git_determine_usr_bin,
git_determine_python_path,
enable_memcache,
)
from charmhelpers.core.unitdata import kv
try:
import psutil
except ImportError:
if six.PY2:
apt_install('python-psutil', fatal=True)
else:
apt_install('python3-psutil', fatal=True)
import psutil
CA_CERT_PATH = '/usr/local/share/ca-certificates/keystone_juju_ca_cert.crt'
ADDRESS_TYPES = ['admin', 'internal', 'public']
HAPROXY_RUN_DIR = '/var/run/haproxy/'
def ensure_packages(packages):
"""Install but do not upgrade required plugin packages."""
required = filter_installed_packages(packages)
if required:
apt_install(required, fatal=True)
def context_complete(ctxt):
_missing = []
for k, v in six.iteritems(ctxt):
if v is None or v == '':
_missing.append(k)
if _missing:
log('Missing required data: %s' % ' '.join(_missing), level=INFO)
return False
return True
class OSContextGenerator(object):
"""Base class for all context generators."""
interfaces = []
related = False
complete = False
missing_data = []
def __call__(self):
raise NotImplementedError
def context_complete(self, ctxt):
"""Check for missing data for the required context data.
Set self.missing_data if it exists and return False.
Set self.complete if no missing data and return True.
"""
# Fresh start
self.complete = False
self.missing_data = []
for k, v in six.iteritems(ctxt):
if v is None or v == '':
if k not in self.missing_data:
self.missing_data.append(k)
if self.missing_data:
self.complete = False
log('Missing required data: %s' % ' '.join(self.missing_data),
level=INFO)
else:
self.complete = True
return self.complete
def get_related(self):
"""Check if any of the context interfaces have relation ids.
Set self.related and return True if one of the interfaces
has relation ids.
"""
# Fresh start
self.related = False
try:
for interface in self.interfaces:
if relation_ids(interface):
self.related = True
return self.related
except AttributeError as e:
log("{} {}"
"".format(self, e), 'INFO')
return self.related
class SharedDBContext(OSContextGenerator):
interfaces = ['shared-db']
def __init__(self,
database=None, user=None, relation_prefix=None, ssl_dir=None):
"""Allows inspecting relation for settings prefixed with
relation_prefix. This is useful for parsing access for multiple
databases returned via the shared-db interface (eg, nova_password,
quantum_password)
"""
self.relation_prefix = relation_prefix
self.database = database
self.user = user
self.ssl_dir = ssl_dir
self.rel_name = self.interfaces[0]
def __call__(self):
self.database = self.database or config('database')
self.user = self.user or config('database-user')
if None in [self.database, self.user]:
log("Could not generate shared_db context. Missing required charm "
"config options. (database name and user)", level=ERROR)
raise OSContextError
ctxt = {}
# NOTE(jamespage) if mysql charm provides a network upon which
# access to the database should be made, reconfigure relation
# with the service units local address and defer execution
access_network = relation_get('access-network')
if access_network is not None:
if self.relation_prefix is not None:
hostname_key = "{}_hostname".format(self.relation_prefix)
else:
hostname_key = "hostname"
access_hostname = get_address_in_network(
access_network,
unit_get('private-address'))
set_hostname = relation_get(attribute=hostname_key,
unit=local_unit())
if set_hostname != access_hostname:
relation_set(relation_settings={hostname_key: access_hostname})
return None # Defer any further hook execution for now....
password_setting = 'password'
if self.relation_prefix:
password_setting = self.relation_prefix + '_password'
for rid in relation_ids(self.interfaces[0]):
self.related = True
for unit in related_units(rid):
rdata = relation_get(rid=rid, unit=unit)
host = rdata.get('db_host')
host = format_ipv6_addr(host) or host
ctxt = {
'database_host': host,
'database': self.database,
'database_user': self.user,
'database_password': rdata.get(password_setting),
'database_type': 'mysql'
}
if self.context_complete(ctxt):
db_ssl(rdata, ctxt, self.ssl_dir)
return ctxt
return {}
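# Hypothetical usage sketch (charm-specific values are examples, not part
# of this module): a charm that accesses several databases over a single
# 'shared-db' relation might instantiate
#   SharedDBContext(database='nova', user='nova', relation_prefix='nova')
# so that the password is read from the 'nova_password' relation key and,
# when an access-network is published, the hostname is set back on the
# relation under 'nova_hostname'.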
class PostgresqlDBContext(OSContextGenerator):
interfaces = ['pgsql-db']
def __init__(self, database=None):
self.database = database
def __call__(self):
self.database = self.database or config('database')
if self.database is None:
log('Could not generate postgresql_db context. Missing required '
'charm config options. (database name)', level=ERROR)
raise OSContextError
ctxt = {}
for rid in relation_ids(self.interfaces[0]):
self.related = True
for unit in related_units(rid):
rel_host = relation_get('host', rid=rid, unit=unit)
rel_user = relation_get('user', rid=rid, unit=unit)
rel_passwd = relation_get('password', rid=rid, unit=unit)
ctxt = {'database_host': rel_host,
'database': self.database,
'database_user': rel_user,
'database_password': rel_passwd,
'database_type': 'postgresql'}
if self.context_complete(ctxt):
return ctxt
return {}
def db_ssl(rdata, ctxt, ssl_dir):
if 'ssl_ca' in rdata and ssl_dir:
ca_path = os.path.join(ssl_dir, 'db-client.ca')
with open(ca_path, 'w') as fh:
fh.write(b64decode(rdata['ssl_ca']))
ctxt['database_ssl_ca'] = ca_path
elif 'ssl_ca' in rdata:
log("Charm not setup for ssl support but ssl ca found", level=INFO)
return ctxt
if 'ssl_cert' in rdata:
cert_path = os.path.join(
ssl_dir, 'db-client.cert')
if not os.path.exists(cert_path):
log("Waiting 1m for ssl client cert validity", level=INFO)
time.sleep(60)
with open(cert_path, 'w') as fh:
fh.write(b64decode(rdata['ssl_cert']))
ctxt['database_ssl_cert'] = cert_path
key_path = os.path.join(ssl_dir, 'db-client.key')
with open(key_path, 'w') as fh:
fh.write(b64decode(rdata['ssl_key']))
ctxt['database_ssl_key'] = key_path
return ctxt
class IdentityServiceContext(OSContextGenerator):
def __init__(self,
service=None,
service_user=None,
rel_name='identity-service'):
self.service = service
self.service_user = service_user
self.rel_name = rel_name
self.interfaces = [self.rel_name]
def __call__(self):
log('Generating template context for ' + self.rel_name, level=DEBUG)
ctxt = {}
if self.service and self.service_user:
# This is required for pki token signing if we don't want /tmp to
# be used.
cachedir = '/var/cache/%s' % (self.service)
if not os.path.isdir(cachedir):
log("Creating service cache dir %s" % (cachedir), level=DEBUG)
mkdir(path=cachedir, owner=self.service_user,
group=self.service_user, perms=0o700)
ctxt['signing_dir'] = cachedir
for rid in relation_ids(self.rel_name):
self.related = True
for unit in related_units(rid):
rdata = relation_get(rid=rid, unit=unit)
serv_host = rdata.get('service_host')
serv_host = format_ipv6_addr(serv_host) or serv_host
auth_host = rdata.get('auth_host')
auth_host = format_ipv6_addr(auth_host) or auth_host
svc_protocol = rdata.get('service_protocol') or 'http'
auth_protocol = rdata.get('auth_protocol') or 'http'
api_version = rdata.get('api_version') or '2.0'
ctxt.update({'service_port': rdata.get('service_port'),
'service_host': serv_host,
'auth_host': auth_host,
'auth_port': rdata.get('auth_port'),
'admin_tenant_name': rdata.get('service_tenant'),
'admin_user': rdata.get('service_username'),
'admin_password': rdata.get('service_password'),
'service_protocol': svc_protocol,
'auth_protocol': auth_protocol,
'api_version': api_version})
if float(api_version) > 2:
ctxt.update({'admin_domain_name':
rdata.get('service_domain')})
if self.context_complete(ctxt):
# NOTE(jamespage) this is required for >= icehouse
# so a missing value just indicates keystone needs
# upgrading
ctxt['admin_tenant_id'] = rdata.get('service_tenant_id')
return ctxt
return {}
class AMQPContext(OSContextGenerator):
def __init__(self, ssl_dir=None, rel_name='amqp', relation_prefix=None):
self.ssl_dir = ssl_dir
self.rel_name = rel_name
self.relation_prefix = relation_prefix
self.interfaces = [rel_name]
def __call__(self):
log('Generating template context for amqp', level=DEBUG)
conf = config()
if self.relation_prefix:
user_setting = '%s-rabbit-user' % (self.relation_prefix)
vhost_setting = '%s-rabbit-vhost' % (self.relation_prefix)
else:
user_setting = 'rabbit-user'
vhost_setting = 'rabbit-vhost'
try:
username = conf[user_setting]
vhost = conf[vhost_setting]
except KeyError as e:
log('Could not generate shared_db context. Missing required charm '
'config options: %s.' % e, level=ERROR)
raise OSContextError
ctxt = {}
for rid in relation_ids(self.rel_name):
ha_vip_only = False
self.related = True
transport_hosts = None
rabbitmq_port = '5672'
for unit in related_units(rid):
if relation_get('clustered', rid=rid, unit=unit):
ctxt['clustered'] = True
vip = relation_get('vip', rid=rid, unit=unit)
vip = format_ipv6_addr(vip) or vip
ctxt['rabbitmq_host'] = vip
transport_hosts = [vip]
else:
host = relation_get('private-address', rid=rid, unit=unit)
host = format_ipv6_addr(host) or host
ctxt['rabbitmq_host'] = host
transport_hosts = [host]
ctxt.update({
'rabbitmq_user': username,
'rabbitmq_password': relation_get('password', rid=rid,
unit=unit),
'rabbitmq_virtual_host': vhost,
})
ssl_port = relation_get('ssl_port', rid=rid, unit=unit)
if ssl_port:
ctxt['rabbit_ssl_port'] = ssl_port
rabbitmq_port = ssl_port
ssl_ca = relation_get('ssl_ca', rid=rid, unit=unit)
if ssl_ca:
ctxt['rabbit_ssl_ca'] = ssl_ca
if relation_get('ha_queues', rid=rid, unit=unit) is not None:
ctxt['rabbitmq_ha_queues'] = True
ha_vip_only = relation_get('ha-vip-only',
rid=rid, unit=unit) is not None
if self.context_complete(ctxt):
if 'rabbit_ssl_ca' in ctxt:
if not self.ssl_dir:
log("Charm not setup for ssl support but ssl ca "
"found", level=INFO)
break
ca_path = os.path.join(
self.ssl_dir, 'rabbit-client-ca.pem')
with open(ca_path, 'w') as fh:
fh.write(b64decode(ctxt['rabbit_ssl_ca']))
ctxt['rabbit_ssl_ca'] = ca_path
# Sufficient information found = break out!
break
# Used for active/active rabbitmq >= grizzly
if (('clustered' not in ctxt or ha_vip_only) and
len(related_units(rid)) > 1):
rabbitmq_hosts = []
for unit in related_units(rid):
host = relation_get('private-address', rid=rid, unit=unit)
host = format_ipv6_addr(host) or host
rabbitmq_hosts.append(host)
rabbitmq_hosts = sorted(rabbitmq_hosts)
ctxt['rabbitmq_hosts'] = ','.join(rabbitmq_hosts)
transport_hosts = rabbitmq_hosts
if transport_hosts:
transport_url_hosts = ','.join([
"{}:{}@{}:{}".format(ctxt['rabbitmq_user'],
ctxt['rabbitmq_password'],
host_,
rabbitmq_port)
for host_ in transport_hosts])
ctxt['transport_url'] = "rabbit://{}/{}".format(
transport_url_hosts, vhost)
oslo_messaging_flags = conf.get('oslo-messaging-flags', None)
if oslo_messaging_flags:
ctxt['oslo_messaging_flags'] = config_flags_parser(
oslo_messaging_flags)
if not self.complete:
return {}
return ctxt
class CephContext(OSContextGenerator):
"""Generates context for /etc/ceph/ceph.conf templates."""
interfaces = ['ceph']
def __call__(self):
if not relation_ids('ceph'):
return {}
log('Generating template context for ceph', level=DEBUG)
mon_hosts = []
ctxt = {
'use_syslog': str(config('use-syslog')).lower()
}
for rid in relation_ids('ceph'):
for unit in related_units(rid):
if not ctxt.get('auth'):
ctxt['auth'] = relation_get('auth', rid=rid, unit=unit)
if not ctxt.get('key'):
ctxt['key'] = relation_get('key', rid=rid, unit=unit)
ceph_addrs = relation_get('ceph-public-address', rid=rid,
unit=unit)
if ceph_addrs:
for addr in ceph_addrs.split(' '):
mon_hosts.append(format_ipv6_addr(addr) or addr)
else:
priv_addr = relation_get('private-address', rid=rid,
unit=unit)
mon_hosts.append(format_ipv6_addr(priv_addr) or priv_addr)
ctxt['mon_hosts'] = ' '.join(sorted(mon_hosts))
if not os.path.isdir('/etc/ceph'):
os.mkdir('/etc/ceph')
if not self.context_complete(ctxt):
return {}
ensure_packages(['ceph-common'])
return ctxt
class HAProxyContext(OSContextGenerator):
"""Provides half a context for the haproxy template, which describes
all peers to be included in the cluster. Each charm needs to include
its own context generator that describes the port mapping.
:side effect: mkdir is called on HAPROXY_RUN_DIR
"""
interfaces = ['cluster']
def __init__(self, singlenode_mode=False):
self.singlenode_mode = singlenode_mode
def __call__(self):
if not os.path.isdir(HAPROXY_RUN_DIR):
mkdir(path=HAPROXY_RUN_DIR)
if not relation_ids('cluster') and not self.singlenode_mode:
return {}
if config('prefer-ipv6'):
addr = get_ipv6_addr(exc_list=[config('vip')])[0]
else:
addr = get_host_ip(unit_get('private-address'))
l_unit = local_unit().replace('/', '-')
cluster_hosts = {}
# NOTE(jamespage): build out map of configured network endpoints
# and associated backends
for addr_type in ADDRESS_TYPES:
cfg_opt = 'os-{}-network'.format(addr_type)
laddr = get_address_in_network(config(cfg_opt))
if laddr:
netmask = get_netmask_for_address(laddr)
cluster_hosts[laddr] = {'network': "{}/{}".format(laddr,
netmask),
'backends': {l_unit: laddr}}
for rid in relation_ids('cluster'):
for unit in related_units(rid):
_laddr = relation_get('{}-address'.format(addr_type),
rid=rid, unit=unit)
if _laddr:
_unit = unit.replace('/', '-')
cluster_hosts[laddr]['backends'][_unit] = _laddr
# NOTE(jamespage) add backend based on private address - this
        # will either be the only backend or the fallback if no acls
# match in the frontend
cluster_hosts[addr] = {}
netmask = get_netmask_for_address(addr)
cluster_hosts[addr] = {'network': "{}/{}".format(addr, netmask),
'backends': {l_unit: addr}}
for rid in relation_ids('cluster'):
for unit in related_units(rid):
_laddr = relation_get('private-address',
rid=rid, unit=unit)
if _laddr:
_unit = unit.replace('/', '-')
cluster_hosts[addr]['backends'][_unit] = _laddr
ctxt = {
'frontends': cluster_hosts,
'default_backend': addr
}
if config('haproxy-server-timeout'):
ctxt['haproxy_server_timeout'] = config('haproxy-server-timeout')
if config('haproxy-client-timeout'):
ctxt['haproxy_client_timeout'] = config('haproxy-client-timeout')
if config('haproxy-queue-timeout'):
ctxt['haproxy_queue_timeout'] = config('haproxy-queue-timeout')
if config('haproxy-connect-timeout'):
ctxt['haproxy_connect_timeout'] = config('haproxy-connect-timeout')
if config('prefer-ipv6'):
ctxt['ipv6'] = True
ctxt['local_host'] = 'ip6-localhost'
ctxt['haproxy_host'] = '::'
else:
ctxt['local_host'] = '127.0.0.1'
ctxt['haproxy_host'] = '0.0.0.0'
ctxt['stat_port'] = '8888'
db = kv()
ctxt['stat_password'] = db.get('stat-password')
if not ctxt['stat_password']:
ctxt['stat_password'] = db.set('stat-password',
pwgen(32))
db.flush()
for frontend in cluster_hosts:
if (len(cluster_hosts[frontend]['backends']) > 1 or
self.singlenode_mode):
# Enable haproxy when we have enough peers.
log('Ensuring haproxy enabled in /etc/default/haproxy.',
level=DEBUG)
with open('/etc/default/haproxy', 'w') as out:
out.write('ENABLED=1\n')
return ctxt
log('HAProxy context is incomplete, this unit has no peers.',
level=INFO)
return {}
class ImageServiceContext(OSContextGenerator):
interfaces = ['image-service']
def __call__(self):
"""Obtains the glance API server from the image-service relation.
Useful in nova and cinder (currently).
"""
log('Generating template context for image-service.', level=DEBUG)
rids = relation_ids('image-service')
if not rids:
return {}
for rid in rids:
for unit in related_units(rid):
api_server = relation_get('glance-api-server',
rid=rid, unit=unit)
if api_server:
return {'glance_api_servers': api_server}
log("ImageService context is incomplete. Missing required relation "
"data.", level=INFO)
return {}
class ApacheSSLContext(OSContextGenerator):
"""Generates a context for an apache vhost configuration that configures
HTTPS reverse proxying for one or many endpoints. Generated context
looks something like::
{
'namespace': 'cinder',
'private_address': 'iscsi.mycinderhost.com',
'endpoints': [(8776, 8766), (8777, 8767)]
}
    The endpoints list consists of tuples mapping external ports
to internal ports.
"""
interfaces = ['https']
# charms should inherit this context and set external ports
# and service namespace accordingly.
external_ports = []
service_namespace = None
def enable_modules(self):
cmd = ['a2enmod', 'ssl', 'proxy', 'proxy_http', 'headers']
check_call(cmd)
def configure_cert(self, cn=None):
ssl_dir = os.path.join('/etc/apache2/ssl/', self.service_namespace)
mkdir(path=ssl_dir)
cert, key = get_cert(cn)
if cn:
cert_filename = 'cert_{}'.format(cn)
key_filename = 'key_{}'.format(cn)
else:
cert_filename = 'cert'
key_filename = 'key'
write_file(path=os.path.join(ssl_dir, cert_filename),
content=b64decode(cert))
write_file(path=os.path.join(ssl_dir, key_filename),
content=b64decode(key))
def configure_ca(self):
ca_cert = get_ca_cert()
if ca_cert:
install_ca_cert(b64decode(ca_cert))
def canonical_names(self):
"""Figure out which canonical names clients will access this service.
"""
cns = []
for r_id in relation_ids('identity-service'):
for unit in related_units(r_id):
rdata = relation_get(rid=r_id, unit=unit)
for k in rdata:
if k.startswith('ssl_key_'):
                        # take everything after the 'ssl_key_' prefix
                        # (lstrip would strip a character set, not a prefix)
                        cns.append(k[len('ssl_key_'):])
return sorted(list(set(cns)))
def get_network_addresses(self):
"""For each network configured, return corresponding address and vip
(if available).
Returns a list of tuples of the form:
[(address_in_net_a, vip_in_net_a),
(address_in_net_b, vip_in_net_b),
...]
or, if no vip(s) available:
[(address_in_net_a, address_in_net_a),
(address_in_net_b, address_in_net_b),
...]
"""
addresses = []
if config('vip'):
vips = config('vip').split()
else:
vips = []
for net_type in ['os-internal-network', 'os-admin-network',
'os-public-network']:
addr = get_address_in_network(config(net_type),
unit_get('private-address'))
if len(vips) > 1 and is_clustered():
if not config(net_type):
log("Multiple networks configured but net_type "
"is None (%s)." % net_type, level=WARNING)
continue
for vip in vips:
if is_address_in_network(config(net_type), vip):
addresses.append((addr, vip))
break
elif is_clustered() and config('vip'):
addresses.append((addr, config('vip')))
else:
addresses.append((addr, addr))
return sorted(addresses)
def __call__(self):
if isinstance(self.external_ports, six.string_types):
self.external_ports = [self.external_ports]
if not self.external_ports or not https():
return {}
self.configure_ca()
self.enable_modules()
ctxt = {'namespace': self.service_namespace,
'endpoints': [],
'ext_ports': []}
cns = self.canonical_names()
if cns:
for cn in cns:
self.configure_cert(cn)
else:
# Expect cert/key provided in config (currently assumed that ca
# uses ip for cn)
cn = resolve_address(endpoint_type=INTERNAL)
self.configure_cert(cn)
addresses = self.get_network_addresses()
for address, endpoint in sorted(set(addresses)):
for api_port in self.external_ports:
ext_port = determine_apache_port(api_port,
singlenode_mode=True)
int_port = determine_api_port(api_port, singlenode_mode=True)
portmap = (address, endpoint, int(ext_port), int(int_port))
ctxt['endpoints'].append(portmap)
ctxt['ext_ports'].append(int(ext_port))
ctxt['ext_ports'] = sorted(list(set(ctxt['ext_ports'])))
return ctxt
class NeutronContext(OSContextGenerator):
interfaces = []
@property
def plugin(self):
return None
@property
def network_manager(self):
return None
@property
def packages(self):
return neutron_plugin_attribute(self.plugin, 'packages',
self.network_manager)
@property
def neutron_security_groups(self):
return None
def _ensure_packages(self):
for pkgs in self.packages:
ensure_packages(pkgs)
def _save_flag_file(self):
if self.network_manager == 'quantum':
_file = '/etc/nova/quantum_plugin.conf'
else:
_file = '/etc/nova/neutron_plugin.conf'
        with open(_file, 'w') as out:
out.write(self.plugin + '\n')
def ovs_ctxt(self):
driver = neutron_plugin_attribute(self.plugin, 'driver',
self.network_manager)
config = neutron_plugin_attribute(self.plugin, 'config',
self.network_manager)
ovs_ctxt = {'core_plugin': driver,
'neutron_plugin': 'ovs',
'neutron_security_groups': self.neutron_security_groups,
'local_ip': unit_private_ip(),
'config': config}
return ovs_ctxt
def nuage_ctxt(self):
driver = neutron_plugin_attribute(self.plugin, 'driver',
self.network_manager)
config = neutron_plugin_attribute(self.plugin, 'config',
self.network_manager)
nuage_ctxt = {'core_plugin': driver,
'neutron_plugin': 'vsp',
'neutron_security_groups': self.neutron_security_groups,
'local_ip': unit_private_ip(),
'config': config}
return nuage_ctxt
def nvp_ctxt(self):
driver = neutron_plugin_attribute(self.plugin, 'driver',
self.network_manager)
config = neutron_plugin_attribute(self.plugin, 'config',
self.network_manager)
nvp_ctxt = {'core_plugin': driver,
'neutron_plugin': 'nvp',
'neutron_security_groups': self.neutron_security_groups,
'local_ip': unit_private_ip(),
'config': config}
return nvp_ctxt
def n1kv_ctxt(self):
driver = neutron_plugin_attribute(self.plugin, 'driver',
self.network_manager)
n1kv_config = neutron_plugin_attribute(self.plugin, 'config',
self.network_manager)
n1kv_user_config_flags = config('n1kv-config-flags')
restrict_policy_profiles = config('n1kv-restrict-policy-profiles')
n1kv_ctxt = {'core_plugin': driver,
'neutron_plugin': 'n1kv',
'neutron_security_groups': self.neutron_security_groups,
'local_ip': unit_private_ip(),
'config': n1kv_config,
'vsm_ip': config('n1kv-vsm-ip'),
'vsm_username': config('n1kv-vsm-username'),
'vsm_password': config('n1kv-vsm-password'),
'restrict_policy_profiles': restrict_policy_profiles}
if n1kv_user_config_flags:
flags = config_flags_parser(n1kv_user_config_flags)
n1kv_ctxt['user_config_flags'] = flags
return n1kv_ctxt
def calico_ctxt(self):
driver = neutron_plugin_attribute(self.plugin, 'driver',
self.network_manager)
config = neutron_plugin_attribute(self.plugin, 'config',
self.network_manager)
calico_ctxt = {'core_plugin': driver,
'neutron_plugin': 'Calico',
'neutron_security_groups': self.neutron_security_groups,
'local_ip': unit_private_ip(),
'config': config}
return calico_ctxt
def neutron_ctxt(self):
if https():
proto = 'https'
else:
proto = 'http'
if is_clustered():
host = config('vip')
else:
host = unit_get('private-address')
ctxt = {'network_manager': self.network_manager,
'neutron_url': '%s://%s:%s' % (proto, host, '9696')}
return ctxt
def pg_ctxt(self):
driver = neutron_plugin_attribute(self.plugin, 'driver',
self.network_manager)
config = neutron_plugin_attribute(self.plugin, 'config',
self.network_manager)
ovs_ctxt = {'core_plugin': driver,
'neutron_plugin': 'plumgrid',
'neutron_security_groups': self.neutron_security_groups,
'local_ip': unit_private_ip(),
'config': config}
return ovs_ctxt
def midonet_ctxt(self):
driver = neutron_plugin_attribute(self.plugin, 'driver',
self.network_manager)
midonet_config = neutron_plugin_attribute(self.plugin, 'config',
self.network_manager)
mido_ctxt = {'core_plugin': driver,
'neutron_plugin': 'midonet',
'neutron_security_groups': self.neutron_security_groups,
'local_ip': unit_private_ip(),
'config': midonet_config}
return mido_ctxt
def __call__(self):
if self.network_manager not in ['quantum', 'neutron']:
return {}
if not self.plugin:
return {}
ctxt = self.neutron_ctxt()
if self.plugin == 'ovs':
ctxt.update(self.ovs_ctxt())
elif self.plugin in ['nvp', 'nsx']:
ctxt.update(self.nvp_ctxt())
elif self.plugin == 'n1kv':
ctxt.update(self.n1kv_ctxt())
elif self.plugin == 'Calico':
ctxt.update(self.calico_ctxt())
elif self.plugin == 'vsp':
ctxt.update(self.nuage_ctxt())
elif self.plugin == 'plumgrid':
ctxt.update(self.pg_ctxt())
elif self.plugin == 'midonet':
ctxt.update(self.midonet_ctxt())
alchemy_flags = config('neutron-alchemy-flags')
if alchemy_flags:
flags = config_flags_parser(alchemy_flags)
ctxt['neutron_alchemy_flags'] = flags
self._save_flag_file()
return ctxt
class NeutronPortContext(OSContextGenerator):
def resolve_ports(self, ports):
"""Resolve NICs not yet bound to bridge(s)
        Entries given as hwaddresses are resolved to their NIC names;
        plain NIC names are returned as-is.
"""
if not ports:
return None
hwaddr_to_nic = {}
hwaddr_to_ip = {}
for nic in list_nics():
# Ignore virtual interfaces (bond masters will be identified from
# their slaves)
if not is_phy_iface(nic):
continue
_nic = get_bond_master(nic)
if _nic:
log("Replacing iface '%s' with bond master '%s'" % (nic, _nic),
level=DEBUG)
nic = _nic
hwaddr = get_nic_hwaddr(nic)
hwaddr_to_nic[hwaddr] = nic
addresses = get_ipv4_addr(nic, fatal=False)
addresses += get_ipv6_addr(iface=nic, fatal=False)
hwaddr_to_ip[hwaddr] = addresses
resolved = []
mac_regex = re.compile(r'([0-9A-F]{2}[:-]){5}([0-9A-F]{2})', re.I)
for entry in ports:
if re.match(mac_regex, entry):
                # NIC is in known NICs and does NOT have an IP address
if entry in hwaddr_to_nic and not hwaddr_to_ip[entry]:
# If the nic is part of a bridge then don't use it
if is_bridge_member(hwaddr_to_nic[entry]):
continue
# Entry is a MAC address for a valid interface that doesn't
# have an IP address assigned yet.
resolved.append(hwaddr_to_nic[entry])
else:
# If the passed entry is not a MAC address, assume it's a valid
# interface, and that the user put it there on purpose (we can
# trust it to be the real external network).
resolved.append(entry)
# Ensure no duplicates
return list(set(resolved))
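# Illustrative sketch (interface name and MAC address are hypothetical):
# given ports=['eth2', 'aa:bb:cc:dd:ee:ff'], 'eth2' is passed through
# unchanged, while the MAC address is only translated to its NIC name if
# that NIC is known, has no IP address assigned yet and is not already a
# member of a bridge.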
class OSConfigFlagContext(OSContextGenerator):
"""Provides support for user-defined config flags.
    Users can define a comma-separated list of key=value pairs
in the charm configuration and apply them at any point in
any file by using a template flag.
Sometimes users might want config flags inserted within a
specific section so this class allows users to specify the
template flag name, allowing for multiple template flags
(sections) within the same context.
NOTE: the value of config-flags may be a comma-separated list of
    key=value pairs and some OpenStack config files support
comma-separated lists as values.
"""
def __init__(self, charm_flag='config-flags',
template_flag='user_config_flags'):
"""
:param charm_flag: config flags in charm configuration.
:param template_flag: insert point for user-defined flags in template
file.
"""
super(OSConfigFlagContext, self).__init__()
self._charm_flag = charm_flag
self._template_flag = template_flag
def __call__(self):
config_flags = config(self._charm_flag)
if not config_flags:
return {}
return {self._template_flag:
config_flags_parser(config_flags)}
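# Illustrative sketch (the option values are made up): with the charm
# option
#   config-flags = "flag_one=value1,flag_two=value2"
# this context renders roughly as
#   {'user_config_flags': {'flag_one': 'value1', 'flag_two': 'value2'}}
# with the exact parsing delegated to config_flags_parser above.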
class LibvirtConfigFlagsContext(OSContextGenerator):
"""
This context provides support for extending
the libvirt section through user-defined flags.
"""
def __call__(self):
ctxt = {}
libvirt_flags = config('libvirt-flags')
if libvirt_flags:
ctxt['libvirt_flags'] = config_flags_parser(
libvirt_flags)
return ctxt
class SubordinateConfigContext(OSContextGenerator):
"""
Responsible for inspecting relations to subordinates that
may be exporting required config via a json blob.
The subordinate interface allows subordinates to export their
    configuration requirements to the principal for multiple config
    files and multiple services. I.e., a subordinate that has interfaces
    to both glance and nova may export the following yaml blob as json::
glance:
/etc/glance/glance-api.conf:
sections:
DEFAULT:
- [key1, value1]
/etc/glance/glance-registry.conf:
MYSECTION:
- [key2, value2]
nova:
/etc/nova/nova.conf:
sections:
DEFAULT:
- [key3, value3]
    It is then up to the principal charms to subscribe this context to
    the service+config file it is interested in. Configuration data will
be available in the template context, in glance's case, as::
ctxt = {
... other context ...
'subordinate_configuration': {
'DEFAULT': {
'key1': 'value1',
},
'MYSECTION': {
'key2': 'value2',
},
}
}
"""
def __init__(self, service, config_file, interface):
"""
:param service : Service name key to query in any subordinate
data found
:param config_file : Service's config file to query sections
:param interface : Subordinate interface to inspect
"""
self.config_file = config_file
if isinstance(service, list):
self.services = service
else:
self.services = [service]
if isinstance(interface, list):
self.interfaces = interface
else:
self.interfaces = [interface]
def __call__(self):
ctxt = {'sections': {}}
rids = []
for interface in self.interfaces:
rids.extend(relation_ids(interface))
for rid in rids:
for unit in related_units(rid):
sub_config = relation_get('subordinate_configuration',
rid=rid, unit=unit)
if sub_config and sub_config != '':
try:
sub_config = json.loads(sub_config)
                    except ValueError:
log('Could not parse JSON from '
'subordinate_configuration setting from %s'
% rid, level=ERROR)
continue
for service in self.services:
if service not in sub_config:
log('Found subordinate_configuration on %s but it '
'contained nothing for %s service'
% (rid, service), level=INFO)
continue
sub_config = sub_config[service]
if self.config_file not in sub_config:
log('Found subordinate_configuration on %s but it '
'contained nothing for %s'
% (rid, self.config_file), level=INFO)
continue
sub_config = sub_config[self.config_file]
for k, v in six.iteritems(sub_config):
if k == 'sections':
for section, config_list in six.iteritems(v):
log("adding section '%s'" % (section),
level=DEBUG)
if ctxt[k].get(section):
ctxt[k][section].extend(config_list)
else:
ctxt[k][section] = config_list
else:
ctxt[k] = v
log("%d section(s) found" % (len(ctxt['sections'])), level=DEBUG)
return ctxt
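# Illustrative sketch (not part of the upstream module; the interface, service
# and path names are hypothetical): a principal charm would typically wire this
# context up as
#   SubordinateConfigContext(interface=['nova-ceilometer'],
#                            service=['nova'],
#                            config_file='/etc/nova/nova.conf')
# and any sections exported by the subordinate for that service and file end
# up under ctxt['sections'] for the template to render.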
class LogLevelContext(OSContextGenerator):
def __call__(self):
ctxt = {}
ctxt['debug'] = \
False if config('debug') is None else config('debug')
ctxt['verbose'] = \
False if config('verbose') is None else config('verbose')
return ctxt
class SyslogContext(OSContextGenerator):
def __call__(self):
ctxt = {'use_syslog': config('use-syslog')}
return ctxt
class BindHostContext(OSContextGenerator):
def __call__(self):
if config('prefer-ipv6'):
return {'bind_host': '::'}
else:
return {'bind_host': '0.0.0.0'}
MAX_DEFAULT_WORKERS = 4
DEFAULT_MULTIPLIER = 2
def _calculate_workers():
'''
Determine the number of worker processes based on the CPU
count of the unit containing the application.
Workers will be limited to MAX_DEFAULT_WORKERS in
    container environments where no worker-multiplier configuration
    option has been set.
@returns int: number of worker processes to use
'''
multiplier = config('worker-multiplier') or DEFAULT_MULTIPLIER
count = int(_num_cpus() * multiplier)
if multiplier > 0 and count == 0:
count = 1
if config('worker-multiplier') is None and is_container():
# NOTE(jamespage): Limit unconfigured worker-multiplier
# to MAX_DEFAULT_WORKERS to avoid insane
# worker configuration in LXD containers
# on large servers
# Reference: https://pad.lv/1665270
count = min(count, MAX_DEFAULT_WORKERS)
return count
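# Worked example (not part of the upstream module): on an 8-core unit with
# worker-multiplier unset, DEFAULT_MULTIPLIER gives 8 * 2 = 16 workers; on the
# same unit running inside a container (LXD) with worker-multiplier still
# unset, the result is capped at MAX_DEFAULT_WORKERS, i.e. 4.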
def _num_cpus():
'''
    Compatibility wrapper for calculating the number of CPUs
a unit has.
@returns: int: number of CPU cores detected
'''
try:
return psutil.cpu_count()
except AttributeError:
return psutil.NUM_CPUS
class WorkerConfigContext(OSContextGenerator):
def __call__(self):
ctxt = {"workers": _calculate_workers()}
return ctxt
class WSGIWorkerConfigContext(WorkerConfigContext):
def __init__(self, name=None, script=None, admin_script=None,
public_script=None, process_weight=1.00,
admin_process_weight=0.25, public_process_weight=0.75):
self.service_name = name
self.user = name
self.group = name
self.script = script
self.admin_script = admin_script
self.public_script = public_script
self.process_weight = process_weight
self.admin_process_weight = admin_process_weight
self.public_process_weight = public_process_weight
def __call__(self):
total_processes = _calculate_workers()
ctxt = {
"service_name": self.service_name,
"user": self.user,
"group": self.group,
"script": self.script,
"admin_script": self.admin_script,
"public_script": self.public_script,
"processes": int(math.ceil(self.process_weight * total_processes)),
"admin_processes": int(math.ceil(self.admin_process_weight *
total_processes)),
"public_processes": int(math.ceil(self.public_process_weight *
total_processes)),
"threads": 1,
"usr_bin": git_determine_usr_bin(),
"python_path": git_determine_python_path(),
}
return ctxt
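# Worked example (not part of the upstream module): if _calculate_workers()
# returns 8, the default weights give
#   processes       = ceil(1.00 * 8) = 8
#   admin_processes = ceil(0.25 * 8) = 2
#   public_processes= ceil(0.75 * 8) = 6
# so the admin and public WSGI endpoints get a fraction of the main pool.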
class ZeroMQContext(OSContextGenerator):
interfaces = ['zeromq-configuration']
def __call__(self):
ctxt = {}
if is_relation_made('zeromq-configuration', 'host'):
for rid in relation_ids('zeromq-configuration'):
for unit in related_units(rid):
ctxt['zmq_nonce'] = relation_get('nonce', unit, rid)
ctxt['zmq_host'] = relation_get('host', unit, rid)
ctxt['zmq_redis_address'] = relation_get(
'zmq_redis_address', unit, rid)
return ctxt
class NotificationDriverContext(OSContextGenerator):
def __init__(self, zmq_relation='zeromq-configuration',
amqp_relation='amqp'):
"""
:param zmq_relation: Name of Zeromq relation to check
"""
self.zmq_relation = zmq_relation
self.amqp_relation = amqp_relation
def __call__(self):
ctxt = {'notifications': 'False'}
if is_relation_made(self.amqp_relation):
ctxt['notifications'] = "True"
return ctxt
class SysctlContext(OSContextGenerator):
"""This context check if the 'sysctl' option exists on configuration
then creates a file with the loaded contents"""
def __call__(self):
sysctl_dict = config('sysctl')
if sysctl_dict:
sysctl_create(sysctl_dict,
'/etc/sysctl.d/50-{0}.conf'.format(charm_name()))
return {'sysctl': sysctl_dict}
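# Illustrative sketch (not part of the upstream module; the option value is
# hypothetical): assuming the charm config held
#   sysctl: "{ net.ipv4.ip_forward: 1, vm.swappiness: 10 }"
# (a YAML mapping passed as a string), sysctl_create would write those
# settings to /etc/sysctl.d/50-<charm_name>.conf and the template would
# receive {'sysctl': <that string>}.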
class NeutronAPIContext(OSContextGenerator):
'''
Inspects current neutron-plugin-api relation for neutron settings. Return
defaults if it is not present.
'''
interfaces = ['neutron-plugin-api']
def __call__(self):
self.neutron_defaults = {
'l2_population': {
'rel_key': 'l2-population',
'default': False,
},
'overlay_network_type': {
'rel_key': 'overlay-network-type',
'default': 'gre',
},
'neutron_security_groups': {
'rel_key': 'neutron-security-groups',
'default': False,
},
'network_device_mtu': {
'rel_key': 'network-device-mtu',
'default': None,
},
'enable_dvr': {
'rel_key': 'enable-dvr',
'default': False,
},
'enable_l3ha': {
'rel_key': 'enable-l3ha',
'default': False,
},
'dns_domain': {
'rel_key': 'dns-domain',
'default': None,
},
}
ctxt = self.get_neutron_options({})
for rid in relation_ids('neutron-plugin-api'):
for unit in related_units(rid):
rdata = relation_get(rid=rid, unit=unit)
if 'l2-population' in rdata:
ctxt.update(self.get_neutron_options(rdata))
return ctxt
def get_neutron_options(self, rdata):
settings = {}
for nkey in self.neutron_defaults.keys():
defv = self.neutron_defaults[nkey]['default']
rkey = self.neutron_defaults[nkey]['rel_key']
if rkey in rdata.keys():
if type(defv) is bool:
settings[nkey] = bool_from_string(rdata[rkey])
else:
settings[nkey] = rdata[rkey]
else:
settings[nkey] = defv
return settings
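# Illustrative sketch (not part of the upstream module): with no
# neutron-plugin-api relation present, __call__ simply returns the defaults:
#   {'l2_population': False, 'overlay_network_type': 'gre',
#    'neutron_security_groups': False, 'network_device_mtu': None,
#    'enable_dvr': False, 'enable_l3ha': False, 'dns_domain': None}
# When relation data is present, boolean defaults are coerced with
# bool_from_string, so e.g. 'l2-population': 'True' becomes l2_population=True.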
class ExternalPortContext(NeutronPortContext):
def __call__(self):
ctxt = {}
ports = config('ext-port')
if ports:
ports = [p.strip() for p in ports.split()]
ports = self.resolve_ports(ports)
if ports:
ctxt = {"ext_port": ports[0]}
napi_settings = NeutronAPIContext()()
mtu = napi_settings.get('network_device_mtu')
if mtu:
ctxt['ext_port_mtu'] = mtu
return ctxt
class DataPortContext(NeutronPortContext):
def __call__(self):
ports = config('data-port')
if ports:
# Map of {port/mac:bridge}
portmap = parse_data_port_mappings(ports)
ports = portmap.keys()
# Resolve provided ports or mac addresses and filter out those
# already attached to a bridge.
resolved = self.resolve_ports(ports)
# FIXME: is this necessary?
normalized = {get_nic_hwaddr(port): port for port in resolved
if port not in ports}
normalized.update({port: port for port in resolved
if port in ports})
if resolved:
return {normalized[port]: bridge for port, bridge in
six.iteritems(portmap) if port in normalized.keys()}
return None
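# Illustrative sketch (not part of the upstream module; the port, MAC and
# bridge names are hypothetical): assuming the data-port config were
# "br-data:eth1 br-ex:aa:bb:cc:dd:ee:ff", parse_data_port_mappings would yield
# {'eth1': 'br-data', 'aa:bb:cc:dd:ee:ff': 'br-ex'}; after resolve_ports() the
# MAC entry is normalised back to its NIC name, so the returned dict maps
# {nic_name: bridge} for every resolvable port.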
class PhyNICMTUContext(DataPortContext):
def __call__(self):
ctxt = {}
mappings = super(PhyNICMTUContext, self).__call__()
if mappings and mappings.keys():
ports = sorted(mappings.keys())
napi_settings = NeutronAPIContext()()
mtu = napi_settings.get('network_device_mtu')
all_ports = set()
            # If any of the ports is a vlan device, its underlying device
            # must have the MTU applied first.
for port in ports:
for lport in glob.glob("/sys/class/net/%s/lower_*" % port):
lport = os.path.basename(lport)
all_ports.add(lport.split('_')[1])
all_ports = list(all_ports)
all_ports.extend(ports)
if mtu:
ctxt["devs"] = '\\n'.join(all_ports)
ctxt['mtu'] = mtu
return ctxt
class NetworkServiceContext(OSContextGenerator):
def __init__(self, rel_name='quantum-network-service'):
self.rel_name = rel_name
self.interfaces = [rel_name]
def __call__(self):
for rid in relation_ids(self.rel_name):
for unit in related_units(rid):
rdata = relation_get(rid=rid, unit=unit)
ctxt = {
'keystone_host': rdata.get('keystone_host'),
'service_port': rdata.get('service_port'),
'auth_port': rdata.get('auth_port'),
'service_tenant': rdata.get('service_tenant'),
'service_username': rdata.get('service_username'),
'service_password': rdata.get('service_password'),
'quantum_host': rdata.get('quantum_host'),
'quantum_port': rdata.get('quantum_port'),
'quantum_url': rdata.get('quantum_url'),
'region': rdata.get('region'),
'service_protocol':
rdata.get('service_protocol') or 'http',
'auth_protocol':
rdata.get('auth_protocol') or 'http',
'api_version':
rdata.get('api_version') or '2.0',
}
if self.context_complete(ctxt):
return ctxt
return {}
class InternalEndpointContext(OSContextGenerator):
"""Internal endpoint context.
This context provides the endpoint type used for communication between
services e.g. between Nova and Cinder internally. Openstack uses Public
endpoints by default so this allows admins to optionally use internal
endpoints.
"""
def __call__(self):
return {'use_internal_endpoints': config('use-internal-endpoints')}
class AppArmorContext(OSContextGenerator):
"""Base class for apparmor contexts."""
def __init__(self, profile_name=None):
self._ctxt = None
self.aa_profile = profile_name
self.aa_utils_packages = ['apparmor-utils']
@property
def ctxt(self):
if self._ctxt is not None:
return self._ctxt
self._ctxt = self._determine_ctxt()
return self._ctxt
def _determine_ctxt(self):
"""
        Validate that the aa-profile-mode setting is disable, enforce, or complain.
:return ctxt: Dictionary of the apparmor profile or None
"""
if config('aa-profile-mode') in ['disable', 'enforce', 'complain']:
ctxt = {'aa_profile_mode': config('aa-profile-mode'),
'ubuntu_release': lsb_release()['DISTRIB_RELEASE']}
if self.aa_profile:
ctxt['aa_profile'] = self.aa_profile
else:
ctxt = None
return ctxt
def __call__(self):
return self.ctxt
def install_aa_utils(self):
"""
Install packages required for apparmor configuration.
"""
log("Installing apparmor utils.")
ensure_packages(self.aa_utils_packages)
def manually_disable_aa_profile(self):
"""
Manually disable an apparmor profile.
        If aa-profile-mode is set to disabled (the default), this is required
        because the template has been written but apparmor is not yet aware of
        the profile, so aa-disable on the profile fails. Without this the
        profile would kick into enforce mode on the next service restart.
"""
profile_path = '/etc/apparmor.d'
disable_path = '/etc/apparmor.d/disable'
if not os.path.lexists(os.path.join(disable_path, self.aa_profile)):
os.symlink(os.path.join(profile_path, self.aa_profile),
os.path.join(disable_path, self.aa_profile))
def setup_aa_profile(self):
"""
Setup an apparmor profile.
The ctxt dictionary will contain the apparmor profile mode and
the apparmor profile name.
Makes calls out to aa-disable, aa-complain, or aa-enforce to setup
the apparmor profile.
"""
self()
if not self.ctxt:
log("Not enabling apparmor Profile")
return
self.install_aa_utils()
cmd = ['aa-{}'.format(self.ctxt['aa_profile_mode'])]
cmd.append(self.ctxt['aa_profile'])
log("Setting up the apparmor profile for {} in {} mode."
"".format(self.ctxt['aa_profile'], self.ctxt['aa_profile_mode']))
try:
check_call(cmd)
except CalledProcessError as e:
# If aa-profile-mode is set to disabled (default) manual
# disabling is required as the template has been written but
# apparmor is yet unaware of the profile and aa-disable aa-profile
# fails. If aa-disable learns to read profile files first this can
# be removed.
if self.ctxt['aa_profile_mode'] == 'disable':
log("Manually disabling the apparmor profile for {}."
"".format(self.ctxt['aa_profile']))
self.manually_disable_aa_profile()
return
status_set('blocked', "Apparmor profile {} failed to be set to {}."
"".format(self.ctxt['aa_profile'],
self.ctxt['aa_profile_mode']))
raise e
class MemcacheContext(OSContextGenerator):
"""Memcache context
This context provides options for configuring a local memcache client and
server for both IPv4 and IPv6
"""
def __init__(self, package=None):
"""
@param package: Package to examine to extrapolate OpenStack release.
Used when charms have no openstack-origin config
option (ie subordinates)
"""
self.package = package
def __call__(self):
ctxt = {}
ctxt['use_memcache'] = enable_memcache(package=self.package)
if ctxt['use_memcache']:
# Trusty version of memcached does not support ::1 as a listen
# address so use host file entry instead
release = lsb_release()['DISTRIB_CODENAME'].lower()
if is_ipv6_disabled():
if CompareHostReleases(release) > 'trusty':
ctxt['memcache_server'] = '127.0.0.1'
else:
ctxt['memcache_server'] = 'localhost'
ctxt['memcache_server_formatted'] = '127.0.0.1'
ctxt['memcache_port'] = '11211'
ctxt['memcache_url'] = '{}:{}'.format(
ctxt['memcache_server_formatted'],
ctxt['memcache_port'])
else:
if CompareHostReleases(release) > 'trusty':
ctxt['memcache_server'] = '::1'
else:
ctxt['memcache_server'] = 'ip6-localhost'
ctxt['memcache_server_formatted'] = '[::1]'
ctxt['memcache_port'] = '11211'
ctxt['memcache_url'] = 'inet6:{}:{}'.format(
ctxt['memcache_server_formatted'],
ctxt['memcache_port'])
return ctxt
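# Illustrative sketch (not part of the upstream module): on a xenial unit with
# IPv6 enabled and memcache in use, the resulting context would be roughly:
#   {'use_memcache': True, 'memcache_server': '::1',
#    'memcache_server_formatted': '[::1]', 'memcache_port': '11211',
#    'memcache_url': 'inet6:[::1]:11211'}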
# --- end of source file: openstack-install/charm/trusty/charm-keystone/charmhelpers/contrib/openstack/context.py (repo: konono/equlipse, MIT license) ---
import calendar
import requests
import xmltodict
import logging
import module.collector.config as config
import module.common.const as const
from datetime import datetime
from module.common.topologydb import *
from module.common.md import post_md
from module.common.sdn_md import create_monitoring_data_xml,DBUploader
from module.common.util import to_array
# constants
COL_NAME = 'monitoring-data(sdn)'
POST_URI = config.post_uri + "/" + const.TYPE_MON_SDN
PS_MSG_TEMPLATE = """
<SOAP-ENV:Envelope
xmlns:SOAP-ENC="http://schemas.xmlsoap.org/soap/encoding/"
xmlns:xsd="http://www.w3.org/2001/XMLSchema"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:SOAP-ENV="http://schemas.xmlsoap.org/soap/envelope/">
<SOAP-ENV:Header/>
<SOAP-ENV:Body>
</SOAP-ENV:Body>
</SOAP-ENV:Envelope>
"""
# logger.
logger = logging.getLogger(const.MODULE_NAME_COL)
# set data uploader script file path.
db_upld = DBUploader(log_name=const.MODULE_NAME_COL,
db_name=config.mon_data_sdn_db,
db_host=config.db_addr,db_port=config.db_port,
db_user=config.db_user,db_pass=config.db_pass)
class MonitoringDataSDN():
    # before monitoring timestamp (stime)
before_time = calendar.timegm(datetime.utcnow().timetuple())
def __create_ps_msg(self,nmwg_msg):
msg_template = PS_MSG_TEMPLATE
xd_root = xmltodict.parse(msg_template)
xd_msg = xmltodict.parse(nmwg_msg)
xd_root['SOAP-ENV:Envelope']['SOAP-ENV:Body'] = xd_msg
return xmltodict.unparse(xd_root)
def __post_sequel_service(self,msg):
logger.debug(msg)
post_uri = config.sequel_service_uri + "/snmpmg"
headers = {'SOAPAction': 'http://ggf.org/ns/nmwg/base/2.0/message/'}
return requests.post(post_uri, data=msg, headers=headers)
def __check_ps_res(self,req_name,msg):
logger.debug(msg)
xd_root = xmltodict.parse(msg)
xd_body = xd_root['SOAP-ENV:Envelope']['SOAP-ENV:Body']
msg_type = xd_body['nmwg:message']['@type']
if not msg_type == 'ErrorResponse' and not msg_type == 'QueryResponse':
return True
if not xd_body['nmwg:message'].has_key('nmwg:metadata'):
return True
event_type = xd_body['nmwg:message']['nmwg:metadata']['nmwg:eventType']
if 'error' in event_type.lower():
logger.error('{0} error.({1})'.format(req_name,xd_body['nmwg:message']['nmwg:data']['nmwgr:datum']['#text']))
return False
return True
def __get_tblsaffix(self,stime,etime):
logger.debug("get table-saffix({0}-{1}).".format(stime,etime))
table_saffix_msg = """
<nmwg:message
xmlns:nmwgt="http://ggf.org/ns/nmwg/topology/2.0/"
xmlns:nmwg="http://ggf.org/ns/nmwg/base/2.0/"
xmlns:select="http://ggf.org/ns/nmwg/ops/select/2.0/"
type="TableSuffixRequest">
<nmwg:metadata id="m1">
<nmwg:eventType>http://ggf.org/ns/nmwg/sequel/20090610</nmwg:eventType>
<nmwg:parameter name="stime">{0}</nmwg:parameter>
<nmwg:parameter name="etime">{1}</nmwg:parameter>
</nmwg:metadata>
<nmwg:data metadataIdRef="m1"/>
</nmwg:message>
""".format(stime,etime)
msg = self.__create_ps_msg(table_saffix_msg)
logger.debug('request perfSONAR')
res = self.__post_sequel_service(msg)
logger.debug('response perfSONAR')
        if self.__check_ps_res('TableSuffixRequest',res.text) == False:
            logger.debug('TableSuffixRequest no response text.')
return None
xd_root = xmltodict.parse(res.text)
xd_body = xd_root['SOAP-ENV:Envelope']['SOAP-ENV:Body']
if not xd_body['nmwg:message']['nmwg:data']:
            logger.debug('TableSuffixRequest no data.')
return None
data_list = to_array(xd_body['nmwg:message']['nmwg:data']['sequel:datum'])
tblsaffix_list = []
for data in data_list:
tblsaffix_list.append(data['@value'])
return tblsaffix_list
def __get_monitoring_data(self,node_name,port,stime,etime,tblsaffix_list):
logger.debug("get monitoring-data.")
sql_metaid = "(SELECT metaID FROM metaData " \
+ "WHERE node_name='{0}' AND port='{1}')"\
.format(node_name,port)
# search database.(data)
sql_base = "(SELECT timestamp,{0} FROM {1} WHERE metaID={2}"\
+ " AND timestamp >= {0}".format(stime)\
+ " AND timestamp < {0})".format(etime)
mon_item_list = []
for mon_item in config.ps_sdn_mon_item_list:
mon_item_list.append(mon_item['data-name'])
if not mon_item_list:
logger.debug('monitoring-item is not specified.')
return None
items = ",".join(mon_item_list)
sql_list = []
for tblsaffix in tblsaffix_list:
sql_list.append(sql_base.format(items,'data_'+tblsaffix,sql_metaid))
if not sql_list:
logger.debug('sql_list no data.')
return None
sql = " UNION ".join(sql_list)
sql += " ORDER BY timestamp"
logger.debug(sql)
query_msg = """
<nmwg:message
xmlns:nmwgt="http://ggf.org/ns/nmwg/topology/2.0/"
xmlns:nmwg="http://ggf.org/ns/nmwg/base/2.0/"
xmlns:select="http://ggf.org/ns/nmwg/ops/select/2.0/"
type="QueryRequest">
<nmwg:metadata id="m1">
<nmwg:eventType>http://ggf.org/ns/nmwg/sequel/20090610</nmwg:eventType>
<nmwg:parameter name="key">timestamp</nmwg:parameter>
<nmwg:parameter name="query">{0}</nmwg:parameter>
</nmwg:metadata>
<nmwg:data metadataIdRef="m1"/>
</nmwg:message>
""".format(sql)
msg = self.__create_ps_msg(query_msg)
logger.debug('request perfSONAR')
res = self.__post_sequel_service(msg)
logger.debug('response perfSONAR')
if self.__check_ps_res('QueryRequest',res.text) == False:
            logger.debug('QueryRequest no response text.')
return None
xd_root = xmltodict.parse(res.text)
xd_body = xd_root['SOAP-ENV:Envelope']['SOAP-ENV:Body']
if not xd_body['nmwg:message']['nmwg:data']:
logger.debug('QueryRequest no data.')
return None
data_list = to_array(xd_body['nmwg:message']['nmwg:data']['nmwg:commonTime'])
val_list = list()
for data in data_list:
val_dict = dict()
datum_list = to_array(data['sequel:datum'])
for datum in datum_list:
if datum['@value']:
val_dict[datum['@name']] = datum['@value']
val_list.append(val_dict)
res_dict = {'node_name':node_name,'port':port,'val_list':val_list}
return res_dict
def __aggregate_avg(self,data_name,val_list):
### val_list:list(val_dict[param_name:value])}
total_val = 0
count = 0
timestamp = 0
for val in val_list:
if not val.has_key(data_name):
continue
total_val += int(val[data_name])
            # keep the latest timestamp seen
if timestamp < int(val['timestamp']):
timestamp = int(val['timestamp'])
count += 1
if count == 0:
return None,timestamp
agg_val = total_val / count
return agg_val,timestamp
def __aggregate_last(self,data_name,val_list):
### val_list:list(val_dict[param_name:value])}
last_val = 0
timestamp = 0
for val in val_list:
if not val.has_key(data_name):
continue
            # keep the latest timestamp seen
if timestamp < int(val['timestamp']):
last_val = val[data_name]
timestamp = int(val['timestamp'])
if timestamp == 0:
return None,timestamp
return last_val,timestamp
def __aggregate(self,md_dict,timestamp):
### md_dict={type:switch,network_name:xxx,node_name:xxx,
### port:xxx,val_list:list(val_dict[param_name:value])}
        # Aggregate each item of monitoring data
        # (average value or last value).
agg_val_list = []
agg_val_dict = dict()
for item_dict in config.ps_sdn_mon_item_list:
data_name = item_dict['data-name']
agg_type = item_dict['agg-type']
logger.debug('data_name={0} agg_type={1}'.format(data_name,agg_type))
if agg_type == const.TYPE_AGG_AVG:
                # add up the values and increment the counter.
agg_val,agg_ts = self.__aggregate_avg(data_name,md_dict['val_list'])
elif agg_type == const.TYPE_AGG_LAST:
                # compare the timestamps and keep the latest value.
agg_val,agg_ts = self.__aggregate_last(data_name,md_dict['val_list'])
            else:
                logger.warn('aggregate type is invalid.({0})'.format(agg_type))
continue
if agg_val is None:
logger.warn('aggregate value is null.')
continue
logger.debug('timestamp={0} agg_val={1}'.format(agg_ts,agg_val))
# Timestamp is common to all of the aggregate data
agg_val_dict['timestamp'] = str(agg_ts)
agg_val_dict[data_name] = str(agg_val)
            # Store in the list; only one aggregate dict exists (overwritten each pass)
agg_val_list.append(agg_val_dict)
md_dict['val_list'] = agg_val_list
return md_dict
def main(self):
        # now monitoring timestamp (etime)
now_time = 0
try:
print(COL_NAME + ' -start-')
logger.debug(COL_NAME + ' -start-')
# get now time.(UTC:0)
now_time = calendar.timegm(datetime.utcnow().timetuple())
# get monitoring-data from SequelService.
tblsaffix_list = self.__get_tblsaffix(self.before_time,now_time)
if not tblsaffix_list:
logger.debug('tblsaffix_list is no data.')
return
# get all of the switch-I/F(port) from DB.
if_list = get_all_sw_if()
all_md_list = []
for interface in if_list:
if interface.node.network.type == const.TYPE_NW_SLICE:
logger.debug('(skip)slice interface is not target.')
continue
# get monitoring-data from SequelService.
node_name = interface.node.node_name
port = interface.port
md_dict = self.__get_monitoring_data(node_name,port,self.before_time,now_time,tblsaffix_list)
if not md_dict:
logger.debug('monitoring-data is no data.(node={0},port={1})'.format(node_name,port))
continue
logger.debug(md_dict)
# aggregate the monitoring-data.
if config.aggregate_flg == 1:
logger.debug('aggregate the monitoring-data.')
md_dict = self.__aggregate(md_dict,now_time)
logger.debug(md_dict)
md_dict['type'] = interface.node.type
md_dict['network_name'] = interface.node.network_name
### md_dict={type:switch,network_name:xxx,node_name:xxx,
### port:xxx,val_list:list(val_dict[param_name:value])}
all_md_list.append(md_dict)
if not all_md_list:
logger.debug('monitoring-data is no data.(all interface)')
return
# parse monitoring-data-list to monitoring-data-xml.
md_xml = create_monitoring_data_xml(logger,all_md_list)
if not md_xml:
logger.debug('monitoring-data-xml is null.')
return
logger.debug(md_xml)
# upload monitoring-data to DB.
logger.debug('upload monitoring-data to DB.')
if not db_upld.upload_monitoring_data_all(md_xml):
logger.debug('upload monitoring-data is null.')
return
# post the monitoring-data to the master-monitoring-server.
logger.debug('post the monitoring-data to the master-monitoring-server.')
res_flg,res = post_md(POST_URI,md_xml,'yes')
if res_flg is False:
logger.error('post monitoring-data error.(post_uri={0})'.format(POST_URI))
if res:
logger.debug("HTTP Response({0}):{1}".format(res.status_code,res.text))
except Exception:
logger.exception(COL_NAME)
print(COL_NAME + ' -exception-')
finally:
self.before_time = now_time
logger.debug(COL_NAME + ' -end-')
print(COL_NAME + ' -end-')
return
# --- end of source file: mseu/module/collector/ps/sdn.py (repo: ict-felix/stack, Apache-2.0 license) ---
"""
Classes, functions and utilities related to hyde layouts
"""
import os
from fswrap import File, Folder
HYDE_DATA = "HYDE_DATA"
LAYOUTS = "layouts"
class Layout(object):
"""
Represents a layout package
"""
@staticmethod
def find_layout(layout_name='basic'):
"""
Find the layout with a given name.
Search order:
1. env(HYDE_DATA)
2. <hyde script path>/layouts/
"""
layout_folder = None
if HYDE_DATA in os.environ:
layout_folder = Layout._get_layout_folder(
os.environ[HYDE_DATA], layout_name)
if not layout_folder:
layout_folder = Layout._get_layout_folder(
File(__file__).parent, layout_name)
return layout_folder
@staticmethod
def _get_layout_folder(root, layout_name='basic'):
"""
Finds the layout folder from the given root folder.
If it does not exist, return None
"""
layouts_folder = Folder(unicode(root)).child_folder(LAYOUTS)
layout_folder = layouts_folder.child_folder(layout_name)
return layout_folder if layout_folder.exists else None
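# Illustrative usage sketch (not part of the original file): with HYDE_DATA
# unset, the lookup falls back to the layouts folder shipped next to this
# module.
#   folder = Layout.find_layout('basic')
#   # folder is a fswrap Folder pointing at .../layouts/basic, or None if the
#   # named layout does not exist in either search location.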
# --- end of source file: hyde/layout.py (repo: jd/hyde, MIT license) ---
from math import radians
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
from matplotlib import cm
angle = radians(10.)
patches = int(round(radians(360.)/angle))
theta = np.arange(0,radians(360.),angle)
count = [0]*patches
for i, item in enumerate(some_array_of_azimuth_directions):
temp = int((item - item%angle)/angle)
count[temp] += 1
width = angle * np.ones(patches)
# force square figure and square axes looks better for polar, IMO
fig = plt.figure(figsize=(8,8))
ax = fig.add_axes([0.1, 0.1, 0.8, 0.8], polar=True)
rmax = max(count) + 1
ax.set_rlim(0,rmax)
ax.set_theta_offset(np.pi/2)
ax.set_thetagrids(np.arange(0,360,10))
ax.set_theta_direction(-1)
# project strike distribution as histogram bars
bars = ax.bar(theta, count, width=width)
r_values = []
colors = []
for r,bar in zip(count, bars):
r_values.append(r/float(max(count)))
colors.append(cm.jet(r_values[-1], alpha=0.5))
bar.set_facecolor(colors[-1])
bar.set_edgecolor('grey')
bar.set_alpha(0.5)
# Add colorbar, make sure to specify tick locations to match desired ticklabels
colorlist = []
r_values.sort()
values = []
for val in r_values:
if val not in values:
values.append(val*float(max(count)))
color = cm.jet(val, alpha=0.5)
if color not in colorlist:
colorlist.append(color)
cpt = mpl.colors.ListedColormap(colorlist)
bounds = range(max(count)+1)
norm = mpl.colors.BoundaryNorm(values, cpt.N-1)
cax = fig.add_axes([0.97, 0.3, 0.03, 0.4])
cb = mpl.colorbar.ColorbarBase(cax, cmap=cpt,
norm=norm,
boundaries=bounds,
# Make the length of each extension
# the same as the length of the
# interior colors:
extendfrac='auto',
ticks=[bounds[i] for i in range(0, len(bounds), 2)],
#ticks=bounds,
spacing='uniform')
# --- end of source file: cpp_lib/backup/10_plotly/rose.py (repo: Leviyu/Maligaro, MIT license) ---
"""music.py
Joint AoD/AoA estimation using the 2D MUSIC algorithm,
using a raw.dat file as input.
Naoufal Mahfoudi (c) 2016 mohamed-naoufal.mahfoudi@inria.fr
"""
from numpy import linalg as ln
from numpy import *
from detect_peaks import *
from phase_correction import *
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
import numpy as np
from matplotlib import cm
def music(csi_corr, csi_target, Ntx, Nrx, d_tx, d_rx, t):
"""Joint AoD AoA estimation using 2D music Algorithm
using raw.dat file as Input
Args:
csi_corr: The reference data file corresponding to the calibration phase.
csi_target: The data file corresponding to the monitored yaw motion.
Ntx: Number of transmitting antennas.
Nrx: Number of receiving antennas.
d_tx: distance between the Tx antennas.
d_rx: distance between the Rx antennas.
t: The packet index number
Returns:
        An array 'angles' with the estimated DoA and DoD, in that order,
        and a 2D surface plot of the 2D MUSIC pseudo-spectrum.
"""
In = 0
s = phase_correction(csi_corr, csi_target)
s_lin = (s[:, :, 0, t:t + 2].reshape(6, 2, order='F'))
    '''Compute the covariance matrix and the eigendecomposition'''
R_hat = np.cov(s_lin)
D, Q = ln.eig(R_hat)
'''Sort the eigenvalues in D'''
Do = np.abs(D)
D = np.sort(Do)[::-1]
I = np.argsort(Do)[::-1]
Q = Q[:, I]
    ''' Compute the number of signals that are significant'''
    T = np.cumsum(np.real(D))
    for i in range(np.size(T)):
        if T[i] >= 0.99 * T[-1]:
            In = i
            break
''' Get the signal eigenvectors'''
In = 0 # take the first signal
Qs = Q[:, :In]
''' Get the noise eigenvectors'''
Qn = Q[:, In + 1:]
''' Angles at which MUSIC Pseudospectrum will be computed '''
angles1 = np.arange(-90, 90, 1)
angles2 = np.arange(-90, 90, 1)
    '''Compute steering vectors for the corresponding values in angles'''
a1 = np.exp(-1.j * 2 * np.pi * d_rx * np.tensordot(arange(Nrx), sin(angles1 * np.pi / 180), 0))
    a2 = np.exp(-1.j * 2 * np.pi * d_tx * np.tensordot(arange(Ntx), sin(angles2 * np.pi / 180), 0))
'''Compute MUSIC "spectrum" '''
music_spectrum = np.zeros((np.size(angles1), np.size(angles2)), dtype=complex)
for k in range(1, np.size(angles2)):
for j in range(1, np.size(angles1)):
K = np.kron(a1[:, j], a2[:, k])
s = dot(K.T, Qn)
music_spectrum[j, k] = 1 / dot(abs(s), abs(s).T)
''' compute the mesh and plot the surf of the pseudospectrum '''
fig = plt.figure()
ax = fig.gca(projection='3d')
x = angles2
y = angles1
X, Y = np.meshgrid(x, y)
Z = np.abs(np.squeeze(music_spectrum))
ax = fig.add_subplot(111, projection='3d')
ax.set_ylabel('AoA')
ax.set_xlabel('AoD')
ax.set_xlim3d(-90, 90)
ax.set_ylim3d(-90, 90)
ax.plot_surface(X, Y, Z, rstride=2, cstride=2, cmap=cm.jet, alpha=0.7, linewidth=0.25)
''' detect the peaks corresponding to DoD and DoA '''
detect = detect_peaks(Z)
index_max = np.column_stack(np.where(detect))
x_ind = index_max[:, 0]
y_ind = index_max[:, 1]
tab = (np.transpose(np.array((Z[x_ind, y_ind], x[x_ind], y[y_ind])))).tolist()
tab.sort(key=lambda e: e[0], reverse=True)
myarray = np.asarray(tab[0])
angles = myarray[1:]
plt.show()
return angles
# --- end of source file: code/music.py (repo: naoufal51/Orion, MIT license) ---
from __future__ import unicode_literals
from .responses import KmsResponse
url_bases = ["https?://kms.(.+).amazonaws.com"]
url_paths = {"{0}/$": KmsResponse.dispatch}
# --- end of source file: moto/kms/urls.py (repo: william-richard/moto, Apache-2.0 license) ---
import json
import random
from django.conf import settings
from django.contrib.messages import constants
from django.contrib.messages.storage.base import Message
from django.contrib.messages.storage.cookie import (
CookieStorage, MessageDecoder, MessageEncoder,
)
from django.test import SimpleTestCase, override_settings
from django.utils.crypto import get_random_string
from django.utils.safestring import SafeData, mark_safe
from .base import BaseTests
def set_cookie_data(storage, messages, invalid=False, encode_empty=False):
"""
Set ``request.COOKIES`` with the encoded data and remove the storage
backend's loaded data cache.
"""
encoded_data = storage._encode(messages, encode_empty=encode_empty)
if invalid:
# Truncate the first character so that the hash is invalid.
encoded_data = encoded_data[1:]
storage.request.COOKIES = {CookieStorage.cookie_name: encoded_data}
if hasattr(storage, '_loaded_data'):
del storage._loaded_data
def stored_cookie_messages_count(storage, response):
"""
Return an integer containing the number of messages stored.
"""
# Get a list of cookies, excluding ones with a max-age of 0 (because
# they have been marked for deletion).
cookie = response.cookies.get(storage.cookie_name)
if not cookie or cookie['max-age'] == 0:
return 0
data = storage._decode(cookie.value)
if not data:
return 0
if data[-1] == CookieStorage.not_finished:
data.pop()
return len(data)
@override_settings(SESSION_COOKIE_DOMAIN='.example.com', SESSION_COOKIE_SECURE=True, SESSION_COOKIE_HTTPONLY=True)
class CookieTests(BaseTests, SimpleTestCase):
storage_class = CookieStorage
def stored_messages_count(self, storage, response):
return stored_cookie_messages_count(storage, response)
def test_get(self):
storage = self.storage_class(self.get_request())
# Set initial data.
example_messages = ['test', 'me']
set_cookie_data(storage, example_messages)
# The message contains what's expected.
self.assertEqual(list(storage), example_messages)
@override_settings(SESSION_COOKIE_SAMESITE='Strict')
    def test_cookie_settings(self):
"""
CookieStorage honors SESSION_COOKIE_DOMAIN, SESSION_COOKIE_SECURE, and
SESSION_COOKIE_HTTPONLY (#15618, #20972).
"""
# Test before the messages have been consumed
storage = self.get_storage()
response = self.get_response()
storage.add(constants.INFO, 'test')
storage.update(response)
messages = storage._decode(response.cookies['messages'].value)
self.assertEqual(len(messages), 1)
self.assertEqual(messages[0].message, 'test')
self.assertEqual(response.cookies['messages']['domain'], '.example.com')
self.assertEqual(response.cookies['messages']['expires'], '')
self.assertIs(response.cookies['messages']['secure'], True)
self.assertIs(response.cookies['messages']['httponly'], True)
self.assertEqual(response.cookies['messages']['samesite'], 'Strict')
# Test deletion of the cookie (storing with an empty value) after the messages have been consumed
storage = self.get_storage()
response = self.get_response()
storage.add(constants.INFO, 'test')
for m in storage:
pass # Iterate through the storage to simulate consumption of messages.
storage.update(response)
self.assertEqual(response.cookies['messages'].value, '')
self.assertEqual(response.cookies['messages']['domain'], '.example.com')
self.assertEqual(response.cookies['messages']['expires'], 'Thu, 01 Jan 1970 00:00:00 GMT')
self.assertEqual(
response.cookies['messages']['samesite'],
settings.SESSION_COOKIE_SAMESITE,
)
def test_get_bad_cookie(self):
request = self.get_request()
storage = self.storage_class(request)
# Set initial (invalid) data.
example_messages = ['test', 'me']
set_cookie_data(storage, example_messages, invalid=True)
# The message actually contains what we expect.
self.assertEqual(list(storage), [])
def test_max_cookie_length(self):
"""
If the data exceeds what is allowed in a cookie, older messages are
removed before saving (and returned by the ``update`` method).
"""
storage = self.get_storage()
response = self.get_response()
# When storing as a cookie, the cookie has constant overhead of approx
# 54 chars, and each message has a constant overhead of about 37 chars
# and a variable overhead of zero in the best case. We aim for a message
# size which will fit 4 messages into the cookie, but not 5.
# See also FallbackTest.test_session_fallback
msg_size = int((CookieStorage.max_cookie_size - 54) / 4.5 - 37)
first_msg = None
# Generate the same (tested) content every time that does not get run
# through zlib compression.
random.seed(42)
for i in range(5):
msg = get_random_string(msg_size)
storage.add(constants.INFO, msg)
if i == 0:
first_msg = msg
unstored_messages = storage.update(response)
cookie_storing = self.stored_messages_count(storage, response)
self.assertEqual(cookie_storing, 4)
self.assertEqual(len(unstored_messages), 1)
self.assertEqual(unstored_messages[0].message, first_msg)
def test_message_rfc6265(self):
non_compliant_chars = ['\\', ',', ';', '"']
messages = ['\\te,st', ';m"e', '\u2019', '123"NOTRECEIVED"']
storage = self.get_storage()
encoded = storage._encode(messages)
for illegal in non_compliant_chars:
self.assertEqual(encoded.find(illegal), -1)
def test_json_encoder_decoder(self):
"""
A complex nested data structure containing Message
instances is properly encoded/decoded by the custom JSON
encoder/decoder classes.
"""
messages = [
{
'message': Message(constants.INFO, 'Test message'),
'message_list': [
Message(constants.INFO, 'message %s') for x in range(5)
] + [{'another-message': Message(constants.ERROR, 'error')}],
},
Message(constants.INFO, 'message %s'),
]
encoder = MessageEncoder()
value = encoder.encode(messages)
decoded_messages = json.loads(value, cls=MessageDecoder)
self.assertEqual(messages, decoded_messages)
def test_safedata(self):
"""
A message containing SafeData is keeping its safe status when
retrieved from the message storage.
"""
def encode_decode(data):
message = Message(constants.DEBUG, data)
encoded = storage._encode(message)
decoded = storage._decode(encoded)
return decoded.message
storage = self.get_storage()
self.assertIsInstance(encode_decode(mark_safe("<b>Hello Django!</b>")), SafeData)
self.assertNotIsInstance(encode_decode("<b>Hello Django!</b>"), SafeData)
# --- end of source file: tests/messages_tests/test_cookie.py (repo: ar4s/django, BSD-3-Clause license) ---
import datetime
import csv
import logging
import re
from django.contrib.postgres.search import SearchQuery, SearchRank
from django.shortcuts import render, redirect
from django.core.paginator import PageNotAnInteger, EmptyPage, InvalidPage
from django.http import Http404, HttpResponse
from rest_framework.renderers import JSONRenderer
from django.views.decorators.csrf import csrf_exempt
from visitors.models import Visitor, Statistic, Statistic_detail, Developer, VisitorScrapeProgress
from visitors.utils import Paginator, get_sort_field, get_user_profile
logger = logging.getLogger(__name__)
class JSONResponse(HttpResponse):
"""
An HttpResponse that renders its content into JSON.
"""
def __init__(self, data, **kwargs):
content = JSONRenderer().render(data)
kwargs['content_type'] = 'application/json'
super(JSONResponse, self).__init__(content, **kwargs)
def index(request):
user_profile = get_user_profile(request)
stats = Statistic.objects.last()
if stats:
count = stats.visitor_count
else:
count = 0
if stats and stats.updated_institutions:
institutions = stats.updated_institutions
for institution in institutions:
institution['last_updated'] = datetime.datetime.strptime(institution['last_updated'], '%Y-%m-%d')
else:
institutions = []
return render(
request,
"index.html",
{
'count': count,
'user_profile': user_profile,
'institutions': institutions,
},
)
def about(request):
developers = Developer.objects.all().order_by('rank')
context = get_user_profile(request)
context['developers'] = developers
return render(
request,
"about.html",
context
)
def statistics(request):
user_profile = get_user_profile(request)
visitors = Statistic_detail.objects.all()
visitor_counts = dict()
for entry in VisitorScrapeProgress.objects.all().order_by('cutoff_date'):
date_str = str(entry.cutoff_date.strftime('%Y'))
visitor_counts[date_str] = entry.visitor_count
print(visitor_counts)
return render(
request,
"statistics.html",
{
'user_profile': user_profile,
'visitors': visitors,
'visitor_counts': list(visitor_counts.values()),
'visitor_counts_start': list(visitor_counts.keys())[0],
},
)
def statistics_api(request):
try:
stats = Statistic.objects.all()[0]
data = stats.data
except IndexError:
logger.warning("Need to compute statistics")
data = '{"error": "no data"}'
return HttpResponse(data)
@csrf_exempt
def search(request):
user_profile = get_user_profile(request)
query = request.GET.get('q') or ''
query = query.strip()
if query_is_dni(query):
# do dni search
all_items = do_dni_search(query)
else:
all_items = Visitor.objects.filter(
full_search=SearchQuery(query)
)
# sort queryset
all_items = do_sorting(request, all_items)
# paginate queryset
paginator, page = do_pagination(request, all_items)
json_path = request.get_full_path() + '&json'
tsv_path = request.get_full_path() + '&tsv'
return render(
request,
"search/search.html",
{
"paginator": paginator,
"page": page,
"query": query,
"json_path": json_path,
"tsv_path": tsv_path,
'user_profile': user_profile,
},
)
def query_is_dni(query):
if re.search(r'^(\d{5,})', query):
return True
else:
return False
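# Illustrative sketch (not part of the original file; the sample queries are
# hypothetical): the regex only requires five or more leading digits, so
#   query_is_dni('46349274')   -> True   (treated as a DNI number lookup)
#   query_is_dni('Juan Perez') -> False  (falls through to full-text search)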
def do_dni_search(query):
return Visitor.objects.filter(
id_number=query,
).order_by('-date')
def search_date(request):
user_profile = get_user_profile(request)
if 'q' in request.GET:
query = request.GET['q']
if query.strip() == '':
return redirect('/')
try:
query_date_obj = datetime.datetime.strptime(query, '%d/%m/%Y')
except ValueError:
results = "No se encontraron resultados."
return render(
request,
"search/search.html",
{
'items': results,
'keyword': query,
'user_profile': user_profile,
},
)
six_months_ago = datetime.datetime.today() - datetime.timedelta(days=180)
if query_date_obj < six_months_ago:
can_show_results = True
else:
try:
if request.user.subscriber.credits > 0:
can_show_results = True
else:
can_show_results = False
except AttributeError:
# user has no subscriber
can_show_results = False
date_str = datetime.datetime.strftime(query_date_obj, '%Y-%m-%d')
# TODO: implement django queryset search here
results = []
paginator, page = do_pagination(request, results)
context = {
"paginator": paginator,
"query": query,
'user_profile': user_profile,
}
if can_show_results:
try:
if len(results) > 0 and request.user.subscriber:
if request.user.subscriber.credits is not None:
request.user.subscriber.credits -= 1
request.user.subscriber.save()
except AttributeError:
pass
context["page"] = page
else:
context["extra_premium_results"] = len(results)
return render(request, "search/search.html", context)
else:
return redirect('/')
def data_as_csv(request, paginator):
if 'page' in request.GET:
page = request.GET['page']
else:
page = ''
try:
articles = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page
articles = paginator.page(1)
except EmptyPage:
# If page is out of range, deliver last page
articles = paginator.page(paginator.num_pages)
response = HttpResponse(content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename="manolo_data.tsv"'
writer = csv.writer(response, dialect='excel-tab')
for i in articles.object_list:
writer.writerow([i.id, i.institution, i.date, i.full_name,
i.id_document, i.id_number, i.entity, i.reason,
i.host_name, i.office, i.meeting_place,
i.time_start, i.time_end])
return response
def api(request):
return render(request, "api.html")
def do_pagination(request, all_items):
"""
:param request: contains the current page requested by user
    :param all_items: full queryset or list of items to paginate
    :return: tuple of (paginator, page) containing the paginated items and
        the pagination bar
"""
results_per_page = 20
results = all_items
try:
page_no = int(request.GET.get('page', 1))
except (TypeError, ValueError):
raise Http404("Not a valid number for page.")
if page_no < 1:
raise Http404("Pages should be 1 or greater.")
paginator = Paginator(results, results_per_page)
try:
page = paginator.page(page_no)
except InvalidPage:
raise Http404("No such page!")
return paginator, page
def do_sorting(request, queryset):
ordering = get_sort_field(request)
if not ordering:
return queryset.order_by('-date')
return queryset.order_by(ordering)
# --- end of source file: visitors/views.py (repo: aniversarioperu/django-manolo, BSD-3-Clause license) ---
from __future__ import unicode_literals
import json
from django.template import Library
from django.template.base import Node, NodeList, TextNode, VariableNode
from django.core.exceptions import ImproperlyConfigured
from django.utils.safestring import mark_safe
from djng.core.urlresolvers import get_all_remote_methods, get_current_remote_methods
register = Library()
class CsrfValueNode(Node):
def render(self, context):
csrf_token = context.get('csrf_token', None)
if not csrf_token:
raise ImproperlyConfigured('Template must be rendered using a RequestContext')
if csrf_token == 'NOTPROVIDED':
return mark_safe('')
else:
return mark_safe(csrf_token)
@register.tag(name='csrf_value')
def render_csrf_value(parser, token):
return CsrfValueNode()
@register.simple_tag(name='djng_all_rmi')
def djng_all_rmi():
"""
Returns a dictionary of all methods for all Views available for this project, marked with the
``@allow_remote_invocation`` decorator. The return string can be used directly to initialize
the AngularJS provider, such as ``djangoRMIProvider.configure({% djng_rmi_configs %});``
"""
return mark_safe(json.dumps(get_all_remote_methods()))
@register.simple_tag(name='djng_current_rmi', takes_context=True)
def djng_current_rmi(context):
"""
Returns a dictionary of all methods for the current View of this request, marked with the
@allow_remote_invocation decorator. The return string can be used directly to initialize
the AngularJS provider, such as ``djangoRMIProvider.configure({% djng_current_rmi %});``
"""
return mark_safe(json.dumps(get_current_remote_methods(context.get('view'))))
@register.simple_tag(name='load_djng_urls', takes_context=True)
def djng_urls(context, *namespaces):
raise DeprecationWarning(
"load_djng_urls templatetag is deprecated and has been removed from this version of django-angular."
"Please refer to documentation for updated way to manage django urls in angular.")
class AngularJsNode(Node):
def __init__(self, django_nodelist, angular_nodelist, variable):
self.django_nodelist = django_nodelist
self.angular_nodelist = angular_nodelist
self.variable = variable
def render(self, context):
if self.variable.resolve(context):
return self.angular_nodelist.render(context)
return self.django_nodelist.render(context)
@register.tag
def angularjs(parser, token):
"""
Conditionally switch between AngularJS and Django variable expansion for ``{{`` and ``}}``
keeping Django's expansion for ``{%`` and ``%}``
Usage::
{% angularjs 1 %} or simply {% angularjs %}
{% process variables through the AngularJS template engine %}
{% endangularjs %}
{% angularjs 0 %}
{% process variables through the Django template engine %}
{% endangularjs %}
Instead of 0 and 1, it is possible to use a context variable.
"""
bits = token.contents.split()
if len(bits) < 2:
bits.append('1')
values = [parser.compile_filter(bit) for bit in bits[1:]]
django_nodelist = parser.parse(('endangularjs',))
angular_nodelist = NodeList()
for node in django_nodelist:
# convert all occurrences of VariableNode into a TextNode using the
# AngularJS double curly bracket notation
if isinstance(node, VariableNode):
# convert Django's array notation into JS array notation
tokens = node.filter_expression.token.split('.')
token = tokens[0]
for part in tokens[1:]:
if part.isdigit():
token += '[%s]' % part
else:
token += '.%s' % part
node = TextNode('{{ %s }}' % token)
angular_nodelist.append(node)
parser.delete_first_token()
return AngularJsNode(django_nodelist, angular_nodelist, values[0])
# --- end of source file: djng/templatetags/djng_tags.py (repo: adrienbrunet/django-angular, MIT license) ---
from msrest.serialization import Model
class OperationDisplay(Model):
"""The object that represents the operation.
:param provider: Service provider: Microsoft.ResourceProvider
:type provider: str
:param resource: Resource on which the operation is performed: Profile,
endpoint, etc.
:type resource: str
:param operation: Operation type: Read, write, delete, etc.
:type operation: str
:param description: Description of operation
:type description: str
"""
_attribute_map = {
'provider': {'key': 'Provider', 'type': 'str'},
'resource': {'key': 'Resource', 'type': 'str'},
'operation': {'key': 'Operation', 'type': 'str'},
'description': {'key': 'Description', 'type': 'str'},
}
def __init__(self, provider=None, resource=None, operation=None, description=None):
super(OperationDisplay, self).__init__()
self.provider = provider
self.resource = resource
self.operation = operation
self.description = description
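# Illustrative sketch (not part of the original file; the values are
# hypothetical): a typical instance might look like
#   OperationDisplay(provider='Microsoft.DocumentDB',
#                    resource='databaseAccounts',
#                    operation='read',
#                    description='Reads a database account.')
# which serializes to {'Provider': ..., 'Resource': ..., 'Operation': ...,
# 'Description': ...} per the _attribute_map above.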
# --- end of source file: azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/models/operation_display.py (repo: lmazuel/azure-sdk-for-python, MIT license) ---
from Tkinter import *
import matplotlib.pyplot as plt
from matplotlib.figure import Figure
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
class SeesawApp:
def __init__(self, master):
self.frame = Frame(master)
self.frame.pack()
self.number_of_groups = 5
self.bar_width = 0.2
self.groups = []
Label(self.frame, text="Treatment A").grid(row=0, column=0, columnspan=1)
self.treatment_a_entry = Entry(self.frame)
self.treatment_a_entry.grid(row=0, column=1, columnspan=3)
Label(self.frame, text="Treatment B").grid(row=1, column=0, columnspan=1)
self.treatment_b_entry = Entry(self.frame)
self.treatment_b_entry.grid(row=1, column=1, columnspan=3)
Label(self.frame, text="Goal").grid(row=2, column=1)
Label(self.frame, text="Importance").grid(row=2, column=2)
Label(self.frame, text="Probability A").grid(row=2, column=3)
for group_number in range(0, self.number_of_groups):
self.groups.append(GroupEntry(self.frame, group_number))
b = Button(self.frame, text="Draw", command=self.button_callback)
b.grid(row=self.number_of_groups+3, columnspan=4)
self.figure = plt.figure()
# remove axis
self.ax = plt.gca()
self.ax.get_yaxis().set_visible(False)
# plot lines
plt.vlines(0,0,1,lw=5)
plt.hlines(0,-1,1,lw=5)
# set axis size
plt.axis([-1,1,0,1])
# add bars
self.barlist = plt.bar([0]*self.number_of_groups,
[0]*self.number_of_groups,
width = self.bar_width,
align='center')
bar_col = ['b','g','r','c','m','y']
i=0
for cs in self.barlist:
cs.set_color(bar_col[i])
i=i+1
# add bar names
self.label_locs, self.labels = plt.xticks([0] * self.number_of_groups, [""] * self.number_of_groups)
# display treatment titles
self.title_a = plt.figtext(0.25,0.95,"Treatment A",horizontalalignment='center')
self.title_b = plt.figtext(0.75,0.95,"Treatment B",horizontalalignment='center')
self.canvas = FigureCanvasTkAgg(self.figure, master=self.frame)
self.canvas.show()
self.canvas.get_tk_widget().grid(row=self.number_of_groups+4, columnspan=4)
def button_callback(self):
treatment_a_name = self.treatment_a_entry.get()
treatment_b_name = self.treatment_b_entry.get()
goal_list = []
for group in self.groups:
name = group.e1.get()
importance = group.importance_slider.get()
probability_a = group.prob_a_slider.get()
goal_list.append(Goal(name, importance, probability_a))
decision = Decision(treatment_a_name, treatment_b_name, goal_list)
self.draw_decision(decision)
def draw_decision(self, decision):
        ''' Note: there will be an issue if a smaller importance is plotted
            before a larger importance with the same probability.'''
x = [goal.prob_a for goal in decision.goals]
y = [goal.importance for goal in decision.goals]
new_labels = [goal.name for goal in decision.goals]
self.title_a.set_text(decision.treatment_a)
self.title_b.set_text(decision.treatment_b)
self.ax.set_xticks(x)
self.ax.set_xticklabels(new_labels)
# change bar chart
for i in range(len(self.barlist)):
self.barlist[i].set_height(y[i])
self.barlist[i].set_x(x[i] - (self.bar_width / 2))
self.canvas.draw()
class Decision:
def __init__(self, treatment_a, treatment_b, goals):
self.treatment_a = treatment_a
self.treatment_b = treatment_b
self.goals = goals
class Goal:
def __init__(self, name, importance, probability_a):
self.name = name
self.importance = importance
self.prob_a = probability_a
class GroupEntry:
def __init__(self, master, group_number):
row_number = group_number + 3
label = Label(master, text="Goal " + str(group_number+1)).grid(row=row_number, column=0)
self.e1 = Entry(master)
self.e1.grid(row=row_number, column=1)
self.importance_slider = Scale(master, from_=0, to=1, orient=HORIZONTAL, show=0, resolution=0.1)
self.importance_slider.grid(row=row_number, column=2)
self.prob_a_slider = Scale(master, from_=-1, to=1, orient=HORIZONTAL, show=0, resolution=0.1)
self.prob_a_slider.grid(row=row_number, column=3)
root = Tk()
root.wm_title("Balance")
seesaw_app = SeesawApp(root)
root.mainloop()
# --- end of source file: seesaw.py (repo: nhs-seesaw/seesaw, MIT license) ---
from flask.ext.sqlalchemy import SQLAlchemy
db = SQLAlchemy()
|
{
"content_hash": "7dd1db4aa037986442e31e90e48d50c0",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 43,
"avg_line_length": 16,
"alnum_prop": 0.78125,
"repo_name": "ednapiranha/detour",
"id": "2fe63be0ec210a5d2b6ac093bcfdccc60cee9acb",
"size": "64",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "detour/database.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "123757"
},
{
"name": "Python",
"bytes": "27786"
}
],
"symlink_target": ""
}
|
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.common.exceptions import TimeoutException
from django.core.urlresolvers import reverse
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.utils.translation import activate
class TestGoogleLogin(StaticLiveServerTestCase):
fixtures = ['allauth_fixture']
def setUp(self):
self.browser = webdriver.Firefox()
self.browser.implicitly_wait(3)
self.browser.wait = WebDriverWait(self.browser, 10)
activate('en')
def tearDown(self):
self.browser.quit()
def get_element_by_id(self, element_id):
return self.browser.wait.until(EC.presence_of_element_located(
(By.ID, element_id)))
def get_button_by_id(self, element_id):
return self.browser.wait.until(EC.element_to_be_clickable(
(By.ID, element_id)))
def get_full_url(self, namespace):
return self.live_server_url + reverse(namespace)
def user_login(self):
import json
with open('demoapp/fixtures/google_user.json') as f:
credentials = json.loads(f.read())
self.get_element_by_id('Email').send_keys(credentials['Email'])
self.get_element_by_id('next').click()
self.get_element_by_id('Passwd').send_keys(credentials['Passwd'])
for btn in ["signIn", "submit_approve_access"]:
self.get_button_by_id(btn).click()
return
def test_google_login(self):
self.browser.get(self.get_full_url("home"))
google_login = self.get_element_by_id("google_login")
with self.assertRaises(TimeoutException):
self.get_element_by_id("logout")
self.assertEqual(google_login.get_attribute("href"),
self.live_server_url + "/accounts/google/login")
google_login.click()
self.user_login()
with self.assertRaises(TimeoutException):
self.get_element_by_id("google_login")
google_logout = self.get_element_by_id("logout")
google_logout.click()
google_login = self.get_element_by_id("google_login")
|
{
"content_hash": "00c93ecfbbd5c52c5cd19c23822d0e43",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 73,
"avg_line_length": 38.74576271186441,
"alnum_prop": 0.6657917760279966,
"repo_name": "andri-ch/incling",
"id": "2a9f9338f8b4dcccc80d27199dd08cd3a4471bc9",
"size": "2310",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "functional_tests/test_allauth.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "259"
},
{
"name": "HTML",
"bytes": "7394"
},
{
"name": "JavaScript",
"bytes": "1"
},
{
"name": "Python",
"bytes": "22519"
}
],
"symlink_target": ""
}
|
class ChecksumMismatchException(Exception):
""" A checksum check has failed """
pass
class ConfigurationException(Exception):
""" Configuration exception """
pass
class HookExecutionException(Exception):
""" Failed to execute a hook """
pass
class InvalidTemplateException(Exception):
""" Invalid CloudFormation template """
pass
class UnsupportedCompression(Exception):
""" An unsupported compression format for the bundle found """
pass
|
{
"content_hash": "d0e684a34ac1f85cb08c3c824e35f595",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 66,
"avg_line_length": 21.17391304347826,
"alnum_prop": 0.7104722792607803,
"repo_name": "skymill/cumulus",
"id": "f356b81100ff9a4e6a5dda3a9cd77ff2402c1072",
"size": "487",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cumulus/cumulus_ds/exceptions.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "87696"
}
],
"symlink_target": ""
}
|
import argparse
from scrapy.crawler import CrawlerProcess
from scrapy.utils.project import get_project_settings
ALL_SPIDERS = {"small": ["infinitefpv", "voodooquads", "shendrones", "demonframes", "lizardrc", "miniquadfpv", "hoverthings", "impulserc", "fpvreconn", "armattan", "dronematters", "flitetest", "boltrceu", "miniquadbros"],
"medium": ["multirotorparts", "buzzhobbies", "flyingrobot", "uavobjects", "rotorgeeks", "fpvmodel", "multirotormania", "readytoflyquads", "flyduino", "banggood", "getfpv", "stoneblueairlines"],
"large": ["readymaderc", "multirotorsuperstore", "liftrc", "boltrc", "myrcmart"],
"huge": ["hobbyking", "innov8tivedesigns"]}
parser = argparse.ArgumentParser()
parser.add_argument("spider_size")
args = parser.parse_args()
process = CrawlerProcess(get_project_settings())
spiders = []
if args.spider_size == "all":
for size in ALL_SPIDERS:
spiders.extend(ALL_SPIDERS[size])
else:
spiders = ALL_SPIDERS[args.spider_size]
for spider in spiders:
process.crawl(spider)
process.start()
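# Example invocations (a sketch; assumes this script is run from the Scrapy
# project root so get_project_settings() can locate scrapy.cfg):
#
#     python run.py small      # crawl only the "small" spiders
#     python run.py all        # crawl every registered spider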
|
{
"content_hash": "95bfed8a9a59fd89b07a3a16ef0bdb13",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 221,
"avg_line_length": 41.15384615384615,
"alnum_prop": 0.697196261682243,
"repo_name": "rcbuild-info/scrape",
"id": "c1d1230e25b50e07426d84f5dea6fc764b78e2f3",
"size": "1070",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rcbi/rcbi/run.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "128020"
},
{
"name": "Shell",
"bytes": "34"
}
],
"symlink_target": ""
}
|
from django.contrib.admin import SimpleListFilter
from django.utils.translation import ugettext_lazy as _
from django.contrib.sites.models import get_current_site
from django.db.models import Max, Count
from django.utils import timezone
from django.contrib.auth import get_user_model
from opps.channels.models import Channel
from opps.containers.models import Container
User = get_user_model()
class ChannelListFilter(SimpleListFilter):
# Human-readable title which will be displayed in the
# right admin sidebar just above the filter options.
title = _(u'Channel')
# Parameter for the filter that will be used in the URL query.
parameter_name = 'channel'
def lookups(self, request, model_admin):
"""
Returns a list of tuples. The first element in each
tuple is the coded value for the option that will
appear in the URL query. The second element is the
human-readable name for the option that will appear
in the right sidebar.
"""
qs = model_admin.queryset(request)
qs = qs.order_by(
'channel_long_slug'
).distinct().values('channel_long_slug')
if qs:
channels = set([(item['channel_long_slug'] or 'nochannel',
item['channel_long_slug'] or _(u'No channel'))
for item in qs])
long_slug_list = sorted([i[0] for i in channels])
items = []
for channel in channels:
items.append(channel)
_value = channel[0]
if self._get_descendant_count(_value, long_slug_list) > 0:
value = "{0}/*".format(_value)
human_readable = "{0}/*".format(_value)
items.append((value, human_readable))
return sorted(items)
def _get_descendant_count(self, item, channel_list):
"""
Search item occurrences on channel_list
"""
children = []
item_set = set(item.split('/'))
for channel in channel_list:
splt = set(channel.split('/'))
if item != channel and item_set.issubset(splt):
children.append(channel)
return len(children)
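    # A small worked example (illustrative only) of the subset test above:
    # with channel_list = ['news', 'news/sports', 'news/sports/soccer'],
    # _get_descendant_count('news', channel_list) returns 2, since {'news'}
    # is a subset of the split parts of both descendants. Note the set-based
    # test can also match unrelated slugs that merely reuse the same path
    # parts, so it is an approximation rather than a strict prefix check.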
def queryset(self, request, queryset):
"""
Returns the filtered queryset based on the value
provided in the query string and retrievable via
`self.value()`.
"""
value = self.value()
if value == "nochannel":
queryset = queryset.filter(channel_long_slug__isnull=True)
elif value and "*" in value:
site = get_current_site(request)
long_slug = value.replace('/*', '')
channel = Channel.objects.filter(site=site, long_slug=long_slug)[0]
child_channels = channel.get_descendants(include_self=True)
queryset = queryset.filter(channel__in=child_channels)
elif value:
queryset = queryset.filter(channel_long_slug=value)
return queryset
class ChildClassListFilter(SimpleListFilter):
# Human-readable title which will be displayed in the
# right admin sidebar just above the filter options.
title = _(u'Child class')
# Parameter for the filter that will be used in the URL query.
parameter_name = 'child_class'
def lookups(self, request, model_admin):
site = get_current_site(request)
child_classes = [
(i['child_class'], _(i['child_class'])) for i in
Container.objects.values('child_class').filter(
published=True,
date_available__lte=timezone.now(),
site=site).annotate(child=Count('child_class'),
date=Max('date_available')
).order_by('-date')]
return child_classes
def queryset(self, request, queryset):
child_class = self.value()
if child_class:
queryset = queryset.filter(child_class=child_class)
return queryset
class HasQuerySet(SimpleListFilter):
# Human-readable title which will be displayed in the
# right admin sidebar just above the filter options.
title = _(u'Has queryset')
# Parameter for the filter that will be used in the URL query.
parameter_name = 'hasqueryset'
def lookups(self, request, model_admin):
"""
Returns a list of tuples. The first element in each
tuple is the coded value for the option that will
appear in the URL query. The second element is the
human-readable name for the option that will appear
in the right sidebar.
"""
return (
('no', _(u'No')),
('yes', _(u'Yes'))
)
def queryset(self, request, queryset):
"""
Returns the filtered queryset based on the value
provided in the query string and retrievable via
`self.value()`.
"""
if self.value() == "no":
queryset = queryset.filter(queryset__isnull=True)
elif self.value() == 'yes':
queryset = queryset.filter(queryset__isnull=False)
return queryset
class UserListFilter(SimpleListFilter):
# Human-readable title which will be displayed in the
# right admin sidebar just above the filter options.
title = _(u'User')
# Parameter for the filter that will be used in the URL query.
parameter_name = u'user'
def lookups(self, request, model_admin):
"""
Returns a list of tuples. The first element in each
tuple is the coded value for the option that will
appear in the URL query. The second element is the
human-readable name for the option that will appear
in the right sidebar.
"""
# filter only users with images
qs = User.objects.filter(image__isnull=False).distinct()
if qs:
return set([(item.username,
u"{0} ({1})".format(item.get_full_name(), item.email))
for item in qs])
def queryset(self, request, queryset):
"""
Returns the filtered queryset based on the value
provided in the query string and retrievable via
`self.value()`.
"""
if self.value() == u"nouser":
queryset = queryset.filter(user__isnull=True)
elif self.value():
queryset = queryset.filter(user__username=self.value())
return queryset
|
{
"content_hash": "49338fff80055feaefe85eb6d5e3f81b",
"timestamp": "",
"source": "github",
"line_count": 183,
"max_line_length": 79,
"avg_line_length": 35.6448087431694,
"alnum_prop": 0.5949716388164955,
"repo_name": "jeanmask/opps",
"id": "f2001f32108b94a881ffabec2baac7bfbcb013b9",
"size": "6539",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "opps/core/filters.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "13004"
},
{
"name": "HTML",
"bytes": "56903"
},
{
"name": "JavaScript",
"bytes": "62514"
},
{
"name": "Makefile",
"bytes": "848"
},
{
"name": "Python",
"bytes": "1207954"
},
{
"name": "Shell",
"bytes": "661"
}
],
"symlink_target": ""
}
|
"""Orthogonal matching pursuit algorithms
"""
# Author: Vlad Niculae
#
# License: BSD 3 clause
import warnings
from math import sqrt
import numpy as np
from scipy import linalg
from scipy.linalg.lapack import get_lapack_funcs
from joblib import Parallel
from ._base import LinearModel, _pre_fit, _deprecate_normalize
from ..base import RegressorMixin, MultiOutputMixin
from ..utils import as_float_array, check_array
from ..utils.fixes import delayed
from ..model_selection import check_cv
premature = (
"Orthogonal matching pursuit ended prematurely due to linear"
" dependence in the dictionary. The requested precision might"
" not have been met."
)
def _cholesky_omp(X, y, n_nonzero_coefs, tol=None, copy_X=True, return_path=False):
"""Orthogonal Matching Pursuit step using the Cholesky decomposition.
Parameters
----------
X : ndarray of shape (n_samples, n_features)
Input dictionary. Columns are assumed to have unit norm.
y : ndarray of shape (n_samples,)
Input targets.
n_nonzero_coefs : int
Targeted number of non-zero elements.
tol : float, default=None
Targeted squared error, if not None overrides n_nonzero_coefs.
copy_X : bool, default=True
Whether the design matrix X must be copied by the algorithm. A false
value is only helpful if X is already Fortran-ordered, otherwise a
copy is made anyway.
return_path : bool, default=False
Whether to return every value of the nonzero coefficients along the
forward path. Useful for cross-validation.
Returns
-------
gamma : ndarray of shape (n_nonzero_coefs,)
Non-zero elements of the solution.
idx : ndarray of shape (n_nonzero_coefs,)
Indices of the positions of the elements in gamma within the solution
vector.
coef : ndarray of shape (n_features, n_nonzero_coefs)
The first k values of column k correspond to the coefficient value
for the active features at that step. The lower left triangle contains
garbage. Only returned if ``return_path=True``.
n_active : int
Number of active features at convergence.
"""
if copy_X:
X = X.copy("F")
else: # even if we are allowed to overwrite, still copy it if bad order
X = np.asfortranarray(X)
min_float = np.finfo(X.dtype).eps
nrm2, swap = linalg.get_blas_funcs(("nrm2", "swap"), (X,))
(potrs,) = get_lapack_funcs(("potrs",), (X,))
alpha = np.dot(X.T, y)
residual = y
gamma = np.empty(0)
n_active = 0
indices = np.arange(X.shape[1]) # keeping track of swapping
max_features = X.shape[1] if tol is not None else n_nonzero_coefs
L = np.empty((max_features, max_features), dtype=X.dtype)
if return_path:
coefs = np.empty_like(L)
while True:
lam = np.argmax(np.abs(np.dot(X.T, residual)))
if lam < n_active or alpha[lam] ** 2 < min_float:
# atom already selected or inner product too small
warnings.warn(premature, RuntimeWarning, stacklevel=2)
break
if n_active > 0:
# Updates the Cholesky decomposition of X' X
L[n_active, :n_active] = np.dot(X[:, :n_active].T, X[:, lam])
linalg.solve_triangular(
L[:n_active, :n_active],
L[n_active, :n_active],
trans=0,
lower=1,
overwrite_b=True,
check_finite=False,
)
v = nrm2(L[n_active, :n_active]) ** 2
Lkk = linalg.norm(X[:, lam]) ** 2 - v
if Lkk <= min_float: # selected atoms are dependent
warnings.warn(premature, RuntimeWarning, stacklevel=2)
break
L[n_active, n_active] = sqrt(Lkk)
else:
L[0, 0] = linalg.norm(X[:, lam])
X.T[n_active], X.T[lam] = swap(X.T[n_active], X.T[lam])
alpha[n_active], alpha[lam] = alpha[lam], alpha[n_active]
indices[n_active], indices[lam] = indices[lam], indices[n_active]
n_active += 1
# solves LL'x = X'y as a composition of two triangular systems
gamma, _ = potrs(
L[:n_active, :n_active], alpha[:n_active], lower=True, overwrite_b=False
)
if return_path:
coefs[:n_active, n_active - 1] = gamma
residual = y - np.dot(X[:, :n_active], gamma)
if tol is not None and nrm2(residual) ** 2 <= tol:
break
elif n_active == max_features:
break
if return_path:
return gamma, indices[:n_active], coefs[:, :n_active], n_active
else:
return gamma, indices[:n_active], n_active
def _gram_omp(
Gram,
Xy,
n_nonzero_coefs,
tol_0=None,
tol=None,
copy_Gram=True,
copy_Xy=True,
return_path=False,
):
"""Orthogonal Matching Pursuit step on a precomputed Gram matrix.
This function uses the Cholesky decomposition method.
Parameters
----------
Gram : ndarray of shape (n_features, n_features)
Gram matrix of the input data matrix.
Xy : ndarray of shape (n_features,)
Input targets.
n_nonzero_coefs : int
Targeted number of non-zero elements.
tol_0 : float, default=None
Squared norm of y, required if tol is not None.
tol : float, default=None
Targeted squared error, if not None overrides n_nonzero_coefs.
copy_Gram : bool, default=True
Whether the gram matrix must be copied by the algorithm. A false
value is only helpful if it is already Fortran-ordered, otherwise a
copy is made anyway.
copy_Xy : bool, default=True
Whether the covariance vector Xy must be copied by the algorithm.
If False, it may be overwritten.
return_path : bool, default=False
Whether to return every value of the nonzero coefficients along the
forward path. Useful for cross-validation.
Returns
-------
gamma : ndarray of shape (n_nonzero_coefs,)
Non-zero elements of the solution.
idx : ndarray of shape (n_nonzero_coefs,)
Indices of the positions of the elements in gamma within the solution
vector.
coefs : ndarray of shape (n_features, n_nonzero_coefs)
The first k values of column k correspond to the coefficient value
for the active features at that step. The lower left triangle contains
garbage. Only returned if ``return_path=True``.
n_active : int
Number of active features at convergence.
"""
Gram = Gram.copy("F") if copy_Gram else np.asfortranarray(Gram)
if copy_Xy or not Xy.flags.writeable:
Xy = Xy.copy()
min_float = np.finfo(Gram.dtype).eps
nrm2, swap = linalg.get_blas_funcs(("nrm2", "swap"), (Gram,))
(potrs,) = get_lapack_funcs(("potrs",), (Gram,))
indices = np.arange(len(Gram)) # keeping track of swapping
alpha = Xy
tol_curr = tol_0
delta = 0
gamma = np.empty(0)
n_active = 0
max_features = len(Gram) if tol is not None else n_nonzero_coefs
L = np.empty((max_features, max_features), dtype=Gram.dtype)
L[0, 0] = 1.0
if return_path:
coefs = np.empty_like(L)
while True:
lam = np.argmax(np.abs(alpha))
if lam < n_active or alpha[lam] ** 2 < min_float:
# selected same atom twice, or inner product too small
warnings.warn(premature, RuntimeWarning, stacklevel=3)
break
if n_active > 0:
L[n_active, :n_active] = Gram[lam, :n_active]
linalg.solve_triangular(
L[:n_active, :n_active],
L[n_active, :n_active],
trans=0,
lower=1,
overwrite_b=True,
check_finite=False,
)
v = nrm2(L[n_active, :n_active]) ** 2
Lkk = Gram[lam, lam] - v
if Lkk <= min_float: # selected atoms are dependent
warnings.warn(premature, RuntimeWarning, stacklevel=3)
break
L[n_active, n_active] = sqrt(Lkk)
else:
L[0, 0] = sqrt(Gram[lam, lam])
Gram[n_active], Gram[lam] = swap(Gram[n_active], Gram[lam])
Gram.T[n_active], Gram.T[lam] = swap(Gram.T[n_active], Gram.T[lam])
indices[n_active], indices[lam] = indices[lam], indices[n_active]
Xy[n_active], Xy[lam] = Xy[lam], Xy[n_active]
n_active += 1
# solves LL'x = X'y as a composition of two triangular systems
gamma, _ = potrs(
L[:n_active, :n_active], Xy[:n_active], lower=True, overwrite_b=False
)
if return_path:
coefs[:n_active, n_active - 1] = gamma
beta = np.dot(Gram[:, :n_active], gamma)
alpha = Xy - beta
if tol is not None:
tol_curr += delta
delta = np.inner(gamma, beta[:n_active])
tol_curr -= delta
if abs(tol_curr) <= tol:
break
elif n_active == max_features:
break
if return_path:
return gamma, indices[:n_active], coefs[:, :n_active], n_active
else:
return gamma, indices[:n_active], n_active
def orthogonal_mp(
X,
y,
*,
n_nonzero_coefs=None,
tol=None,
precompute=False,
copy_X=True,
return_path=False,
return_n_iter=False,
):
r"""Orthogonal Matching Pursuit (OMP).
Solves n_targets Orthogonal Matching Pursuit problems.
An instance of the problem has the form:
When parametrized by the number of non-zero coefficients using
`n_nonzero_coefs`:
argmin ||y - X\gamma||^2 subject to ||\gamma||_0 <= n_{nonzero coefs}
When parametrized by error using the parameter `tol`:
argmin ||\gamma||_0 subject to ||y - X\gamma||^2 <= tol
Read more in the :ref:`User Guide <omp>`.
Parameters
----------
X : ndarray of shape (n_samples, n_features)
Input data. Columns are assumed to have unit norm.
y : ndarray of shape (n_samples,) or (n_samples, n_targets)
Input targets.
n_nonzero_coefs : int, default=None
Desired number of non-zero entries in the solution. If None (by
default) this value is set to 10% of n_features.
tol : float, default=None
Maximum norm of the residual. If not None, overrides n_nonzero_coefs.
precompute : 'auto' or bool, default=False
Whether to perform precomputations. Improves performance when n_targets
or n_samples is very large.
copy_X : bool, default=True
Whether the design matrix X must be copied by the algorithm. A false
value is only helpful if X is already Fortran-ordered, otherwise a
copy is made anyway.
return_path : bool, default=False
Whether to return every value of the nonzero coefficients along the
forward path. Useful for cross-validation.
return_n_iter : bool, default=False
Whether or not to return the number of iterations.
Returns
-------
coef : ndarray of shape (n_features,) or (n_features, n_targets)
Coefficients of the OMP solution. If `return_path=True`, this contains
the whole coefficient path. In this case its shape is
(n_features, n_features) or (n_features, n_targets, n_features) and
iterating over the last axis yields coefficients in increasing order
of active features.
n_iters : array-like or int
Number of active features across every target. Returned only if
`return_n_iter` is set to True.
See Also
--------
OrthogonalMatchingPursuit
orthogonal_mp_gram
lars_path
sklearn.decomposition.sparse_encode
Notes
-----
Orthogonal matching pursuit was introduced in S. Mallat, Z. Zhang,
Matching pursuits with time-frequency dictionaries, IEEE Transactions on
Signal Processing, Vol. 41, No. 12. (December 1993), pp. 3397-3415.
(http://blanche.polytechnique.fr/~mallat/papiers/MallatPursuit93.pdf)
This implementation is based on Rubinstein, R., Zibulevsky, M. and Elad,
M., Efficient Implementation of the K-SVD Algorithm using Batch Orthogonal
Matching Pursuit Technical Report - CS Technion, April 2008.
https://www.cs.technion.ac.il/~ronrubin/Publications/KSVD-OMP-v2.pdf
"""
X = check_array(X, order="F", copy=copy_X)
copy_X = False
if y.ndim == 1:
y = y.reshape(-1, 1)
y = check_array(y)
if y.shape[1] > 1: # subsequent targets will be affected
copy_X = True
if n_nonzero_coefs is None and tol is None:
# default for n_nonzero_coefs is 0.1 * n_features
# but at least one.
n_nonzero_coefs = max(int(0.1 * X.shape[1]), 1)
if tol is not None and tol < 0:
raise ValueError("Epsilon cannot be negative")
if tol is None and n_nonzero_coefs <= 0:
raise ValueError("The number of atoms must be positive")
if tol is None and n_nonzero_coefs > X.shape[1]:
raise ValueError(
"The number of atoms cannot be more than the number of features"
)
if precompute == "auto":
precompute = X.shape[0] > X.shape[1]
if precompute:
G = np.dot(X.T, X)
G = np.asfortranarray(G)
Xy = np.dot(X.T, y)
if tol is not None:
norms_squared = np.sum((y ** 2), axis=0)
else:
norms_squared = None
return orthogonal_mp_gram(
G,
Xy,
n_nonzero_coefs=n_nonzero_coefs,
tol=tol,
norms_squared=norms_squared,
copy_Gram=copy_X,
copy_Xy=False,
return_path=return_path,
)
if return_path:
coef = np.zeros((X.shape[1], y.shape[1], X.shape[1]))
else:
coef = np.zeros((X.shape[1], y.shape[1]))
n_iters = []
for k in range(y.shape[1]):
out = _cholesky_omp(
X, y[:, k], n_nonzero_coefs, tol, copy_X=copy_X, return_path=return_path
)
if return_path:
_, idx, coefs, n_iter = out
coef = coef[:, :, : len(idx)]
for n_active, x in enumerate(coefs.T):
coef[idx[: n_active + 1], k, n_active] = x[: n_active + 1]
else:
x, idx, n_iter = out
coef[idx, k] = x
n_iters.append(n_iter)
if y.shape[1] == 1:
n_iters = n_iters[0]
if return_n_iter:
return np.squeeze(coef), n_iters
else:
return np.squeeze(coef)
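# A minimal usage sketch for `orthogonal_mp` (illustrative data; not from the
# library docs). Columns of X are normalized to unit norm, as the function
# assumes:
#
#     rng = np.random.RandomState(0)
#     X = rng.randn(50, 20)
#     X /= np.sqrt(np.sum(X ** 2, axis=0))       # unit-norm columns
#     w_true = np.zeros(20)
#     w_true[[2, 5, 11]] = [1.0, -2.0, 0.5]      # sparse ground truth
#     y = np.dot(X, w_true)
#     coef = orthogonal_mp(X, y, n_nonzero_coefs=3)
#     # `coef` has at most 3 non-zero entries approximating w_true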
def orthogonal_mp_gram(
Gram,
Xy,
*,
n_nonzero_coefs=None,
tol=None,
norms_squared=None,
copy_Gram=True,
copy_Xy=True,
return_path=False,
return_n_iter=False,
):
"""Gram Orthogonal Matching Pursuit (OMP).
Solves n_targets Orthogonal Matching Pursuit problems using only
the Gram matrix X.T * X and the product X.T * y.
Read more in the :ref:`User Guide <omp>`.
Parameters
----------
Gram : ndarray of shape (n_features, n_features)
Gram matrix of the input data: X.T * X.
Xy : ndarray of shape (n_features,) or (n_features, n_targets)
Input targets multiplied by X: X.T * y.
n_nonzero_coefs : int, default=None
Desired number of non-zero entries in the solution. If None (by
default) this value is set to 10% of n_features.
tol : float, default=None
Maximum norm of the residual. If not None, overrides n_nonzero_coefs.
norms_squared : array-like of shape (n_targets,), default=None
Squared L2 norms of the lines of y. Required if tol is not None.
copy_Gram : bool, default=True
Whether the gram matrix must be copied by the algorithm. A false
value is only helpful if it is already Fortran-ordered, otherwise a
copy is made anyway.
copy_Xy : bool, default=True
Whether the covariance vector Xy must be copied by the algorithm.
If False, it may be overwritten.
return_path : bool, default=False
Whether to return every value of the nonzero coefficients along the
forward path. Useful for cross-validation.
return_n_iter : bool, default=False
Whether or not to return the number of iterations.
Returns
-------
coef : ndarray of shape (n_features,) or (n_features, n_targets)
Coefficients of the OMP solution. If `return_path=True`, this contains
the whole coefficient path. In this case its shape is
(n_features, n_features) or (n_features, n_targets, n_features) and
iterating over the last axis yields coefficients in increasing order
of active features.
n_iters : array-like or int
Number of active features across every target. Returned only if
`return_n_iter` is set to True.
See Also
--------
OrthogonalMatchingPursuit
orthogonal_mp
lars_path
sklearn.decomposition.sparse_encode
Notes
-----
    Orthogonal matching pursuit was introduced in S. Mallat, Z. Zhang,
Matching pursuits with time-frequency dictionaries, IEEE Transactions on
Signal Processing, Vol. 41, No. 12. (December 1993), pp. 3397-3415.
(http://blanche.polytechnique.fr/~mallat/papiers/MallatPursuit93.pdf)
This implementation is based on Rubinstein, R., Zibulevsky, M. and Elad,
M., Efficient Implementation of the K-SVD Algorithm using Batch Orthogonal
Matching Pursuit Technical Report - CS Technion, April 2008.
https://www.cs.technion.ac.il/~ronrubin/Publications/KSVD-OMP-v2.pdf
"""
Gram = check_array(Gram, order="F", copy=copy_Gram)
Xy = np.asarray(Xy)
if Xy.ndim > 1 and Xy.shape[1] > 1:
# or subsequent target will be affected
copy_Gram = True
if Xy.ndim == 1:
Xy = Xy[:, np.newaxis]
if tol is not None:
norms_squared = [norms_squared]
if copy_Xy or not Xy.flags.writeable:
# Make the copy once instead of many times in _gram_omp itself.
Xy = Xy.copy()
if n_nonzero_coefs is None and tol is None:
n_nonzero_coefs = int(0.1 * len(Gram))
if tol is not None and norms_squared is None:
raise ValueError(
"Gram OMP needs the precomputed norms in order "
"to evaluate the error sum of squares."
)
if tol is not None and tol < 0:
raise ValueError("Epsilon cannot be negative")
if tol is None and n_nonzero_coefs <= 0:
raise ValueError("The number of atoms must be positive")
if tol is None and n_nonzero_coefs > len(Gram):
raise ValueError(
"The number of atoms cannot be more than the number of features"
)
if return_path:
coef = np.zeros((len(Gram), Xy.shape[1], len(Gram)))
else:
coef = np.zeros((len(Gram), Xy.shape[1]))
n_iters = []
for k in range(Xy.shape[1]):
out = _gram_omp(
Gram,
Xy[:, k],
n_nonzero_coefs,
norms_squared[k] if tol is not None else None,
tol,
copy_Gram=copy_Gram,
copy_Xy=False,
return_path=return_path,
)
if return_path:
_, idx, coefs, n_iter = out
coef = coef[:, :, : len(idx)]
for n_active, x in enumerate(coefs.T):
coef[idx[: n_active + 1], k, n_active] = x[: n_active + 1]
else:
x, idx, n_iter = out
coef[idx, k] = x
n_iters.append(n_iter)
if Xy.shape[1] == 1:
n_iters = n_iters[0]
if return_n_iter:
return np.squeeze(coef), n_iters
else:
return np.squeeze(coef)
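# Continuing the sketch above for the Gram variant (illustrative): precompute
# the Gram matrix and covariance vector once when the same X is reused for
# many targets, then call `orthogonal_mp_gram` directly:
#
#     G = np.dot(X.T, X)
#     Xy = np.dot(X.T, y)
#     coef_gram = orthogonal_mp_gram(G, Xy, n_nonzero_coefs=3)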
class OrthogonalMatchingPursuit(MultiOutputMixin, RegressorMixin, LinearModel):
"""Orthogonal Matching Pursuit model (OMP).
Read more in the :ref:`User Guide <omp>`.
Parameters
----------
n_nonzero_coefs : int, default=None
Desired number of non-zero entries in the solution. If None (by
default) this value is set to 10% of n_features.
tol : float, default=None
Maximum norm of the residual. If not None, overrides n_nonzero_coefs.
fit_intercept : bool, default=True
whether to calculate the intercept for this model. If set
to false, no intercept will be used in calculations
(i.e. data is expected to be centered).
normalize : bool, default=True
This parameter is ignored when ``fit_intercept`` is set to False.
If True, the regressors X will be normalized before regression by
subtracting the mean and dividing by the l2-norm.
If you wish to standardize, please use
:class:`~sklearn.preprocessing.StandardScaler` before calling ``fit``
on an estimator with ``normalize=False``.
.. deprecated:: 1.0
``normalize`` was deprecated in version 1.0. It will default
to False in 1.2 and be removed in 1.4.
precompute : 'auto' or bool, default='auto'
Whether to use a precomputed Gram and Xy matrix to speed up
calculations. Improves performance when :term:`n_targets` or
:term:`n_samples` is very large. Note that if you already have such
matrices, you can pass them directly to the fit method.
Attributes
----------
coef_ : ndarray of shape (n_features,) or (n_targets, n_features)
Parameter vector (w in the formula).
intercept_ : float or ndarray of shape (n_targets,)
Independent term in decision function.
n_iter_ : int or array-like
Number of active features across every target.
n_nonzero_coefs_ : int
The number of non-zero coefficients in the solution. If
`n_nonzero_coefs` is None and `tol` is None this value is either set
to 10% of `n_features` or 1, whichever is greater.
n_features_in_ : int
Number of features seen during :term:`fit`.
.. versionadded:: 0.24
Examples
--------
>>> from sklearn.linear_model import OrthogonalMatchingPursuit
>>> from sklearn.datasets import make_regression
>>> X, y = make_regression(noise=4, random_state=0)
>>> reg = OrthogonalMatchingPursuit(normalize=False).fit(X, y)
>>> reg.score(X, y)
0.9991...
>>> reg.predict(X[:1,])
array([-78.3854...])
Notes
-----
    Orthogonal matching pursuit was introduced in S. Mallat, Z. Zhang,
Matching pursuits with time-frequency dictionaries, IEEE Transactions on
Signal Processing, Vol. 41, No. 12. (December 1993), pp. 3397-3415.
(http://blanche.polytechnique.fr/~mallat/papiers/MallatPursuit93.pdf)
This implementation is based on Rubinstein, R., Zibulevsky, M. and Elad,
M., Efficient Implementation of the K-SVD Algorithm using Batch Orthogonal
Matching Pursuit Technical Report - CS Technion, April 2008.
https://www.cs.technion.ac.il/~ronrubin/Publications/KSVD-OMP-v2.pdf
See Also
--------
orthogonal_mp
orthogonal_mp_gram
lars_path
Lars
LassoLars
sklearn.decomposition.sparse_encode
OrthogonalMatchingPursuitCV
"""
def __init__(
self,
*,
n_nonzero_coefs=None,
tol=None,
fit_intercept=True,
normalize="deprecated",
precompute="auto",
):
self.n_nonzero_coefs = n_nonzero_coefs
self.tol = tol
self.fit_intercept = fit_intercept
self.normalize = normalize
self.precompute = precompute
def fit(self, X, y):
"""Fit the model using X, y as training data.
Parameters
----------
X : array-like of shape (n_samples, n_features)
Training data.
y : array-like of shape (n_samples,) or (n_samples, n_targets)
Target values. Will be cast to X's dtype if necessary
Returns
-------
self : object
returns an instance of self.
"""
_normalize = _deprecate_normalize(
self.normalize, default=True, estimator_name=self.__class__.__name__
)
X, y = self._validate_data(X, y, multi_output=True, y_numeric=True)
n_features = X.shape[1]
X, y, X_offset, y_offset, X_scale, Gram, Xy = _pre_fit(
X, y, None, self.precompute, _normalize, self.fit_intercept, copy=True
)
if y.ndim == 1:
y = y[:, np.newaxis]
if self.n_nonzero_coefs is None and self.tol is None:
# default for n_nonzero_coefs is 0.1 * n_features
# but at least one.
self.n_nonzero_coefs_ = max(int(0.1 * n_features), 1)
else:
self.n_nonzero_coefs_ = self.n_nonzero_coefs
if Gram is False:
coef_, self.n_iter_ = orthogonal_mp(
X,
y,
n_nonzero_coefs=self.n_nonzero_coefs_,
tol=self.tol,
precompute=False,
copy_X=True,
return_n_iter=True,
)
else:
norms_sq = np.sum(y ** 2, axis=0) if self.tol is not None else None
coef_, self.n_iter_ = orthogonal_mp_gram(
Gram,
Xy=Xy,
n_nonzero_coefs=self.n_nonzero_coefs_,
tol=self.tol,
norms_squared=norms_sq,
copy_Gram=True,
copy_Xy=True,
return_n_iter=True,
)
self.coef_ = coef_.T
self._set_intercept(X_offset, y_offset, X_scale)
return self
def _omp_path_residues(
X_train,
y_train,
X_test,
y_test,
copy=True,
fit_intercept=True,
normalize=True,
max_iter=100,
):
"""Compute the residues on left-out data for a full LARS path.
Parameters
----------
X_train : ndarray of shape (n_samples, n_features)
The data to fit the LARS on.
y_train : ndarray of shape (n_samples)
The target variable to fit LARS on.
X_test : ndarray of shape (n_samples, n_features)
The data to compute the residues on.
y_test : ndarray of shape (n_samples)
The target variable to compute the residues on.
copy : bool, default=True
Whether X_train, X_test, y_train and y_test should be copied. If
False, they may be overwritten.
fit_intercept : bool, default=True
Whether to calculate the intercept for this model. If set
to false, no intercept will be used in calculations
(i.e. data is expected to be centered).
normalize : bool, default=True
This parameter is ignored when ``fit_intercept`` is set to False.
If True, the regressors X will be normalized before regression by
subtracting the mean and dividing by the l2-norm.
If you wish to standardize, please use
:class:`~sklearn.preprocessing.StandardScaler` before calling ``fit``
on an estimator with ``normalize=False``.
.. deprecated:: 1.0
``normalize`` was deprecated in version 1.0. It will default
to False in 1.2 and be removed in 1.4.
max_iter : int, default=100
Maximum numbers of iterations to perform, therefore maximum features
to include. 100 by default.
Returns
-------
residues : ndarray of shape (n_samples, max_features)
Residues of the prediction on the test data.
"""
if copy:
X_train = X_train.copy()
y_train = y_train.copy()
X_test = X_test.copy()
y_test = y_test.copy()
if fit_intercept:
X_mean = X_train.mean(axis=0)
X_train -= X_mean
X_test -= X_mean
y_mean = y_train.mean(axis=0)
y_train = as_float_array(y_train, copy=False)
y_train -= y_mean
y_test = as_float_array(y_test, copy=False)
y_test -= y_mean
if normalize:
norms = np.sqrt(np.sum(X_train ** 2, axis=0))
nonzeros = np.flatnonzero(norms)
X_train[:, nonzeros] /= norms[nonzeros]
coefs = orthogonal_mp(
X_train,
y_train,
n_nonzero_coefs=max_iter,
tol=None,
precompute=False,
copy_X=False,
return_path=True,
)
if coefs.ndim == 1:
coefs = coefs[:, np.newaxis]
if normalize:
coefs[nonzeros] /= norms[nonzeros][:, np.newaxis]
return np.dot(coefs.T, X_test.T) - y_test
class OrthogonalMatchingPursuitCV(RegressorMixin, LinearModel):
"""Cross-validated Orthogonal Matching Pursuit model (OMP).
See glossary entry for :term:`cross-validation estimator`.
Read more in the :ref:`User Guide <omp>`.
Parameters
----------
copy : bool, default=True
Whether the design matrix X must be copied by the algorithm. A false
value is only helpful if X is already Fortran-ordered, otherwise a
copy is made anyway.
fit_intercept : bool, default=True
whether to calculate the intercept for this model. If set
to false, no intercept will be used in calculations
(i.e. data is expected to be centered).
normalize : bool, default=True
This parameter is ignored when ``fit_intercept`` is set to False.
If True, the regressors X will be normalized before regression by
subtracting the mean and dividing by the l2-norm.
If you wish to standardize, please use
:class:`~sklearn.preprocessing.StandardScaler` before calling ``fit``
on an estimator with ``normalize=False``.
.. deprecated:: 1.0
``normalize`` was deprecated in version 1.0. It will default
to False in 1.2 and be removed in 1.4.
max_iter : int, default=None
Maximum numbers of iterations to perform, therefore maximum features
to include. 10% of ``n_features`` but at least 5 if available.
cv : int, cross-validation generator or iterable, default=None
Determines the cross-validation splitting strategy.
Possible inputs for cv are:
- None, to use the default 5-fold cross-validation,
- integer, to specify the number of folds.
- :term:`CV splitter`,
- An iterable yielding (train, test) splits as arrays of indices.
For integer/None inputs, :class:`KFold` is used.
Refer :ref:`User Guide <cross_validation>` for the various
cross-validation strategies that can be used here.
.. versionchanged:: 0.22
``cv`` default value if None changed from 3-fold to 5-fold.
n_jobs : int, default=None
Number of CPUs to use during the cross validation.
``None`` means 1 unless in a :obj:`joblib.parallel_backend` context.
``-1`` means using all processors. See :term:`Glossary <n_jobs>`
for more details.
verbose : bool or int, default=False
Sets the verbosity amount.
Attributes
----------
intercept_ : float or ndarray of shape (n_targets,)
Independent term in decision function.
coef_ : ndarray of shape (n_features,) or (n_targets, n_features)
Parameter vector (w in the problem formulation).
n_nonzero_coefs_ : int
Estimated number of non-zero coefficients giving the best mean squared
error over the cross-validation folds.
n_iter_ : int or array-like
Number of active features across every target for the model refit with
the best hyperparameters got by cross-validating across all folds.
n_features_in_ : int
Number of features seen during :term:`fit`.
.. versionadded:: 0.24
Examples
--------
>>> from sklearn.linear_model import OrthogonalMatchingPursuitCV
>>> from sklearn.datasets import make_regression
>>> X, y = make_regression(n_features=100, n_informative=10,
... noise=4, random_state=0)
>>> reg = OrthogonalMatchingPursuitCV(cv=5, normalize=False).fit(X, y)
>>> reg.score(X, y)
0.9991...
>>> reg.n_nonzero_coefs_
10
>>> reg.predict(X[:1,])
array([-78.3854...])
See Also
--------
orthogonal_mp
orthogonal_mp_gram
lars_path
Lars
LassoLars
OrthogonalMatchingPursuit
LarsCV
LassoLarsCV
sklearn.decomposition.sparse_encode
"""
def __init__(
self,
*,
copy=True,
fit_intercept=True,
normalize="deprecated",
max_iter=None,
cv=None,
n_jobs=None,
verbose=False,
):
self.copy = copy
self.fit_intercept = fit_intercept
self.normalize = normalize
self.max_iter = max_iter
self.cv = cv
self.n_jobs = n_jobs
self.verbose = verbose
def fit(self, X, y):
"""Fit the model using X, y as training data.
Parameters
----------
X : array-like of shape (n_samples, n_features)
Training data.
y : array-like of shape (n_samples,)
Target values. Will be cast to X's dtype if necessary.
Returns
-------
self : object
returns an instance of self.
"""
_normalize = _deprecate_normalize(
self.normalize, default=True, estimator_name=self.__class__.__name__
)
X, y = self._validate_data(
X, y, y_numeric=True, ensure_min_features=2, estimator=self
)
X = as_float_array(X, copy=False, force_all_finite=False)
cv = check_cv(self.cv, classifier=False)
max_iter = (
min(max(int(0.1 * X.shape[1]), 5), X.shape[1])
if not self.max_iter
else self.max_iter
)
cv_paths = Parallel(n_jobs=self.n_jobs, verbose=self.verbose)(
delayed(_omp_path_residues)(
X[train],
y[train],
X[test],
y[test],
self.copy,
self.fit_intercept,
_normalize,
max_iter,
)
for train, test in cv.split(X)
)
min_early_stop = min(fold.shape[0] for fold in cv_paths)
mse_folds = np.array(
[(fold[:min_early_stop] ** 2).mean(axis=1) for fold in cv_paths]
)
best_n_nonzero_coefs = np.argmin(mse_folds.mean(axis=0)) + 1
self.n_nonzero_coefs_ = best_n_nonzero_coefs
omp = OrthogonalMatchingPursuit(
n_nonzero_coefs=best_n_nonzero_coefs,
fit_intercept=self.fit_intercept,
normalize=_normalize,
)
omp.fit(X, y)
self.coef_ = omp.coef_
self.intercept_ = omp.intercept_
self.n_iter_ = omp.n_iter_
return self
|
{
"content_hash": "2bcb8170d79928f91c20cdb23e228c6a",
"timestamp": "",
"source": "github",
"line_count": 1049,
"max_line_length": 84,
"avg_line_length": 33.00095328884652,
"alnum_prop": 0.5977815009532613,
"repo_name": "huzq/scikit-learn",
"id": "c4bdbb6248726558d84ae66daaadf1eb7fa3f776",
"size": "34618",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "sklearn/linear_model/_omp.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "3366"
},
{
"name": "C",
"bytes": "394787"
},
{
"name": "C++",
"bytes": "140225"
},
{
"name": "Makefile",
"bytes": "1579"
},
{
"name": "PowerShell",
"bytes": "17042"
},
{
"name": "Python",
"bytes": "6394128"
},
{
"name": "Shell",
"bytes": "9250"
}
],
"symlink_target": ""
}
|
"""DQN Agent with time input."""
import collections
import functools
from typing import Tuple
from dopamine.jax import losses
from dopamine.jax import networks
from dopamine.jax.agents.dqn import dqn_agent
from dopamine.replay_memory import prioritized_replay_buffer
from flax import linen as nn
import gin
import jax
import jax.numpy as jnp
import numpy as onp
import optax
import tensorflow as tf
from aux_tasks.auxiliary_mc import monte_carlo_replay_buffer as monte_carlo_rb
from aux_tasks.auxiliary_mc import networks as aux_mc_networks
AuxiliaryPredictionDQNNetworkType = collections.namedtuple(
'dqn_network_with_random_rewards', ['q_values', 'aux_prediction'])
@gin.configurable
class DQNNetworkWithAuxiliaryPredictions(nn.Module):
"""Generates q_values with per-state auxiliary predictions.
Attributes:
num_actions: int, number of actions the agent can take at any state.
num_predictions: int, number of auxiliary predictions.
rng_key: int, Fixed rng for random reward generation.
inputs_preprocessed: bool, Whether inputs are already preprocessed.
"""
num_actions: int
num_predictions: int
inputs_preprocessed: bool = False
@nn.compact
def __call__(self, x):
initializer = nn.initializers.xavier_uniform()
if not self.inputs_preprocessed:
x = networks.preprocess_atari_inputs(x)
hidden_sizes = [32, 64, 64]
kernel_sizes = [8, 4, 3]
stride_sizes = [4, 2, 1]
for hidden_size, kernel_size, stride_size in zip(hidden_sizes, kernel_sizes,
stride_sizes):
x = nn.Conv(
features=hidden_size,
kernel_size=(kernel_size, kernel_size),
strides=(stride_size, stride_size),
kernel_init=initializer)(x)
x = nn.relu(x)
features = x.reshape((-1)) # flatten
x = nn.Dense(features=512, kernel_init=initializer)(features)
x = nn.relu(x)
q_values = nn.Dense(features=self.num_actions, kernel_init=initializer)(x)
# MSE loss for Auxiliary task MC predictions.
auxiliary_pred = nn.Dense(features=512, kernel_init=initializer)(features)
auxiliary_pred = nn.relu(auxiliary_pred)
auxiliary_pred = nn.Dense(
features=self.num_predictions, kernel_init=initializer)(auxiliary_pred)
return AuxiliaryPredictionDQNNetworkType(q_values, auxiliary_pred)
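# A minimal usage sketch (illustrative; the input shape and PRNG key below are
# assumptions, not part of this module) showing how the network above is
# initialized and applied with Flax:
#
#     net = DQNNetworkWithAuxiliaryPredictions(num_actions=4, num_predictions=8)
#     dummy_state = jnp.zeros((84, 84, 4))               # one Atari frame stack
#     params = net.init(jax.random.PRNGKey(0), dummy_state)
#     out = net.apply(params, dummy_state)
#     # out.q_values has shape (4,); out.aux_prediction has shape (8,)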
@gin.configurable
class ImpalaEncoderWithAuxiliaryPredictions(nn.Module):
"""Impala Network generating q_values with per-state auxiliary predictions."""
num_actions: int
num_predictions: int
inputs_preprocessed: bool = False
stack_sizes: Tuple[int, Ellipsis] = (16, 32, 32)
num_blocks: int = 2
def setup(self):
self.encoder = aux_mc_networks.ImpalaEncoder()
@nn.compact
def __call__(self, x, key=None):
    # NOTE: the optional `key` argument is not used in this method.
initializer = nn.initializers.xavier_uniform()
if not self.inputs_preprocessed:
x = networks.preprocess_atari_inputs(x)
x = self.encoder(x)
features = x.reshape((-1)) # flatten
x = nn.Dense(
features=512, kernel_init=initializer)(features)
x = nn.relu(x)
q_values = nn.Dense(features=self.num_actions, kernel_init=initializer)(x)
# MSE loss for Auxiliary task MC predictions.
auxiliary_pred = nn.Dense(features=512, kernel_init=initializer)(features)
auxiliary_pred = nn.relu(auxiliary_pred)
auxiliary_pred = nn.Dense(
features=self.num_predictions, kernel_init=initializer)(auxiliary_pred)
return AuxiliaryPredictionDQNNetworkType(q_values, auxiliary_pred)
@gin.configurable
class RandomRewardNetwork(nn.Module):
"""Generates random rewards using a noisy network.
Attributes:
num_actions: int, number of actions the agent can take at any state.
num_rewards: int, number of random rewards to generate.
rng_key: int, Fixed rng for random reward generation.
inputs_preprocessed: bool, Whether inputs are already preprocessed.
"""
num_actions: int
num_rewards: int
inputs_preprocessed: bool = False
@nn.compact
def __call__(self, x, rng_key):
initializer = nn.initializers.xavier_uniform()
if not self.inputs_preprocessed:
x = networks.preprocess_atari_inputs(x)
hidden_sizes = [32, 64, 64]
kernel_sizes = [8, 4, 3]
stride_sizes = [4, 2, 1]
for hidden_size, kernel_size, stride_size in zip(hidden_sizes, kernel_sizes,
stride_sizes):
x = nn.Conv(
features=hidden_size,
kernel_size=(kernel_size, kernel_size),
strides=(stride_size, stride_size),
kernel_init=initializer)(x)
x = nn.relu(x)
features = x.reshape((-1)) # flatten
# Use a fixed random seed for NoisyNetwork.
net = networks.NoisyNetwork(rng_key=rng_key, eval_mode=False)
# Return `self.num_rewards` random outputs.
rewards = net(features, self.num_rewards)
    # NOTE: sigmoid squashes to (0, 1), not (-1, 1); this value is unused and
    # the raw noisy-network outputs are returned below.
    x = jax.nn.sigmoid(features)
return rewards
@functools.partial(jax.jit, static_argnames=('network_def'))
def get_rewards(network_def, params, state, rng_key):
return network_def.apply(params, state, rng_key=rng_key)
@functools.partial(
jax.jit,
static_argnames=('network_def', 'optimizer', 'cumulative_gamma',
'loss_type'))
def train(network_def,
online_params,
target_params,
optimizer,
optimizer_state,
states,
auxiliary_mc_returns,
actions,
next_states,
rewards,
terminals,
cumulative_gamma,
auxloss_weight=0.0):
"""Run the training step."""
def loss_fn(params, target, auxiliary_target):
def q_online(state):
return network_def.apply(params, state)
model_output = jax.vmap(q_online)(states)
q_values = jnp.squeeze(model_output.q_values)
replay_chosen_q = jax.vmap(lambda x, y: x[y])(q_values, actions)
td_loss = jnp.mean(jax.vmap(losses.mse_loss)(target, replay_chosen_q))
# Auxiliary task loss.
auxiliary_predictions = jnp.squeeze(model_output.aux_prediction)
aux_loss = jnp.mean(jax.vmap(losses.mse_loss)(
auxiliary_predictions, auxiliary_target))
loss = ((1. - auxloss_weight) * td_loss +
auxloss_weight * aux_loss)
return loss, (td_loss, aux_loss)
def q_target(state):
return network_def.apply(target_params, state)
target = dqn_agent.target_q(q_target, next_states, rewards, terminals,
cumulative_gamma)
grad_fn = jax.value_and_grad(loss_fn, has_aux=True)
(loss, component_losses), grad = grad_fn(online_params, target,
auxiliary_mc_returns)
td_loss, aux_loss = component_losses
updates, optimizer_state = optimizer.update(grad, optimizer_state,
params=online_params)
online_params = optax.apply_updates(online_params, updates)
return optimizer_state, online_params, loss, td_loss, aux_loss
@gin.configurable
class CumulantJaxDQNAgentWithAuxiliaryMC(dqn_agent.JaxDQNAgent):
"""An implementation of the DQN agent with replay buffer logging to disk."""
def __init__(self,
num_actions,
network=ImpalaEncoderWithAuxiliaryPredictions,
num_rewards=2,
auxloss_weight=0.0,
summary_writer=None,
preprocess_fn=None,
seed=None):
"""Initializes the agent and constructs the components of its graph.
Args:
num_actions: int, number of actions the agent can take at any state.
network: Jax network to use for training.
num_rewards: int, Number of random rewards to generate at each step.
auxloss_weight: float: weight for aux loss.
summary_writer: Tensorflow summary writer for logging summaries.
preprocess_fn: Preprocessing function.
seed: int, Agent seed.
"""
network = functools.partial(network, num_predictions=num_rewards)
self.num_rewards = num_rewards
self._auxloss_weight = auxloss_weight
super().__init__(
num_actions, network=network, summary_writer=summary_writer, seed=seed,
preprocess_fn=preprocess_fn)
# Create network for random reward generation.
inputs_preprocessed = True if preprocess_fn else False
self.reward_network_def = RandomRewardNetwork(
num_actions=num_actions, num_rewards=num_rewards,
inputs_preprocessed=inputs_preprocessed)
self._build_reward_net_params()
def _build_replay_buffer(self):
"""Creates a monte carlo replay buffer used by the agent."""
extra_storage_types = [
monte_carlo_rb.ReplayElement('auxiliary_rewards', (self.num_rewards,),
onp.float32)
]
return monte_carlo_rb.OutOfGraphReplayBufferWithMC(
observation_shape=self.observation_shape,
stack_size=self.stack_size,
update_horizon=self.update_horizon,
gamma=self.gamma,
observation_dtype=self.observation_dtype,
extra_storage_types=extra_storage_types,
        # Pass a copy of `extra_storage_types` to avoid updating it when
# updating `extra_monte_carlo_storage_types`.
extra_monte_carlo_storage_types=extra_storage_types[:],
reverse_fill=True)
def _get_random_reward(self, state):
return onp.asarray(
get_rewards(self.reward_network_def, self.reward_net_params, state,
self._reward_rng))
def _build_reward_net_params(self):
self._rng, self._reward_rng, rng = jax.random.split(self._rng, 3)
self.reward_net_params = self.reward_network_def.init(
rng, x=self.state, rng_key=self._reward_rng)
def _store_transition(self,
last_observation,
action,
reward,
is_terminal,
*args,
priority=None,
episode_end=False):
""""""
is_prioritized = isinstance(
self._replay,
prioritized_replay_buffer.OutOfGraphPrioritizedReplayBuffer)
if is_prioritized and priority is None:
if self._replay_scheme == 'uniform':
priority = 1.
else:
priority = self._replay.sum_tree.max_recorded_priority
if not self.eval_mode:
# Store the current auxiliary reward here.
self._replay.add(
last_observation,
action,
reward,
is_terminal,
self._auxiliary_reward,
*args,
priority=priority,
episode_end=episode_end)
def _train_step(self):
"""Runs a single training step."""
# Run a train op at the rate of self.update_period if enough training steps
# have been run. This matches the Nature DQN behaviour.
if self._replay.add_count > self.min_replay_history:
if self.training_steps % self.update_period == 0:
self._sample_from_replay_buffer()
states = self.preprocess_fn(self.replay_elements['state'])
next_states = self.preprocess_fn(self.replay_elements['next_state'])
self.optimizer_state, self.online_params, loss, td_loss, auxloss = train(
self.network_def,
self.online_params,
self.target_network_params,
self.optimizer,
self.optimizer_state,
states,
self.replay_elements['monte_carlo_auxiliary_rewards'],
self.replay_elements['action'],
next_states,
self.replay_elements['reward'],
self.replay_elements['terminal'],
self.cumulative_gamma,
self._auxloss_weight)
if (self.summary_writer is not None and
self.training_steps > 0 and
self.training_steps % self.summary_writing_frequency == 0):
with self.summary_writer.as_default():
tf.summary.scalar('Losses/Aggregate', loss, step=self.training_steps)
tf.summary.scalar(
'Losses/Auxiliary',
auxloss,
step=self.training_steps)
tf.summary.scalar('Losses/TD', td_loss, step=self.training_steps)
self.summary_writer.flush()
if self.training_steps % self.target_update_period == 0:
self._sync_weights()
self.training_steps += 1
def step(self, reward, observation):
"""Records the most recent transition and returns the agent's next action."""
action = super().step(reward, observation)
# Generate auxiliary reward for current state.
self._auxiliary_reward = self._get_random_reward(
self.preprocess_fn(self.state))
return action
def begin_episode(self, observation):
"""Returns the agent's first action for this episode."""
self.action = super().begin_episode(observation)
self._auxiliary_reward = self._get_random_reward(
self.preprocess_fn(self.state))
return self.action
|
{
"content_hash": "80cadcd862ffd0dc8848e6018c125593",
"timestamp": "",
"source": "github",
"line_count": 350,
"max_line_length": 81,
"avg_line_length": 37.10857142857143,
"alnum_prop": 0.647828765013859,
"repo_name": "google-research/google-research",
"id": "723f12b0ebbb3259f16f716ec5341980520399cf",
"size": "13596",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "aux_tasks/auxiliary_mc/dqn_agent.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "9817"
},
{
"name": "C++",
"bytes": "4166670"
},
{
"name": "CMake",
"bytes": "6412"
},
{
"name": "CSS",
"bytes": "27092"
},
{
"name": "Cuda",
"bytes": "1431"
},
{
"name": "Dockerfile",
"bytes": "7145"
},
{
"name": "Gnuplot",
"bytes": "11125"
},
{
"name": "HTML",
"bytes": "77599"
},
{
"name": "ImageJ Macro",
"bytes": "50488"
},
{
"name": "Java",
"bytes": "487585"
},
{
"name": "JavaScript",
"bytes": "896512"
},
{
"name": "Julia",
"bytes": "67986"
},
{
"name": "Jupyter Notebook",
"bytes": "71290299"
},
{
"name": "Lua",
"bytes": "29905"
},
{
"name": "MATLAB",
"bytes": "103813"
},
{
"name": "Makefile",
"bytes": "5636"
},
{
"name": "NASL",
"bytes": "63883"
},
{
"name": "Perl",
"bytes": "8590"
},
{
"name": "Python",
"bytes": "53790200"
},
{
"name": "R",
"bytes": "101058"
},
{
"name": "Roff",
"bytes": "1208"
},
{
"name": "Rust",
"bytes": "2389"
},
{
"name": "Shell",
"bytes": "730444"
},
{
"name": "Smarty",
"bytes": "5966"
},
{
"name": "Starlark",
"bytes": "245038"
}
],
"symlink_target": ""
}
|
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
"""
Test the --install-sandbox commandline option for Install() and InstallAs().
"""
import os.path
import TestSCons
test = TestSCons.TestSCons()
test.subdir('install', 'subdir')
target = 'destination'
destdir = test.workpath( target )
_SUBDIR_file3_out = os.path.join('$SUBDIR', 'file3.out')
_SUBDIR_file3_in = os.path.join('$SUBDIR', 'file3.in')
target_file2_out = os.path.join(target, 'file2.out')
subdir_file3_in = os.path.join('subdir', 'file3.in')
target_subdir_file3_out = os.path.join(target, 'subdir', 'file3.out')
file1_out = target+os.path.join( target,
os.path.splitdrive(destdir)[1],
'file1.out' )
#
test.write('SConstruct', r"""
env = Environment(SUBDIR='subdir')
f1 = env.Install(r'%(destdir)s', 'file1.out')
f2 = env.InstallAs(['file2.out', r'%(_SUBDIR_file3_out)s'],
['file2.in', r'%(_SUBDIR_file3_in)s'])
env.Depends(f2, f1)
""" % locals())
test.write('file1.out', "file1.out\n")
test.write('file2.in', "file2.in\n")
test.write(['subdir', 'file3.in'], "subdir/file3.in\n")
expect = test.wrap_stdout("""\
Install file: "file1.out" as "%(file1_out)s"
Install file: "file2.in" as "%(target_file2_out)s"
Install file: "%(subdir_file3_in)s" as "%(target_subdir_file3_out)s"
""" % locals())
test.run(arguments = '--install-sandbox=%s' % destdir, stdout=expect)
test.must_match(file1_out, "file1.out\n")
test.must_match('destination/file2.out', "file2.in\n")
test.must_match('destination/subdir/file3.out', "subdir/file3.in\n")
#
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
{
"content_hash": "2203a20481782b0fea2a9c05db5ed9b4",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 76,
"avg_line_length": 30.086206896551722,
"alnum_prop": 0.6349570200573066,
"repo_name": "andrewyoung1991/scons",
"id": "020cd3acd55e3e351f19e1a9ae26fa24c9acc559",
"size": "2847",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "test/Install/option--install-sandbox.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "2437"
},
{
"name": "C",
"bytes": "746"
},
{
"name": "C++",
"bytes": "518"
},
{
"name": "CSS",
"bytes": "18502"
},
{
"name": "D",
"bytes": "1817"
},
{
"name": "DTrace",
"bytes": "180"
},
{
"name": "HTML",
"bytes": "857084"
},
{
"name": "Java",
"bytes": "6860"
},
{
"name": "JavaScript",
"bytes": "215495"
},
{
"name": "Makefile",
"bytes": "3795"
},
{
"name": "Perl",
"bytes": "44714"
},
{
"name": "Python",
"bytes": "7385906"
},
{
"name": "Ruby",
"bytes": "10888"
},
{
"name": "Shell",
"bytes": "52194"
},
{
"name": "XSLT",
"bytes": "7567242"
}
],
"symlink_target": ""
}
|
"""
Aliases so that a clear error message is displayed if someone
uses the old class names.
"""
from .pageobject import Page, Element, Elements, ElementMap
from .testcase import TestCase
# pylint: disable=invalid-name
# pylint: disable=too-few-public-methods
# pylint: disable=no-member
class Deprecated(object):
"""
meta class to create an object that throws a Syntax error on
construction
"""
def __new__(cls, *_):
raise SyntaxError(
"%s has been removed as of version 0.4. Use %s instead" % (
cls.cur, cls.alt.__name__
)
)
def construct_deprecated(name, alt):
"""
create a type for the alias
"""
doc = """Deprecated alias for :class:`%s`""" % alt.__name__
cls = type(name, (Deprecated, alt),
dict(cur=name, alt=alt, __doc__=doc))
return cls
PageObject = construct_deprecated("PageObject", Page)
PageElement = construct_deprecated("PageElement", Element)
PageElements = construct_deprecated("PageElements", Elements)
PageElementMap = construct_deprecated("PageElementMap", ElementMap)
HolmiumTestCase = construct_deprecated("HolmiumTestCase", TestCase)
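# Illustrative behaviour (not part of the module): constructing any deprecated
# alias fails loudly and points at the replacement, e.g.
#
#     PageObject(None)
#     # SyntaxError: PageObject has been removed as of version 0.4. Use Page instead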
|
{
"content_hash": "a431a0e35311140e93dabd9ef623999e",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 71,
"avg_line_length": 28.682926829268293,
"alnum_prop": 0.6641156462585034,
"repo_name": "tjlee/holmium.core",
"id": "1e8c5bc8bd267cdab9a180bc91ceb2b4bd7ae2b0",
"size": "1176",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "holmium/core/deprecated.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1410"
},
{
"name": "Cucumber",
"bytes": "2539"
},
{
"name": "HTML",
"bytes": "4271"
},
{
"name": "JavaScript",
"bytes": "400"
},
{
"name": "Python",
"bytes": "188644"
},
{
"name": "Shell",
"bytes": "987"
}
],
"symlink_target": ""
}
|
from collections import OrderedDict
import numpy as np
def getNames():
names = []
# 8
# sum quadrants
names.append('orders_longs_above')
names.append('orders_shorts_above')
names.append('positions_longs_above')
names.append('positions_shorts_above')
names.append('orders_longs_below')
names.append('orders_shorts_below')
names.append('positions_longs_below')
names.append('positions_shorts_below')
return names
def extractFeatures(data):
features = []
# order timestamps
data = OrderedDict(sorted(data.items()))
for timestamp, data in data.iteritems():
if type(data) is int:
raise Exception('Invalid file')
tmp = [0] * 8
rate_current = data['rate']
# sum quadrants
for rate_point, percs in data['price_points'].iteritems():
if float(rate_point) > rate_current:
tmp[0] += percs['ol']
tmp[1] += percs['os']
tmp[2] += percs['pl']
tmp[3] += percs['ps']
else:
tmp[4] += percs['ol']
tmp[5] += percs['os']
tmp[6] += percs['pl']
tmp[7] += percs['ps']
# add timestamp point to features
features.append(tmp)
return features
'''
u'1411548001': {
'rate': 1.2853,
'sums': {
'positions': {
'longs': {'below': 4.757999999999999, 'above': 51.35969999999999},
'shorts': {'below': 19.2438, 'above': 24.639399999999995}
},
'orders': {
'longs': {'below': 15.541600000000006, 'above': 16.711199999999995},
'shorts': {'below': 11.835200000000002, 'above': 55.6986}
}
}
},
'''
def calcRewards(data):
results = []
iMax = 5 * 1
rates = [v['rate'] for v in data.values()]
# print rates
for pos, rate in enumerate(rates):
tmp = [0]
for i in xrange(1, iMax):
index = pos + i
if index >= len(rates) - 2:
break
tmp.append(rates[index] - rate)
results.append(sum(tmp))
mean = np.mean([abs(r) for r in results])
print 'mean', round(mean, 4)
mean /= 1.25
# print 'halve-mean', round(mean, 4)
    rewards = ['long' if abs(r) > mean and r > 0
               else 'short' if abs(r) > mean and r < 0
               else 'none'
               for r in results]
return rewards
def getSplit(features, rewards, split=0.70):
# sk.cross_validation.train_test_split(
# features,
# rewards,
# test_size=0.30,
# random_state=0,
# )
size = len(features)
cutOff = int(size * split)
X_train = features[:cutOff]
X_test = features[cutOff:]
Y_train = rewards[:cutOff]
Y_test = rewards[cutOff:]
return X_train, X_test, Y_train, Y_test
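# Minimal usage sketch (toy data assumed, not real broker output): `sample`
# mirrors the {'rate', 'price_points'} layout consumed by extractFeatures()
# above; Python 2 is assumed, matching the iteritems/xrange/print usage here.
if __name__ == '__main__':
    sample = {
        u'1411548001': {'rate': 1.2853, 'price_points': {
            '1.2900': {'ol': 1.0, 'os': 2.0, 'pl': 3.0, 'ps': 4.0},
            '1.2800': {'ol': 0.5, 'os': 0.5, 'pl': 0.5, 'ps': 0.5}}},
        u'1411548002': {'rate': 1.2860, 'price_points': {
            '1.2900': {'ol': 1.0, 'os': 1.0, 'pl': 1.0, 'ps': 1.0}}},
    }
    features = extractFeatures(sample)
    rewards = calcRewards(sample)
    X_train, X_test, Y_train, Y_test = getSplit(features, rewards)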
|
{
"content_hash": "556f4124a3f8ac03b94def7d5d622c4b",
"timestamp": "",
"source": "github",
"line_count": 104,
"max_line_length": 118,
"avg_line_length": 27.067307692307693,
"alnum_prop": 0.5403197158081705,
"repo_name": "Tjorriemorrie/trading",
"id": "620f50571543c1753d4704a8b372354ea4a8680c",
"size": "3524",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "04_oanda/features.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "586"
},
{
"name": "HTML",
"bytes": "10059"
},
{
"name": "JavaScript",
"bytes": "1812"
},
{
"name": "Jupyter Notebook",
"bytes": "682876"
},
{
"name": "Less",
"bytes": "671"
},
{
"name": "M4",
"bytes": "18975"
},
{
"name": "Python",
"bytes": "636401"
},
{
"name": "Shell",
"bytes": "670"
},
{
"name": "q",
"bytes": "478327533"
}
],
"symlink_target": ""
}
|
from django.core.management.base import BaseCommand, CommandError
from conference import models
from conference import utils
from optparse import make_option
class Command(BaseCommand):
def add_arguments(self, parser):
# Positional arguments
parser.add_argument('conference')
# Named (optional) arguments
parser.add_argument(
'--missing-vote',
action='store',
dest='missing_vote',
default=0,
type=float,
help='Used when a user didn\'t vote a talk'
)
parser.add_argument(
'--show-input',
action='store_true',
dest='show_input',
default=False,
help='Show the input data piped to votengine',
)
def handle(self, *args, **options):
try:
conference = options['conference']
        except KeyError:
raise CommandError('conference not specified')
talks = models.Talk.objects\
.filter(conference=conference, status='proposed')
if options['show_input']:
print(utils._input_for_ranking_of_talks(talks, missing_vote=options['missing_vote']))
else:
qs = models.VotoTalk.objects\
.filter(talk__in=talks)\
.values('user')
votes = qs.count()
users = qs.distinct().count()
print(f'Talk voting results for {conference}: {talks.count()} talks / {users} users / {votes} votes')
print('')
print(f'Rank,TalkID,TalkType,TalkLanguage,TalkTitle,FirstSpeaker,AllSpeakers')
for ix, t in enumerate(utils.ranking_of_talks(talks, missing_vote=options['missing_vote'])):
speakers = [str(speaker) for speaker in list(t.get_all_speakers())]
first_speaker = speakers[0]
all_speakers = ', '.join(speakers)
print(f'{ix + 1},{t.id},{t.type},{t.language},"{t.title}","{first_speaker}","{all_speakers}"')
|
{
"content_hash": "9fbae0ed7ac4411194848018be0c8ed0",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 113,
"avg_line_length": 37.09090909090909,
"alnum_prop": 0.5681372549019608,
"repo_name": "EuroPython/epcon",
"id": "9e9bbf54772ee38f802306945ae7cfd98f1a115d",
"size": "2040",
"binary": false,
"copies": "1",
"ref": "refs/heads/ep2021",
"path": "conference/management/commands/ranking_of_talks.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "6475"
},
{
"name": "Dockerfile",
"bytes": "609"
},
{
"name": "HTML",
"bytes": "412025"
},
{
"name": "JavaScript",
"bytes": "421281"
},
{
"name": "Makefile",
"bytes": "4679"
},
{
"name": "Python",
"bytes": "991334"
},
{
"name": "Shell",
"bytes": "1182"
}
],
"symlink_target": ""
}
|
from trt_layer_auto_scan_test import TrtLayerAutoScanTest
from program_config import TensorConfig, ProgramConfig
import numpy as np
import paddle.inference as paddle_infer
from functools import partial
from typing import List
import unittest
class TrtConvertHardSigmoidTest_dim_2(TrtLayerAutoScanTest):
def is_program_valid(self, program_config: ProgramConfig) -> bool:
return True
def sample_program_configs(self):
def generate_input(shape):
return np.random.random(shape).astype(np.float32)
for batch in [1, 4]:
for shape in [[batch, 32], [batch, 16, 32], [batch, 32, 16, 128]]:
self.input_dim = len(shape)
for slope in [0.1, 0.5]:
for offset in [0.2, 0.7]:
dics = [{"slope": slope, "offset": offset}]
ops_config = [
{
"op_type": "hard_sigmoid",
"op_inputs": {
"X": ["input_data"],
},
"op_outputs": {"Out": ["output_data"]},
"op_attrs": dics[0],
}
]
ops = self.generate_op_config(ops_config)
program_config = ProgramConfig(
ops=ops,
weights={},
inputs={
"input_data": TensorConfig(
data_gen=partial(generate_input, shape)
)
},
outputs=["output_data"],
)
yield program_config
def sample_predictor_configs(
self, program_config
) -> (paddle_infer.Config, List[int], float):
def generate_dynamic_shape(attrs):
if self.input_dim == 2:
self.dynamic_shape.min_input_shape = {"input_data": [1, 8]}
self.dynamic_shape.max_input_shape = {"input_data": [4, 32]}
self.dynamic_shape.opt_input_shape = {"input_data": [2, 16]}
elif self.input_dim == 3:
self.dynamic_shape.min_input_shape = {"input_data": [1, 8, 8]}
self.dynamic_shape.max_input_shape = {"input_data": [4, 16, 32]}
self.dynamic_shape.opt_input_shape = {"input_data": [4, 16, 32]}
elif self.input_dim == 4:
self.dynamic_shape.min_input_shape = {
"input_data": [1, 8, 8, 4]
}
self.dynamic_shape.max_input_shape = {
"input_data": [4, 32, 16, 128]
}
self.dynamic_shape.opt_input_shape = {
"input_data": [4, 32, 16, 128]
}
def clear_dynamic_shape():
self.dynamic_shape.max_input_shape = {}
self.dynamic_shape.min_input_shape = {}
self.dynamic_shape.opt_input_shape = {}
attrs = [
program_config.ops[i].attrs for i in range(len(program_config.ops))
]
# for static_shape
clear_dynamic_shape()
self.trt_param.precision = paddle_infer.PrecisionType.Float32
yield self.create_inference_config(), (1, 2), 1e-5
self.trt_param.precision = paddle_infer.PrecisionType.Half
yield self.create_inference_config(), (1, 2), 1e-3
# for dynamic_shape
generate_dynamic_shape(attrs)
self.trt_param.precision = paddle_infer.PrecisionType.Float32
yield self.create_inference_config(), (1, 2), 1e-5
self.trt_param.precision = paddle_infer.PrecisionType.Half
yield self.create_inference_config(), (1, 2), 1e-3
def test(self):
self.run_test()
if __name__ == "__main__":
unittest.main()
|
{
"content_hash": "544bb1c31d6145e62a976fcee54120db",
"timestamp": "",
"source": "github",
"line_count": 100,
"max_line_length": 80,
"avg_line_length": 39.89,
"alnum_prop": 0.48383053396841313,
"repo_name": "luotao1/Paddle",
"id": "3c8cd8948f4153d01987ab36cf40ebf23633fa30",
"size": "4600",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "python/paddle/fluid/tests/unittests/ir/inference/test_trt_convert_hard_sigmoid.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "58544"
},
{
"name": "C",
"bytes": "210300"
},
{
"name": "C++",
"bytes": "36771446"
},
{
"name": "CMake",
"bytes": "903079"
},
{
"name": "Cuda",
"bytes": "5200715"
},
{
"name": "Dockerfile",
"bytes": "4361"
},
{
"name": "Go",
"bytes": "49796"
},
{
"name": "Java",
"bytes": "16630"
},
{
"name": "Jinja",
"bytes": "23852"
},
{
"name": "MLIR",
"bytes": "39982"
},
{
"name": "Python",
"bytes": "36248258"
},
{
"name": "R",
"bytes": "1332"
},
{
"name": "Shell",
"bytes": "553175"
}
],
"symlink_target": ""
}
|
"""
Views for managing Neutron Routers.
"""
from django.core.urlresolvers import reverse_lazy
from django.utils.datastructures import SortedDict
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import forms
from horizon import tables
from horizon import tabs
from horizon.utils import memoized
from openstack_dashboard import api
from openstack_dashboard.dashboards.project.routers\
import forms as project_forms
from openstack_dashboard.dashboards.project.routers import tables as rtables
from openstack_dashboard.dashboards.project.routers import tabs as rdtabs
class IndexView(tables.DataTableView):
table_class = rtables.RoutersTable
template_name = 'project/routers/index.html'
def _get_routers(self, search_opts=None):
try:
tenant_id = self.request.user.tenant_id
routers = api.neutron.router_list(self.request,
tenant_id=tenant_id,
search_opts=search_opts)
except Exception:
routers = []
exceptions.handle(self.request,
_('Unable to retrieve router list.'))
ext_net_dict = self._list_external_networks()
for r in routers:
r.set_id_as_name_if_empty()
self._set_external_network(r, ext_net_dict)
return routers
def get_data(self):
routers = self._get_routers()
return routers
def _list_external_networks(self):
try:
search_opts = {'router:external': True}
ext_nets = api.neutron.network_list(self.request,
**search_opts)
for ext_net in ext_nets:
ext_net.set_id_as_name_if_empty()
ext_net_dict = SortedDict((n['id'], n.name) for n in ext_nets)
except Exception as e:
msg = _('Unable to retrieve a list of external networks "%s".') % e
exceptions.handle(self.request, msg)
ext_net_dict = {}
return ext_net_dict
def _set_external_network(self, router, ext_net_dict):
gateway_info = router.external_gateway_info
if gateway_info:
ext_net_id = gateway_info['network_id']
if ext_net_id in ext_net_dict:
gateway_info['network'] = ext_net_dict[ext_net_id]
else:
msg = _('External network "%s" not found.') % (ext_net_id)
exceptions.handle(self.request, msg)
class DetailView(tabs.TabbedTableView):
tab_group_class = rdtabs.RouterDetailTabs
template_name = 'project/routers/detail.html'
failure_url = reverse_lazy('horizon:project:routers:index')
@memoized.memoized_method
def _get_data(self):
try:
router_id = self.kwargs['router_id']
router = api.neutron.router_get(self.request, router_id)
router.set_id_as_name_if_empty(length=0)
except Exception:
msg = _('Unable to retrieve details for router "%s".') \
% (router_id)
exceptions.handle(self.request, msg, redirect=self.failure_url)
if router.external_gateway_info:
ext_net_id = router.external_gateway_info['network_id']
try:
ext_net = api.neutron.network_get(self.request, ext_net_id,
expand_subnet=False)
ext_net.set_id_as_name_if_empty(length=0)
router.external_gateway_info['network'] = ext_net.name
except Exception:
msg = _('Unable to retrieve an external network "%s".') \
% (ext_net_id)
exceptions.handle(self.request, msg)
router.external_gateway_info['network'] = ext_net_id
return router
def get_context_data(self, **kwargs):
context = super(DetailView, self).get_context_data(**kwargs)
context["router"] = self._get_data()
return context
def get(self, request, *args, **kwargs):
router = self._get_data()
self.kwargs['router'] = router
return super(DetailView, self).get(request, *args, **kwargs)
class CreateView(forms.ModalFormView):
form_class = project_forms.CreateForm
template_name = 'project/routers/create.html'
success_url = reverse_lazy("horizon:project:routers:index")
|
{
"content_hash": "28cd584f7e4e9b6879fa384aa2581732",
"timestamp": "",
"source": "github",
"line_count": 115,
"max_line_length": 79,
"avg_line_length": 38.76521739130435,
"alnum_prop": 0.5975773889636609,
"repo_name": "JioCloud/horizon",
"id": "68977115d00f16bd03727a4c863cd56f7afe9ae0",
"size": "5125",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "openstack_dashboard/dashboards/project/routers/views.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "296932"
},
{
"name": "JavaScript",
"bytes": "713370"
},
{
"name": "Python",
"bytes": "3614755"
},
{
"name": "Shell",
"bytes": "15387"
}
],
"symlink_target": ""
}
|
import sys
import os
import shlex
import sphinx_rtd_theme
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('../'))
sys.path.append("../")
sys.path.append(".")
#using this to mock modules for Read the Docs
if sys.version_info < (3,):
from mock import Mock as MagicMock
else:
from unittest.mock import MagicMock # added to unittest in python 3.3
class Mock(MagicMock):
@classmethod
def __getattr__(cls, name):
return Mock()
MOCK_MODULES = ['paramiko']
sys.modules.update((mod_name, Mock()) for mod_name in MOCK_MODULES)
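# With the mock registered, "import paramiko" inside autodoc'd modules resolves
# to a Mock instance, and any attribute access on it (e.g. paramiko.SSHClient)
# returns another Mock, so the docs build succeeds without the real package.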
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.todo',
'sphinx.ext.viewcode',
'mockautodoc',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'computer'
copyright = u'2016, Author'
author = u'Author'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = ''
# The full version, including alpha/beta/rc tags.
release = ''
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = 'en'
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
mockautodoc = {
'mockimport': ['paramiko','getpass', 'subprocess', 'signal'],
}
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
# html_theme = 'alabaster'
html_theme = "sphinx_rtd_theme"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'computerdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'computer.tex', u'computer Documentation',
u'Author', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'computer', u'computer Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'computer', u'computer Documentation',
author, 'computer', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright
# The basename for the epub file. It defaults to the project name.
#epub_basename = project
# The HTML theme for the epub output. Since the default themes are not optimized
# for small screen space, using the same theme for HTML and epub output is
# usually not wise. This defaults to 'epub', a theme designed to save visual
# space.
#epub_theme = 'epub'
# The language of the text. It defaults to the language option
# or 'en' if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()
# A sequence of (type, uri, title) tuples for the guide element of content.opf.
#epub_guide = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files shat should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
# Choose between 'default' and 'includehidden'.
#epub_tocscope = 'default'
# Fix unsupported image types using the Pillow.
#epub_fix_images = False
# Scale large images.
#epub_max_image_width = 0
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#epub_show_urls = 'inline'
# If false, no index is generated.
#epub_use_index = True
|
{
"content_hash": "dc1f0822ed70ff391e83e543046e41c5",
"timestamp": "",
"source": "github",
"line_count": 369,
"max_line_length": 80,
"avg_line_length": 31.127371273712736,
"alnum_prop": 0.7032909629113704,
"repo_name": "jrising/computer",
"id": "45d2e02c3cce868f885f6b52de549e327c544b0d",
"size": "11907",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/conf.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "33383"
}
],
"symlink_target": ""
}
|
import sys
import logging
import random
from time import sleep
logging.basicConfig(filename='random_delay_middleware.log', level=logging.DEBUG)
logging.debug('Random delay middleware is called')
# set delay to random value less than one second
SLEEP_SECS = random.random()
def main():
data = sys.stdin.readlines()
    # the payload is a JSON string on a single line, so only the first line matters
payload = data[0]
logging.debug("sleeping for %s seconds" % SLEEP_SECS)
sleep(SLEEP_SECS)
    # not modifying the payload, returning it unchanged
print(payload)
if __name__ == "__main__":
main()
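# Descriptive note (based on Hoverfly's middleware convention, assumed here):
# Hoverfly pipes the request/response pair to this script as one JSON line on
# stdin and reads the (possibly modified) JSON back from stdout; this script
# only delays and then echoes the payload unchanged.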
|
{
"content_hash": "3ac621c5c5f72669bdc85845e16395ba",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 80,
"avg_line_length": 23.615384615384617,
"alnum_prop": 0.7052117263843648,
"repo_name": "SpectoLabs/hoverfly",
"id": "be14d352cb5bdec3df2d5ed511099268909d71f1",
"size": "636",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "functional-tests/hoverctl/testdata/add_random_delay.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "435"
},
{
"name": "Go",
"bytes": "1926816"
},
{
"name": "Makefile",
"bytes": "2348"
},
{
"name": "Shell",
"bytes": "2071"
}
],
"symlink_target": ""
}
|
from functools import wraps
from flask import abort
from flask.ext.login import current_user
from .models import Permission
def permission_required(permission):
def decorator(f):
@wraps(f)
        def decorated_function(*args, **kwargs):
            if not current_user.can(permission):
                abort(403)
            return f(*args, **kwargs)
return decorated_function
return decorator
def admin_required(f):
return permission_required(Permission.ADMINISTER)(f)
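# Usage sketch (hypothetical blueprint and view, not part of this module):
#
#     @main.route('/admin')
#     @login_required
#     @admin_required
#     def for_admins_only():
#         return "For administrators!"
#
# A request from a user lacking Permission.ADMINISTER is aborted with 403.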
|
{
"content_hash": "bea28d571e7a33016ea5985fb0da7a1a",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 53,
"avg_line_length": 27,
"alnum_prop": 0.7363834422657952,
"repo_name": "HeathKang/flasky",
"id": "5abb15eefa68c959bb82f9d489896d2a2603a834",
"size": "503",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/decorators.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "7591"
},
{
"name": "HTML",
"bytes": "538278"
},
{
"name": "JavaScript",
"bytes": "20876"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "91046"
}
],
"symlink_target": ""
}
|
"""HTTP server base class.
Note: the class in this module doesn't implement any HTTP request; see
SimpleHTTPServer for simple implementations of GET, HEAD and POST
(including CGI scripts). It does, however, optionally implement HTTP/1.1
persistent connections, as of version 0.3.
Contents:
- BaseHTTPRequestHandler: HTTP request handler base class
- test: test function
XXX To do:
- log requests even later (to capture byte count)
- log user-agent header and other interesting goodies
- send error log to separate file
"""
# See also:
#
# HTTP Working Group T. Berners-Lee
# INTERNET-DRAFT R. T. Fielding
# <draft-ietf-http-v10-spec-00.txt> H. Frystyk Nielsen
# Expires September 8, 1995 March 8, 1995
#
# URL: http://www.ics.uci.edu/pub/ietf/http/draft-ietf-http-v10-spec-00.txt
#
# and
#
# Network Working Group R. Fielding
# Request for Comments: 2616 et al
# Obsoletes: 2068 June 1999
# Category: Standards Track
#
# URL: http://www.faqs.org/rfcs/rfc2616.html
# Log files
# ---------
#
# Here's a quote from the NCSA httpd docs about log file format.
#
# | The logfile format is as follows. Each line consists of:
# |
# | host rfc931 authuser [DD/Mon/YYYY:hh:mm:ss] "request" ddd bbbb
# |
# | host: Either the DNS name or the IP number of the remote client
# | rfc931: Any information returned by identd for this person,
# | - otherwise.
# | authuser: If user sent a userid for authentication, the user name,
# | - otherwise.
# | DD: Day
# | Mon: Month (calendar name)
# | YYYY: Year
# | hh: hour (24-hour format, the machine's timezone)
# | mm: minutes
# | ss: seconds
# | request: The first line of the HTTP request as sent by the client.
# | ddd: the status code returned by the server, - if not available.
# | bbbb: the total number of bytes sent,
# | *not including the HTTP/1.0 header*, - if not available
# |
# | You can determine the name of the file accessed through request.
#
# (Actually, the latter is only true if you know the server configuration
# at the time the request was made!)
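# For illustration (an assumed example, not taken from the NCSA docs), a line
# written by BaseHTTPRequestHandler.log_message() below looks like:
#
#   127.0.0.1 - - [11/Oct/2016 14:32:01] "GET /index.html HTTP/1.1" 200 -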
__version__ = "0.3"
__all__ = ["HTTPServer", "BaseHTTPRequestHandler"]
import sys
import time
import socket # For gethostbyaddr()
from warnings import filterwarnings, catch_warnings
with catch_warnings():
if sys.py3kwarning:
filterwarnings("ignore", ".*mimetools has been removed",
DeprecationWarning)
import mimetools
import SocketServer
# Default error message template
DEFAULT_ERROR_MESSAGE = """\
<head>
<title>Error response</title>
</head>
<body>
<h1>Error response</h1>
<p>Error code %(code)d.
<p>Message: %(message)s.
<p>Error code explanation: %(code)s = %(explain)s.
</body>
"""
DEFAULT_ERROR_CONTENT_TYPE = "text/html"
def _quote_html(html):
    return html.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;")
class HTTPServer(SocketServer.TCPServer):
allow_reuse_address = 1 # Seems to make sense in testing environment
def server_bind(self):
"""Override server_bind to store the server name."""
SocketServer.TCPServer.server_bind(self)
try:
host, port = self.socket.getsockname()[:2]
self.server_name = socket.getfqdn(host)
self.server_port = port
except socket.error:
pass
def server_activate(self):
SocketServer.TCPServer.server_activate(self)
# Adding a second call to getsockname() because of this issue
# http://wiki.python.org/jython/NewSocketModule#Deferredsocketcreationonjython
host, port = self.socket.getsockname()[:2]
self.server_name = socket.getfqdn(host)
self.server_port = port
class BaseHTTPRequestHandler(SocketServer.StreamRequestHandler):
"""HTTP request handler base class.
The following explanation of HTTP serves to guide you through the
code as well as to expose any misunderstandings I may have about
HTTP (so you don't need to read the code to figure out I'm wrong
:-).
HTTP (HyperText Transfer Protocol) is an extensible protocol on
top of a reliable stream transport (e.g. TCP/IP). The protocol
recognizes three parts to a request:
1. One line identifying the request type and path
2. An optional set of RFC-822-style headers
3. An optional data part
The headers and data are separated by a blank line.
The first line of the request has the form
<command> <path> <version>
where <command> is a (case-sensitive) keyword such as GET or POST,
<path> is a string containing path information for the request,
and <version> should be the string "HTTP/1.0" or "HTTP/1.1".
<path> is encoded using the URL encoding scheme (using %xx to signify
the ASCII character with hex code xx).
The specification specifies that lines are separated by CRLF but
for compatibility with the widest range of clients recommends
servers also handle LF. Similarly, whitespace in the request line
is treated sensibly (allowing multiple spaces between components
and allowing trailing whitespace).
Similarly, for output, lines ought to be separated by CRLF pairs
but most clients grok LF characters just fine.
If the first line of the request has the form
<command> <path>
(i.e. <version> is left out) then this is assumed to be an HTTP
0.9 request; this form has no optional headers and data part and
the reply consists of just the data.
The reply form of the HTTP 1.x protocol again has three parts:
1. One line giving the response code
2. An optional set of RFC-822-style headers
3. The data
Again, the headers and data are separated by a blank line.
The response code line has the form
<version> <responsecode> <responsestring>
where <version> is the protocol version ("HTTP/1.0" or "HTTP/1.1"),
<responsecode> is a 3-digit response code indicating success or
failure of the request, and <responsestring> is an optional
human-readable string explaining what the response code means.
This server parses the request and the headers, and then calls a
function specific to the request type (<command>). Specifically,
a request SPAM will be handled by a method do_SPAM(). If no
such method exists the server sends an error response to the
client. If it exists, it is called with no arguments:
do_SPAM()
Note that the request name is case sensitive (i.e. SPAM and spam
are different requests).
The various request details are stored in instance variables:
- client_address is the client IP address in the form (host,
port);
- command, path and version are the broken-down request line;
- headers is an instance of mimetools.Message (or a derived
class) containing the header information;
- rfile is a file object open for reading positioned at the
start of the optional input data part;
- wfile is a file object open for writing.
IT IS IMPORTANT TO ADHERE TO THE PROTOCOL FOR WRITING!
The first thing to be written must be the response line. Then
follow 0 or more header lines, then a blank line, and then the
actual data (if any). The meaning of the header lines depends on
the command executed by the server; in most cases, when data is
returned, there should be at least one header line of the form
Content-type: <type>/<subtype>
where <type> and <subtype> should be registered MIME types,
e.g. "text/html" or "text/plain".
"""
# The Python system version, truncated to its first component.
sys_version = "Python/" + sys.version.split()[0]
# The server software version. You may want to override this.
# The format is multiple whitespace-separated strings,
# where each string is of the form name[/version].
server_version = "BaseHTTP/" + __version__
# The default request version. This only affects responses up until
# the point where the request line is parsed, so it mainly decides what
# the client gets back when sending a malformed request line.
# Most web servers default to HTTP 0.9, i.e. don't send a status line.
default_request_version = "HTTP/0.9"
def parse_request(self):
"""Parse a request (internal).
The request should be stored in self.raw_requestline; the results
are in self.command, self.path, self.request_version and
self.headers.
Return True for success, False for failure; on failure, an
error is sent back.
"""
self.command = None # set in case of error on the first line
self.request_version = version = self.default_request_version
self.close_connection = 1
requestline = self.raw_requestline
requestline = requestline.rstrip('\r\n')
self.requestline = requestline
words = requestline.split()
if len(words) == 3:
command, path, version = words
if version[:5] != 'HTTP/':
self.send_error(400, "Bad request version (%r)" % version)
return False
try:
base_version_number = version.split('/', 1)[1]
version_number = base_version_number.split(".")
# RFC 2145 section 3.1 says there can be only one "." and
# - major and minor numbers MUST be treated as
# separate integers;
# - HTTP/2.4 is a lower version than HTTP/2.13, which in
# turn is lower than HTTP/12.3;
# - Leading zeros MUST be ignored by recipients.
if len(version_number) != 2:
raise ValueError
version_number = int(version_number[0]), int(version_number[1])
except (ValueError, IndexError):
self.send_error(400, "Bad request version (%r)" % version)
return False
if version_number >= (1, 1) and self.protocol_version >= "HTTP/1.1":
self.close_connection = 0
if version_number >= (2, 0):
self.send_error(505,
"Invalid HTTP Version (%s)" % base_version_number)
return False
elif len(words) == 2:
command, path = words
self.close_connection = 1
if command != 'GET':
self.send_error(400,
"Bad HTTP/0.9 request type (%r)" % command)
return False
elif not words:
return False
else:
self.send_error(400, "Bad request syntax (%r)" % requestline)
return False
self.command, self.path, self.request_version = command, path, version
# Examine the headers and look for a Connection directive
self.headers = self.MessageClass(self.rfile, 0)
conntype = self.headers.get('Connection', "")
if conntype.lower() == 'close':
self.close_connection = 1
elif (conntype.lower() == 'keep-alive' and
self.protocol_version >= "HTTP/1.1"):
self.close_connection = 0
return True
def handle_one_request(self):
"""Handle a single HTTP request.
You normally don't need to override this method; see the class
__doc__ string for information on how to handle specific HTTP
commands such as GET and POST.
"""
try:
self.raw_requestline = self.rfile.readline(65537)
if len(self.raw_requestline) > 65536:
self.requestline = ''
self.request_version = ''
self.command = ''
self.send_error(414)
return
if not self.raw_requestline:
self.close_connection = 1
return
if not self.parse_request():
# An error code has been sent, just exit
return
mname = 'do_' + self.command
if not hasattr(self, mname):
self.send_error(501, "Unsupported method (%r)" % self.command)
return
method = getattr(self, mname)
method()
self.wfile.flush() #actually send the response if not already done.
except socket.timeout, e:
#a read or a write timed out. Discard this connection
self.log_error("Request timed out: %r", e)
self.close_connection = 1
return
def handle(self):
"""Handle multiple requests if necessary."""
self.close_connection = 1
self.handle_one_request()
while not self.close_connection:
self.handle_one_request()
def send_error(self, code, message=None):
"""Send and log an error reply.
Arguments are the error code, and a detailed message.
The detailed message defaults to the short entry matching the
response code.
This sends an error response (so it must be called before any
output has been generated), logs the error, and finally sends
a piece of HTML explaining the error to the user.
"""
try:
short, long = self.responses[code]
except KeyError:
short, long = '???', '???'
if message is None:
message = short
explain = long
self.log_error("code %d, message %s", code, message)
# using _quote_html to prevent Cross Site Scripting attacks (see bug #1100201)
content = (self.error_message_format %
{'code': code, 'message': _quote_html(message), 'explain': explain})
self.send_response(code, message)
self.send_header("Content-Type", self.error_content_type)
self.send_header('Connection', 'close')
self.end_headers()
if self.command != 'HEAD' and code >= 200 and code not in (204, 304):
self.wfile.write(content)
error_message_format = DEFAULT_ERROR_MESSAGE
error_content_type = DEFAULT_ERROR_CONTENT_TYPE
def send_response(self, code, message=None):
"""Send the response header and log the response code.
Also send two standard headers with the server software
version and the current date.
"""
self.log_request(code)
if message is None:
if code in self.responses:
message = self.responses[code][0]
else:
message = ''
if self.request_version != 'HTTP/0.9':
self.wfile.write("%s %d %s\r\n" %
(self.protocol_version, code, message))
# print (self.protocol_version, code, message)
self.send_header('Server', self.version_string())
self.send_header('Date', self.date_time_string())
def send_header(self, keyword, value):
"""Send a MIME header."""
if self.request_version != 'HTTP/0.9':
self.wfile.write("%s: %s\r\n" % (keyword, value))
if keyword.lower() == 'connection':
if value.lower() == 'close':
self.close_connection = 1
elif value.lower() == 'keep-alive':
self.close_connection = 0
def end_headers(self):
"""Send the blank line ending the MIME headers."""
if self.request_version != 'HTTP/0.9':
self.wfile.write("\r\n")
def log_request(self, code='-', size='-'):
"""Log an accepted request.
This is called by send_response().
"""
self.log_message('"%s" %s %s',
self.requestline, str(code), str(size))
def log_error(self, format, *args):
"""Log an error.
This is called when a request cannot be fulfilled. By
default it passes the message on to log_message().
Arguments are the same as for log_message().
XXX This should go to the separate error log.
"""
self.log_message(format, *args)
def log_message(self, format, *args):
"""Log an arbitrary message.
This is used by all other logging functions. Override
it if you have specific logging wishes.
The first argument, FORMAT, is a format string for the
message to be logged. If the format string contains
any % escapes requiring parameters, they should be
specified as subsequent arguments (it's just like
printf!).
The client ip address and current date/time are prefixed to every
message.
"""
sys.stderr.write("%s - - [%s] %s\n" %
(self.client_address[0],
self.log_date_time_string(),
format%args))
def version_string(self):
"""Return the server software version string."""
return self.server_version + ' ' + self.sys_version
def date_time_string(self, timestamp=None):
"""Return the current date and time formatted for a message header."""
if timestamp is None:
timestamp = time.time()
year, month, day, hh, mm, ss, wd, y, z = time.gmtime(timestamp)
s = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % (
self.weekdayname[wd],
day, self.monthname[month], year,
hh, mm, ss)
return s
def log_date_time_string(self):
"""Return the current time formatted for logging."""
now = time.time()
year, month, day, hh, mm, ss, x, y, z = time.localtime(now)
s = "%02d/%3s/%04d %02d:%02d:%02d" % (
day, self.monthname[month], year, hh, mm, ss)
return s
weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
monthname = [None,
'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
def address_string(self):
"""Return the client address formatted for logging.
This version looks up the full hostname using gethostbyaddr(),
and tries to find a name that contains at least one dot.
"""
host, port = self.client_address[:2]
return socket.getfqdn(host)
# Essentially static class variables
# The version of the HTTP protocol we support.
# Set this to HTTP/1.1 to enable automatic keepalive
protocol_version = "HTTP/1.0"
# The Message-like class used to parse headers
MessageClass = mimetools.Message
# Table mapping response codes to messages; entries have the
# form {code: (shortmessage, longmessage)}.
# See RFC 2616.
responses = {
100: ('Continue', 'Request received, please continue'),
101: ('Switching Protocols',
'Switching to new protocol; obey Upgrade header'),
200: ('OK', 'Request fulfilled, document follows'),
201: ('Created', 'Document created, URL follows'),
202: ('Accepted',
'Request accepted, processing continues off-line'),
203: ('Non-Authoritative Information', 'Request fulfilled from cache'),
204: ('No Content', 'Request fulfilled, nothing follows'),
205: ('Reset Content', 'Clear input form for further input.'),
206: ('Partial Content', 'Partial content follows.'),
300: ('Multiple Choices',
'Object has several resources -- see URI list'),
301: ('Moved Permanently', 'Object moved permanently -- see URI list'),
302: ('Found', 'Object moved temporarily -- see URI list'),
303: ('See Other', 'Object moved -- see Method and URL list'),
304: ('Not Modified',
'Document has not changed since given time'),
305: ('Use Proxy',
'You must use proxy specified in Location to access this '
'resource.'),
307: ('Temporary Redirect',
'Object moved temporarily -- see URI list'),
400: ('Bad Request',
'Bad request syntax or unsupported method'),
401: ('Unauthorized',
'No permission -- see authorization schemes'),
402: ('Payment Required',
'No payment -- see charging schemes'),
403: ('Forbidden',
'Request forbidden -- authorization will not help'),
404: ('Not Found', 'Nothing matches the given URI'),
405: ('Method Not Allowed',
'Specified method is invalid for this resource.'),
406: ('Not Acceptable', 'URI not available in preferred format.'),
407: ('Proxy Authentication Required', 'You must authenticate with '
'this proxy before proceeding.'),
408: ('Request Timeout', 'Request timed out; try again later.'),
409: ('Conflict', 'Request conflict.'),
410: ('Gone',
'URI no longer exists and has been permanently removed.'),
411: ('Length Required', 'Client must specify Content-Length.'),
412: ('Precondition Failed', 'Precondition in headers is false.'),
413: ('Request Entity Too Large', 'Entity is too large.'),
414: ('Request-URI Too Long', 'URI is too long.'),
415: ('Unsupported Media Type', 'Entity body in unsupported format.'),
416: ('Requested Range Not Satisfiable',
'Cannot satisfy request range.'),
417: ('Expectation Failed',
'Expect condition could not be satisfied.'),
500: ('Internal Server Error', 'Server got itself in trouble'),
501: ('Not Implemented',
'Server does not support this operation'),
502: ('Bad Gateway', 'Invalid responses from another server/proxy.'),
503: ('Service Unavailable',
'The server cannot process the request due to a high load'),
504: ('Gateway Timeout',
'The gateway server did not receive a timely response'),
505: ('HTTP Version Not Supported', 'Cannot fulfill request.'),
}
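# Illustrative subclass sketch (an assumption for documentation purposes, not
# part of this module): do_GET follows the protocol spelled out in the class
# docstring -- response line first, then headers, a blank line, then the data.
class _ExampleGETHandler(BaseHTTPRequestHandler):
    def do_GET(self):
        self.send_response(200)
        self.send_header("Content-type", "text/plain")
        self.end_headers()
        self.wfile.write("hello\n")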
def test(HandlerClass = BaseHTTPRequestHandler,
ServerClass = HTTPServer, protocol="HTTP/1.0"):
"""Test the HTTP request handler class.
This runs an HTTP server on port 8000 (or the first command line
argument).
"""
if sys.argv[1:]:
port = int(sys.argv[1])
else:
port = 8000
server_address = ('', port)
HandlerClass.protocol_version = protocol
httpd = ServerClass(server_address, HandlerClass)
sa = httpd.socket.getsockname()
print "Serving HTTP on", sa[0], "port", sa[1], "..."
httpd.serve_forever()
if __name__ == '__main__':
test()
|
{
"content_hash": "777d26e966c8cc5cd6377c67a528f46a",
"timestamp": "",
"source": "github",
"line_count": 614,
"max_line_length": 87,
"avg_line_length": 37.30456026058632,
"alnum_prop": 0.6037982973149967,
"repo_name": "nelmiux/CarnotKE",
"id": "4e7ef9338fe77f110316df75d309a49a73e0985f",
"size": "22905",
"binary": false,
"copies": "17",
"ref": "refs/heads/master",
"path": "jyhton/Lib/BaseHTTPServer.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "1605"
},
{
"name": "Batchfile",
"bytes": "23996"
},
{
"name": "C",
"bytes": "2514"
},
{
"name": "CSS",
"bytes": "83366"
},
{
"name": "GAP",
"bytes": "129850"
},
{
"name": "Groff",
"bytes": "42"
},
{
"name": "HTML",
"bytes": "12867403"
},
{
"name": "Java",
"bytes": "16007057"
},
{
"name": "JavaScript",
"bytes": "11934"
},
{
"name": "Makefile",
"bytes": "2261"
},
{
"name": "PLSQL",
"bytes": "45772"
},
{
"name": "Perl",
"bytes": "9821"
},
{
"name": "Python",
"bytes": "41375827"
},
{
"name": "R",
"bytes": "2740"
},
{
"name": "Shell",
"bytes": "70220"
},
{
"name": "Visual Basic",
"bytes": "962"
},
{
"name": "XSLT",
"bytes": "218435"
}
],
"symlink_target": ""
}
|
from app import globvars
from app.entity.human.behavior.health_behavior import HealthBehavior
from app.entity.human.behavior.simple_behavior import SimpleBehavior
from app.entity.human.behavior.temperature_behavior import TemperatureBehavior
from app.entity.human.human import Human
from app.entity.human.human_body import HumanBody
from app.entity.human.human_data import HumanData
from core.entity.imovable import IMovable
from core.entity.visual_entity import VisualEntity
from core.geometry import convert
class VisualHuman(Human, VisualEntity, IMovable):
def __init__(self, body, human_data=None):
# type: (HumanBody, HumanData) -> None
Human.__init__(self, human_data)
VisualEntity.__init__(self, body)
IMovable.__init__(self)
self.behavior = SimpleBehavior(self)
self.current_cell = None
def move(self, dx, dy):
# type: (float, float) -> None
if dx != 0 or dy != 0:
self.body.position.move(dx, dy)
def update(self, delta):
Human.update(self, delta)
IMovable.update(self, delta)
self.behavior.update(delta)
pos = convert.pixel_to_line(
self.body.position.x - globvars.cell_size,
self.body.position.y - globvars.cell_size,
globvars.cell_size,
globvars.grid_width)
planet_area = globvars.game.planet_area
if 0 <= pos < len(planet_area):
self.current_cell = planet_area[pos]
else:
self.current_cell = None
TemperatureBehavior.update(self)
HealthBehavior.update(self)
|
{
"content_hash": "8308e396d0c004081d34c5ef3ac4ee4d",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 78,
"avg_line_length": 31.666666666666668,
"alnum_prop": 0.6625386996904025,
"repo_name": "Diralf/evolution",
"id": "65b699600ed0e89220e2cd77e04a6d3c9b1190b1",
"size": "1615",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/entity/human/visual_human.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "62541"
}
],
"symlink_target": ""
}
|
import os
# try/except added for compatibility with python < 3.8
try:
from unittest import mock
from unittest.mock import AsyncMock # pragma: NO COVER
except ImportError: # pragma: NO COVER
import mock
import math
from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template
from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
import google.auth
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.oauth2 import service_account
from google.protobuf import timestamp_pb2 # type: ignore
import grpc
from grpc.experimental import aio
from proto.marshal.rules import wrappers
from proto.marshal.rules.dates import DurationRule, TimestampRule
import pytest
from google.cloud.billing_v1.services.cloud_catalog import (
CloudCatalogAsyncClient,
CloudCatalogClient,
pagers,
transports,
)
from google.cloud.billing_v1.types import cloud_catalog
def client_cert_source_callback():
return b"cert bytes", b"key bytes"
# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint(client):
return (
"foo.googleapis.com"
if ("localhost" in client.DEFAULT_ENDPOINT)
else client.DEFAULT_ENDPOINT
)
def test__get_default_mtls_endpoint():
api_endpoint = "example.googleapis.com"
api_mtls_endpoint = "example.mtls.googleapis.com"
sandbox_endpoint = "example.sandbox.googleapis.com"
sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
non_googleapi = "api.example.com"
assert CloudCatalogClient._get_default_mtls_endpoint(None) is None
assert (
CloudCatalogClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
)
assert (
CloudCatalogClient._get_default_mtls_endpoint(api_mtls_endpoint)
== api_mtls_endpoint
)
assert (
CloudCatalogClient._get_default_mtls_endpoint(sandbox_endpoint)
== sandbox_mtls_endpoint
)
assert (
CloudCatalogClient._get_default_mtls_endpoint(sandbox_mtls_endpoint)
== sandbox_mtls_endpoint
)
assert CloudCatalogClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi
@pytest.mark.parametrize(
"client_class,transport_name",
[
(CloudCatalogClient, "grpc"),
(CloudCatalogAsyncClient, "grpc_asyncio"),
],
)
def test_cloud_catalog_client_from_service_account_info(client_class, transport_name):
creds = ga_credentials.AnonymousCredentials()
with mock.patch.object(
service_account.Credentials, "from_service_account_info"
) as factory:
factory.return_value = creds
info = {"valid": True}
client = client_class.from_service_account_info(info, transport=transport_name)
assert client.transport._credentials == creds
assert isinstance(client, client_class)
assert client.transport._host == ("cloudbilling.googleapis.com:443")
@pytest.mark.parametrize(
"transport_class,transport_name",
[
(transports.CloudCatalogGrpcTransport, "grpc"),
(transports.CloudCatalogGrpcAsyncIOTransport, "grpc_asyncio"),
],
)
def test_cloud_catalog_client_service_account_always_use_jwt(
transport_class, transport_name
):
with mock.patch.object(
service_account.Credentials, "with_always_use_jwt_access", create=True
) as use_jwt:
creds = service_account.Credentials(None, None, None)
transport = transport_class(credentials=creds, always_use_jwt_access=True)
use_jwt.assert_called_once_with(True)
with mock.patch.object(
service_account.Credentials, "with_always_use_jwt_access", create=True
) as use_jwt:
creds = service_account.Credentials(None, None, None)
transport = transport_class(credentials=creds, always_use_jwt_access=False)
use_jwt.assert_not_called()
@pytest.mark.parametrize(
"client_class,transport_name",
[
(CloudCatalogClient, "grpc"),
(CloudCatalogAsyncClient, "grpc_asyncio"),
],
)
def test_cloud_catalog_client_from_service_account_file(client_class, transport_name):
creds = ga_credentials.AnonymousCredentials()
with mock.patch.object(
service_account.Credentials, "from_service_account_file"
) as factory:
factory.return_value = creds
client = client_class.from_service_account_file(
"dummy/file/path.json", transport=transport_name
)
assert client.transport._credentials == creds
assert isinstance(client, client_class)
client = client_class.from_service_account_json(
"dummy/file/path.json", transport=transport_name
)
assert client.transport._credentials == creds
assert isinstance(client, client_class)
assert client.transport._host == ("cloudbilling.googleapis.com:443")
def test_cloud_catalog_client_get_transport_class():
transport = CloudCatalogClient.get_transport_class()
available_transports = [
transports.CloudCatalogGrpcTransport,
]
assert transport in available_transports
transport = CloudCatalogClient.get_transport_class("grpc")
assert transport == transports.CloudCatalogGrpcTransport
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
[
(CloudCatalogClient, transports.CloudCatalogGrpcTransport, "grpc"),
(
CloudCatalogAsyncClient,
transports.CloudCatalogGrpcAsyncIOTransport,
"grpc_asyncio",
),
],
)
@mock.patch.object(
CloudCatalogClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudCatalogClient)
)
@mock.patch.object(
CloudCatalogAsyncClient,
"DEFAULT_ENDPOINT",
modify_default_endpoint(CloudCatalogAsyncClient),
)
def test_cloud_catalog_client_client_options(
client_class, transport_class, transport_name
):
# Check that if channel is provided we won't create a new one.
with mock.patch.object(CloudCatalogClient, "get_transport_class") as gtc:
transport = transport_class(credentials=ga_credentials.AnonymousCredentials())
client = client_class(transport=transport)
gtc.assert_not_called()
# Check that if channel is provided via str we will create a new one.
with mock.patch.object(CloudCatalogClient, "get_transport_class") as gtc:
client = client_class(transport=transport_name)
gtc.assert_called()
# Check the case api_endpoint is provided.
options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(transport=transport_name, client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
api_audience=None,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "never".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
api_audience=None,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "always".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_MTLS_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
api_audience=None,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
# unsupported value.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
with pytest.raises(MutualTLSChannelError):
client = client_class(transport=transport_name)
# Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
):
with pytest.raises(ValueError):
client = client_class(transport=transport_name)
# Check the case quota_project_id is provided
options = client_options.ClientOptions(quota_project_id="octopus")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options, transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id="octopus",
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
api_audience=None,
)
# Check the case api_endpoint is provided
options = client_options.ClientOptions(
api_audience="https://language.googleapis.com"
)
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options, transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
api_audience="https://language.googleapis.com",
)
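# What the endpoint plumbing verified above looks like from the caller's side, as a
# minimal sketch (the endpoint value shown is just the public default; any reachable
# override works the same way, and Application Default Credentials must be available
# for construction).
def _example_custom_endpoint_client():
    options = client_options.ClientOptions(api_endpoint="cloudbilling.googleapis.com")
    return CloudCatalogClient(client_options=options)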
@pytest.mark.parametrize(
"client_class,transport_class,transport_name,use_client_cert_env",
[
(CloudCatalogClient, transports.CloudCatalogGrpcTransport, "grpc", "true"),
(
CloudCatalogAsyncClient,
transports.CloudCatalogGrpcAsyncIOTransport,
"grpc_asyncio",
"true",
),
(CloudCatalogClient, transports.CloudCatalogGrpcTransport, "grpc", "false"),
(
CloudCatalogAsyncClient,
transports.CloudCatalogGrpcAsyncIOTransport,
"grpc_asyncio",
"false",
),
],
)
@mock.patch.object(
CloudCatalogClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudCatalogClient)
)
@mock.patch.object(
CloudCatalogAsyncClient,
"DEFAULT_ENDPOINT",
modify_default_endpoint(CloudCatalogAsyncClient),
)
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_cloud_catalog_client_mtls_env_auto(
client_class, transport_class, transport_name, use_client_cert_env
):
    # This tests the endpoint autoswitch behavior: the endpoint is autoswitched to the
    # default mTLS endpoint if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and a client cert exists.
# Check the case client_cert_source is provided. Whether client cert is used depends on
# GOOGLE_API_USE_CLIENT_CERTIFICATE value.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
):
options = client_options.ClientOptions(
client_cert_source=client_cert_source_callback
)
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options, transport=transport_name)
if use_client_cert_env == "false":
expected_client_cert_source = None
expected_host = client.DEFAULT_ENDPOINT
else:
expected_client_cert_source = client_cert_source_callback
expected_host = client.DEFAULT_MTLS_ENDPOINT
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=expected_host,
scopes=None,
client_cert_source_for_mtls=expected_client_cert_source,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
api_audience=None,
)
# Check the case ADC client cert is provided. Whether client cert is used depends on
# GOOGLE_API_USE_CLIENT_CERTIFICATE value.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
):
with mock.patch.object(transport_class, "__init__") as patched:
with mock.patch(
"google.auth.transport.mtls.has_default_client_cert_source",
return_value=True,
):
with mock.patch(
"google.auth.transport.mtls.default_client_cert_source",
return_value=client_cert_source_callback,
):
if use_client_cert_env == "false":
expected_host = client.DEFAULT_ENDPOINT
expected_client_cert_source = None
else:
expected_host = client.DEFAULT_MTLS_ENDPOINT
expected_client_cert_source = client_cert_source_callback
patched.return_value = None
client = client_class(transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=expected_host,
scopes=None,
client_cert_source_for_mtls=expected_client_cert_source,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
api_audience=None,
)
# Check the case client_cert_source and ADC client cert are not provided.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
):
with mock.patch.object(transport_class, "__init__") as patched:
with mock.patch(
"google.auth.transport.mtls.has_default_client_cert_source",
return_value=False,
):
patched.return_value = None
client = client_class(transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
api_audience=None,
)
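# Caller-side counterpart of the mTLS switching tested above, as a hedged sketch:
# a client_cert_source callback must return a (cert_bytes, key_bytes) pair, and the
# GOOGLE_API_USE_CLIENT_CERTIFICATE / GOOGLE_API_USE_MTLS_ENDPOINT variables decide
# whether it is used and which endpoint is selected. The PEM paths are placeholders.
def _example_mtls_client():
    def _cert_source():
        with open("client_cert.pem", "rb") as cert, open("client_key.pem", "rb") as key:
            return cert.read(), key.read()
    options = client_options.ClientOptions(client_cert_source=_cert_source)
    return CloudCatalogClient(client_options=options)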
@pytest.mark.parametrize("client_class", [CloudCatalogClient, CloudCatalogAsyncClient])
@mock.patch.object(
CloudCatalogClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudCatalogClient)
)
@mock.patch.object(
CloudCatalogAsyncClient,
"DEFAULT_ENDPOINT",
modify_default_endpoint(CloudCatalogAsyncClient),
)
def test_cloud_catalog_client_get_mtls_endpoint_and_cert_source(client_class):
mock_client_cert_source = mock.Mock()
# Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
mock_api_endpoint = "foo"
options = client_options.ClientOptions(
client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint
)
api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(
options
)
assert api_endpoint == mock_api_endpoint
assert cert_source == mock_client_cert_source
# Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
mock_client_cert_source = mock.Mock()
mock_api_endpoint = "foo"
options = client_options.ClientOptions(
client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint
)
api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(
options
)
assert api_endpoint == mock_api_endpoint
assert cert_source is None
# Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
assert api_endpoint == client_class.DEFAULT_ENDPOINT
assert cert_source is None
# Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
assert cert_source is None
# Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
with mock.patch(
"google.auth.transport.mtls.has_default_client_cert_source",
return_value=False,
):
api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
assert api_endpoint == client_class.DEFAULT_ENDPOINT
assert cert_source is None
# Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
with mock.patch(
"google.auth.transport.mtls.has_default_client_cert_source",
return_value=True,
):
with mock.patch(
"google.auth.transport.mtls.default_client_cert_source",
return_value=mock_client_cert_source,
):
(
api_endpoint,
cert_source,
) = client_class.get_mtls_endpoint_and_cert_source()
assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
assert cert_source == mock_client_cert_source
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
[
(CloudCatalogClient, transports.CloudCatalogGrpcTransport, "grpc"),
(
CloudCatalogAsyncClient,
transports.CloudCatalogGrpcAsyncIOTransport,
"grpc_asyncio",
),
],
)
def test_cloud_catalog_client_client_options_scopes(
client_class, transport_class, transport_name
):
# Check the case scopes are provided.
options = client_options.ClientOptions(
scopes=["1", "2"],
)
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options, transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=["1", "2"],
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
api_audience=None,
)
@pytest.mark.parametrize(
"client_class,transport_class,transport_name,grpc_helpers",
[
(
CloudCatalogClient,
transports.CloudCatalogGrpcTransport,
"grpc",
grpc_helpers,
),
(
CloudCatalogAsyncClient,
transports.CloudCatalogGrpcAsyncIOTransport,
"grpc_asyncio",
grpc_helpers_async,
),
],
)
def test_cloud_catalog_client_client_options_credentials_file(
client_class, transport_class, transport_name, grpc_helpers
):
# Check the case credentials file is provided.
options = client_options.ClientOptions(credentials_file="credentials.json")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options, transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file="credentials.json",
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
api_audience=None,
)
def test_cloud_catalog_client_client_options_from_dict():
with mock.patch(
"google.cloud.billing_v1.services.cloud_catalog.transports.CloudCatalogGrpcTransport.__init__"
) as grpc_transport:
grpc_transport.return_value = None
client = CloudCatalogClient(client_options={"api_endpoint": "squid.clam.whelk"})
grpc_transport.assert_called_once_with(
credentials=None,
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
api_audience=None,
)
@pytest.mark.parametrize(
"client_class,transport_class,transport_name,grpc_helpers",
[
(
CloudCatalogClient,
transports.CloudCatalogGrpcTransport,
"grpc",
grpc_helpers,
),
(
CloudCatalogAsyncClient,
transports.CloudCatalogGrpcAsyncIOTransport,
"grpc_asyncio",
grpc_helpers_async,
),
],
)
def test_cloud_catalog_client_create_channel_credentials_file(
client_class, transport_class, transport_name, grpc_helpers
):
# Check the case credentials file is provided.
options = client_options.ClientOptions(credentials_file="credentials.json")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options, transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file="credentials.json",
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
api_audience=None,
)
# test that the credentials from file are saved and used as the credentials.
with mock.patch.object(
google.auth, "load_credentials_from_file", autospec=True
) as load_creds, mock.patch.object(
google.auth, "default", autospec=True
) as adc, mock.patch.object(
grpc_helpers, "create_channel"
) as create_channel:
creds = ga_credentials.AnonymousCredentials()
file_creds = ga_credentials.AnonymousCredentials()
load_creds.return_value = (file_creds, None)
adc.return_value = (creds, None)
client = client_class(client_options=options, transport=transport_name)
create_channel.assert_called_with(
"cloudbilling.googleapis.com:443",
credentials=file_creds,
credentials_file=None,
quota_project_id=None,
default_scopes=(
"https://www.googleapis.com/auth/cloud-billing",
"https://www.googleapis.com/auth/cloud-billing.readonly",
"https://www.googleapis.com/auth/cloud-platform",
),
scopes=None,
default_host="cloudbilling.googleapis.com",
ssl_credentials=None,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
@pytest.mark.parametrize(
"request_type",
[
cloud_catalog.ListServicesRequest,
dict,
],
)
def test_list_services(request_type, transport: str = "grpc"):
client = CloudCatalogClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_services), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = cloud_catalog.ListServicesResponse(
next_page_token="next_page_token_value",
)
response = client.list_services(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_catalog.ListServicesRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListServicesPager)
assert response.next_page_token == "next_page_token_value"
def test_list_services_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = CloudCatalogClient(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_services), "__call__") as call:
client.list_services()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_catalog.ListServicesRequest()
@pytest.mark.asyncio
async def test_list_services_async(
transport: str = "grpc_asyncio", request_type=cloud_catalog.ListServicesRequest
):
client = CloudCatalogAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_services), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
cloud_catalog.ListServicesResponse(
next_page_token="next_page_token_value",
)
)
response = await client.list_services(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_catalog.ListServicesRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListServicesAsyncPager)
assert response.next_page_token == "next_page_token_value"
@pytest.mark.asyncio
async def test_list_services_async_from_dict():
await test_list_services_async(request_type=dict)
def test_list_services_pager(transport_name: str = "grpc"):
client = CloudCatalogClient(
        credentials=ga_credentials.AnonymousCredentials(),
transport=transport_name,
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_services), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
cloud_catalog.ListServicesResponse(
services=[
cloud_catalog.Service(),
cloud_catalog.Service(),
cloud_catalog.Service(),
],
next_page_token="abc",
),
cloud_catalog.ListServicesResponse(
services=[],
next_page_token="def",
),
cloud_catalog.ListServicesResponse(
services=[
cloud_catalog.Service(),
],
next_page_token="ghi",
),
cloud_catalog.ListServicesResponse(
services=[
cloud_catalog.Service(),
cloud_catalog.Service(),
],
),
RuntimeError,
)
metadata = ()
pager = client.list_services(request={})
assert pager._metadata == metadata
results = list(pager)
assert len(results) == 6
assert all(isinstance(i, cloud_catalog.Service) for i in results)
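# Real-usage counterpart of the mocked paging above (a sketch; assumes valid
# credentials and API access): the returned pager follows next_page_token
# transparently, and .pages exposes the raw per-page responses.
def _example_iterate_services(client):
    for service in client.list_services(request={}):
        print(service.service_id, service.display_name)
    for page in client.list_services(request={}).pages:
        print(len(page.services), "services on this page")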
def test_list_services_pages(transport_name: str = "grpc"):
client = CloudCatalogClient(
        credentials=ga_credentials.AnonymousCredentials(),
transport=transport_name,
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_services), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
cloud_catalog.ListServicesResponse(
services=[
cloud_catalog.Service(),
cloud_catalog.Service(),
cloud_catalog.Service(),
],
next_page_token="abc",
),
cloud_catalog.ListServicesResponse(
services=[],
next_page_token="def",
),
cloud_catalog.ListServicesResponse(
services=[
cloud_catalog.Service(),
],
next_page_token="ghi",
),
cloud_catalog.ListServicesResponse(
services=[
cloud_catalog.Service(),
cloud_catalog.Service(),
],
),
RuntimeError,
)
pages = list(client.list_services(request={}).pages)
for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
async def test_list_services_async_pager():
client = CloudCatalogAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_services), "__call__", new_callable=mock.AsyncMock
) as call:
# Set the response to a series of pages.
call.side_effect = (
cloud_catalog.ListServicesResponse(
services=[
cloud_catalog.Service(),
cloud_catalog.Service(),
cloud_catalog.Service(),
],
next_page_token="abc",
),
cloud_catalog.ListServicesResponse(
services=[],
next_page_token="def",
),
cloud_catalog.ListServicesResponse(
services=[
cloud_catalog.Service(),
],
next_page_token="ghi",
),
cloud_catalog.ListServicesResponse(
services=[
cloud_catalog.Service(),
cloud_catalog.Service(),
],
),
RuntimeError,
)
async_pager = await client.list_services(
request={},
)
assert async_pager.next_page_token == "abc"
responses = []
async for response in async_pager: # pragma: no branch
responses.append(response)
assert len(responses) == 6
assert all(isinstance(i, cloud_catalog.Service) for i in responses)
@pytest.mark.asyncio
async def test_list_services_async_pages():
client = CloudCatalogAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_services), "__call__", new_callable=mock.AsyncMock
) as call:
# Set the response to a series of pages.
call.side_effect = (
cloud_catalog.ListServicesResponse(
services=[
cloud_catalog.Service(),
cloud_catalog.Service(),
cloud_catalog.Service(),
],
next_page_token="abc",
),
cloud_catalog.ListServicesResponse(
services=[],
next_page_token="def",
),
cloud_catalog.ListServicesResponse(
services=[
cloud_catalog.Service(),
],
next_page_token="ghi",
),
cloud_catalog.ListServicesResponse(
services=[
cloud_catalog.Service(),
cloud_catalog.Service(),
],
),
RuntimeError,
)
pages = []
async for page_ in (
await client.list_services(request={})
).pages: # pragma: no branch
pages.append(page_)
for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
assert page_.raw_page.next_page_token == token
@pytest.mark.parametrize(
"request_type",
[
cloud_catalog.ListSkusRequest,
dict,
],
)
def test_list_skus(request_type, transport: str = "grpc"):
client = CloudCatalogClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_skus), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = cloud_catalog.ListSkusResponse(
next_page_token="next_page_token_value",
)
response = client.list_skus(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_catalog.ListSkusRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListSkusPager)
assert response.next_page_token == "next_page_token_value"
def test_list_skus_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = CloudCatalogClient(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_skus), "__call__") as call:
client.list_skus()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_catalog.ListSkusRequest()
@pytest.mark.asyncio
async def test_list_skus_async(
transport: str = "grpc_asyncio", request_type=cloud_catalog.ListSkusRequest
):
client = CloudCatalogAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_skus), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
cloud_catalog.ListSkusResponse(
next_page_token="next_page_token_value",
)
)
response = await client.list_skus(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_catalog.ListSkusRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListSkusAsyncPager)
assert response.next_page_token == "next_page_token_value"
@pytest.mark.asyncio
async def test_list_skus_async_from_dict():
await test_list_skus_async(request_type=dict)
def test_list_skus_field_headers():
client = CloudCatalogClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_catalog.ListSkusRequest()
request.parent = "parent_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_skus), "__call__") as call:
call.return_value = cloud_catalog.ListSkusResponse()
client.list_skus(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"parent=parent_value",
) in kw["metadata"]
@pytest.mark.asyncio
async def test_list_skus_field_headers_async():
client = CloudCatalogAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_catalog.ListSkusRequest()
request.parent = "parent_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_skus), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
cloud_catalog.ListSkusResponse()
)
await client.list_skus(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"parent=parent_value",
) in kw["metadata"]
def test_list_skus_flattened():
client = CloudCatalogClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_skus), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = cloud_catalog.ListSkusResponse()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.list_skus(
parent="parent_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].parent
mock_val = "parent_value"
assert arg == mock_val
def test_list_skus_flattened_error():
client = CloudCatalogClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.list_skus(
cloud_catalog.ListSkusRequest(),
parent="parent_value",
)
@pytest.mark.asyncio
async def test_list_skus_flattened_async():
client = CloudCatalogAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_skus), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = cloud_catalog.ListSkusResponse()
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
cloud_catalog.ListSkusResponse()
)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.list_skus(
parent="parent_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].parent
mock_val = "parent_value"
assert arg == mock_val
@pytest.mark.asyncio
async def test_list_skus_flattened_error_async():
client = CloudCatalogAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.list_skus(
cloud_catalog.ListSkusRequest(),
parent="parent_value",
)
def test_list_skus_pager(transport_name: str = "grpc"):
client = CloudCatalogClient(
        credentials=ga_credentials.AnonymousCredentials(),
transport=transport_name,
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_skus), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
cloud_catalog.ListSkusResponse(
skus=[
cloud_catalog.Sku(),
cloud_catalog.Sku(),
cloud_catalog.Sku(),
],
next_page_token="abc",
),
cloud_catalog.ListSkusResponse(
skus=[],
next_page_token="def",
),
cloud_catalog.ListSkusResponse(
skus=[
cloud_catalog.Sku(),
],
next_page_token="ghi",
),
cloud_catalog.ListSkusResponse(
skus=[
cloud_catalog.Sku(),
cloud_catalog.Sku(),
],
),
RuntimeError,
)
metadata = ()
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
)
pager = client.list_skus(request={})
assert pager._metadata == metadata
results = list(pager)
assert len(results) == 6
assert all(isinstance(i, cloud_catalog.Sku) for i in results)
def test_list_skus_pages(transport_name: str = "grpc"):
client = CloudCatalogClient(
        credentials=ga_credentials.AnonymousCredentials(),
transport=transport_name,
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_skus), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
cloud_catalog.ListSkusResponse(
skus=[
cloud_catalog.Sku(),
cloud_catalog.Sku(),
cloud_catalog.Sku(),
],
next_page_token="abc",
),
cloud_catalog.ListSkusResponse(
skus=[],
next_page_token="def",
),
cloud_catalog.ListSkusResponse(
skus=[
cloud_catalog.Sku(),
],
next_page_token="ghi",
),
cloud_catalog.ListSkusResponse(
skus=[
cloud_catalog.Sku(),
cloud_catalog.Sku(),
],
),
RuntimeError,
)
pages = list(client.list_skus(request={}).pages)
for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
async def test_list_skus_async_pager():
client = CloudCatalogAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_skus), "__call__", new_callable=mock.AsyncMock
) as call:
# Set the response to a series of pages.
call.side_effect = (
cloud_catalog.ListSkusResponse(
skus=[
cloud_catalog.Sku(),
cloud_catalog.Sku(),
cloud_catalog.Sku(),
],
next_page_token="abc",
),
cloud_catalog.ListSkusResponse(
skus=[],
next_page_token="def",
),
cloud_catalog.ListSkusResponse(
skus=[
cloud_catalog.Sku(),
],
next_page_token="ghi",
),
cloud_catalog.ListSkusResponse(
skus=[
cloud_catalog.Sku(),
cloud_catalog.Sku(),
],
),
RuntimeError,
)
async_pager = await client.list_skus(
request={},
)
assert async_pager.next_page_token == "abc"
responses = []
async for response in async_pager: # pragma: no branch
responses.append(response)
assert len(responses) == 6
assert all(isinstance(i, cloud_catalog.Sku) for i in responses)
@pytest.mark.asyncio
async def test_list_skus_async_pages():
client = CloudCatalogAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_skus), "__call__", new_callable=mock.AsyncMock
) as call:
# Set the response to a series of pages.
call.side_effect = (
cloud_catalog.ListSkusResponse(
skus=[
cloud_catalog.Sku(),
cloud_catalog.Sku(),
cloud_catalog.Sku(),
],
next_page_token="abc",
),
cloud_catalog.ListSkusResponse(
skus=[],
next_page_token="def",
),
cloud_catalog.ListSkusResponse(
skus=[
cloud_catalog.Sku(),
],
next_page_token="ghi",
),
cloud_catalog.ListSkusResponse(
skus=[
cloud_catalog.Sku(),
cloud_catalog.Sku(),
],
),
RuntimeError,
)
pages = []
async for page_ in (
await client.list_skus(request={})
).pages: # pragma: no branch
pages.append(page_)
for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
assert page_.raw_page.next_page_token == token
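# SKU listing as a caller would do it, for reference (a sketch; assumes ADC is set up
# and the parent is a real service resource name -- the ID below is a placeholder).
def _example_list_skus(client, parent="services/6F81-5844-456A"):
    for sku in client.list_skus(parent=parent):
        print(sku.sku_id, sku.description)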
def test_credentials_transport_error():
# It is an error to provide credentials and a transport instance.
transport = transports.CloudCatalogGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = CloudCatalogClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# It is an error to provide a credentials file and a transport instance.
transport = transports.CloudCatalogGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = CloudCatalogClient(
client_options={"credentials_file": "credentials.json"},
transport=transport,
)
# It is an error to provide an api_key and a transport instance.
transport = transports.CloudCatalogGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
options = client_options.ClientOptions()
options.api_key = "api_key"
with pytest.raises(ValueError):
client = CloudCatalogClient(
client_options=options,
transport=transport,
)
# It is an error to provide an api_key and a credential.
options = mock.Mock()
options.api_key = "api_key"
with pytest.raises(ValueError):
client = CloudCatalogClient(
client_options=options, credentials=ga_credentials.AnonymousCredentials()
)
# It is an error to provide scopes and a transport instance.
transport = transports.CloudCatalogGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = CloudCatalogClient(
client_options={"scopes": ["1", "2"]},
transport=transport,
)
def test_transport_instance():
# A client may be instantiated with a custom transport instance.
transport = transports.CloudCatalogGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
client = CloudCatalogClient(transport=transport)
assert client.transport is transport
def test_transport_get_channel():
# A client may be instantiated with a custom transport instance.
transport = transports.CloudCatalogGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
channel = transport.grpc_channel
assert channel
transport = transports.CloudCatalogGrpcAsyncIOTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
channel = transport.grpc_channel
assert channel
@pytest.mark.parametrize(
"transport_class",
[
transports.CloudCatalogGrpcTransport,
transports.CloudCatalogGrpcAsyncIOTransport,
],
)
def test_transport_adc(transport_class):
# Test default credentials are used if not provided.
with mock.patch.object(google.auth, "default") as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport_class()
adc.assert_called_once()
@pytest.mark.parametrize(
"transport_name",
[
"grpc",
],
)
def test_transport_kind(transport_name):
transport = CloudCatalogClient.get_transport_class(transport_name)(
credentials=ga_credentials.AnonymousCredentials(),
)
assert transport.kind == transport_name
def test_transport_grpc_default():
# A client should use the gRPC transport by default.
client = CloudCatalogClient(
credentials=ga_credentials.AnonymousCredentials(),
)
assert isinstance(
client.transport,
transports.CloudCatalogGrpcTransport,
)
def test_cloud_catalog_base_transport_error():
# Passing both a credentials object and credentials_file should raise an error
with pytest.raises(core_exceptions.DuplicateCredentialArgs):
transport = transports.CloudCatalogTransport(
credentials=ga_credentials.AnonymousCredentials(),
credentials_file="credentials.json",
)
def test_cloud_catalog_base_transport():
# Instantiate the base transport.
with mock.patch(
"google.cloud.billing_v1.services.cloud_catalog.transports.CloudCatalogTransport.__init__"
) as Transport:
Transport.return_value = None
transport = transports.CloudCatalogTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
# Every method on the transport should just blindly
# raise NotImplementedError.
methods = (
"list_services",
"list_skus",
)
for method in methods:
with pytest.raises(NotImplementedError):
getattr(transport, method)(request=object())
with pytest.raises(NotImplementedError):
transport.close()
# Catch all for all remaining methods and properties
remainder = [
"kind",
]
for r in remainder:
with pytest.raises(NotImplementedError):
getattr(transport, r)()
def test_cloud_catalog_base_transport_with_credentials_file():
# Instantiate the base transport with a credentials file
with mock.patch.object(
google.auth, "load_credentials_from_file", autospec=True
) as load_creds, mock.patch(
"google.cloud.billing_v1.services.cloud_catalog.transports.CloudCatalogTransport._prep_wrapped_messages"
) as Transport:
Transport.return_value = None
load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
transport = transports.CloudCatalogTransport(
credentials_file="credentials.json",
quota_project_id="octopus",
)
load_creds.assert_called_once_with(
"credentials.json",
scopes=None,
default_scopes=(
"https://www.googleapis.com/auth/cloud-billing",
"https://www.googleapis.com/auth/cloud-billing.readonly",
"https://www.googleapis.com/auth/cloud-platform",
),
quota_project_id="octopus",
)
def test_cloud_catalog_base_transport_with_adc():
# Test the default credentials are used if credentials and credentials_file are None.
with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch(
"google.cloud.billing_v1.services.cloud_catalog.transports.CloudCatalogTransport._prep_wrapped_messages"
) as Transport:
Transport.return_value = None
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport = transports.CloudCatalogTransport()
adc.assert_called_once()
def test_cloud_catalog_auth_adc():
# If no credentials are provided, we should use ADC credentials.
with mock.patch.object(google.auth, "default", autospec=True) as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
CloudCatalogClient()
adc.assert_called_once_with(
scopes=None,
default_scopes=(
"https://www.googleapis.com/auth/cloud-billing",
"https://www.googleapis.com/auth/cloud-billing.readonly",
"https://www.googleapis.com/auth/cloud-platform",
),
quota_project_id=None,
)
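# The test above pins the scopes requested from ADC. A hedged sketch of fetching the
# same default credentials explicitly, e.g. to share one credential across clients
# (the readonly scope shown is one of the defaults asserted above):
def _example_client_from_adc():
    credentials, _ = google.auth.default(
        scopes=["https://www.googleapis.com/auth/cloud-billing.readonly"]
    )
    return CloudCatalogClient(credentials=credentials)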
@pytest.mark.parametrize(
"transport_class",
[
transports.CloudCatalogGrpcTransport,
transports.CloudCatalogGrpcAsyncIOTransport,
],
)
def test_cloud_catalog_transport_auth_adc(transport_class):
# If credentials and host are not provided, the transport class should use
# ADC credentials.
with mock.patch.object(google.auth, "default", autospec=True) as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport_class(quota_project_id="octopus", scopes=["1", "2"])
adc.assert_called_once_with(
scopes=["1", "2"],
default_scopes=(
"https://www.googleapis.com/auth/cloud-billing",
"https://www.googleapis.com/auth/cloud-billing.readonly",
"https://www.googleapis.com/auth/cloud-platform",
),
quota_project_id="octopus",
)
@pytest.mark.parametrize(
"transport_class",
[
transports.CloudCatalogGrpcTransport,
transports.CloudCatalogGrpcAsyncIOTransport,
],
)
def test_cloud_catalog_transport_auth_gdch_credentials(transport_class):
host = "https://language.com"
api_audience_tests = [None, "https://language2.com"]
api_audience_expect = [host, "https://language2.com"]
for t, e in zip(api_audience_tests, api_audience_expect):
with mock.patch.object(google.auth, "default", autospec=True) as adc:
gdch_mock = mock.MagicMock()
type(gdch_mock).with_gdch_audience = mock.PropertyMock(
return_value=gdch_mock
)
adc.return_value = (gdch_mock, None)
transport_class(host=host, api_audience=t)
gdch_mock.with_gdch_audience.assert_called_once_with(e)
@pytest.mark.parametrize(
"transport_class,grpc_helpers",
[
(transports.CloudCatalogGrpcTransport, grpc_helpers),
(transports.CloudCatalogGrpcAsyncIOTransport, grpc_helpers_async),
],
)
def test_cloud_catalog_transport_create_channel(transport_class, grpc_helpers):
# If credentials and host are not provided, the transport class should use
# ADC credentials.
with mock.patch.object(
google.auth, "default", autospec=True
) as adc, mock.patch.object(
grpc_helpers, "create_channel", autospec=True
) as create_channel:
creds = ga_credentials.AnonymousCredentials()
adc.return_value = (creds, None)
transport_class(quota_project_id="octopus", scopes=["1", "2"])
create_channel.assert_called_with(
"cloudbilling.googleapis.com:443",
credentials=creds,
credentials_file=None,
quota_project_id="octopus",
default_scopes=(
"https://www.googleapis.com/auth/cloud-billing",
"https://www.googleapis.com/auth/cloud-billing.readonly",
"https://www.googleapis.com/auth/cloud-platform",
),
scopes=["1", "2"],
default_host="cloudbilling.googleapis.com",
ssl_credentials=None,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
@pytest.mark.parametrize(
"transport_class",
[transports.CloudCatalogGrpcTransport, transports.CloudCatalogGrpcAsyncIOTransport],
)
def test_cloud_catalog_grpc_transport_client_cert_source_for_mtls(transport_class):
cred = ga_credentials.AnonymousCredentials()
# Check ssl_channel_credentials is used if provided.
with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
mock_ssl_channel_creds = mock.Mock()
transport_class(
host="squid.clam.whelk",
credentials=cred,
ssl_channel_credentials=mock_ssl_channel_creds,
)
mock_create_channel.assert_called_once_with(
"squid.clam.whelk:443",
credentials=cred,
credentials_file=None,
scopes=None,
ssl_credentials=mock_ssl_channel_creds,
quota_project_id=None,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
# Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
# is used.
with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
transport_class(
credentials=cred,
client_cert_source_for_mtls=client_cert_source_callback,
)
expected_cert, expected_key = client_cert_source_callback()
mock_ssl_cred.assert_called_once_with(
certificate_chain=expected_cert, private_key=expected_key
)
@pytest.mark.parametrize(
"transport_name",
[
"grpc",
"grpc_asyncio",
],
)
def test_cloud_catalog_host_no_port(transport_name):
client = CloudCatalogClient(
credentials=ga_credentials.AnonymousCredentials(),
client_options=client_options.ClientOptions(
api_endpoint="cloudbilling.googleapis.com"
),
transport=transport_name,
)
assert client.transport._host == ("cloudbilling.googleapis.com:443")
@pytest.mark.parametrize(
"transport_name",
[
"grpc",
"grpc_asyncio",
],
)
def test_cloud_catalog_host_with_port(transport_name):
client = CloudCatalogClient(
credentials=ga_credentials.AnonymousCredentials(),
client_options=client_options.ClientOptions(
api_endpoint="cloudbilling.googleapis.com:8000"
),
transport=transport_name,
)
assert client.transport._host == ("cloudbilling.googleapis.com:8000")
def test_cloud_catalog_grpc_transport_channel():
channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
# Check that channel is used if provided.
transport = transports.CloudCatalogGrpcTransport(
host="squid.clam.whelk",
channel=channel,
)
assert transport.grpc_channel == channel
assert transport._host == "squid.clam.whelk:443"
    assert transport._ssl_channel_credentials is None
def test_cloud_catalog_grpc_asyncio_transport_channel():
channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
# Check that channel is used if provided.
transport = transports.CloudCatalogGrpcAsyncIOTransport(
host="squid.clam.whelk",
channel=channel,
)
assert transport.grpc_channel == channel
assert transport._host == "squid.clam.whelk:443"
    assert transport._ssl_channel_credentials is None
# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
"transport_class",
[transports.CloudCatalogGrpcTransport, transports.CloudCatalogGrpcAsyncIOTransport],
)
def test_cloud_catalog_transport_channel_mtls_with_client_cert_source(transport_class):
with mock.patch(
"grpc.ssl_channel_credentials", autospec=True
) as grpc_ssl_channel_cred:
with mock.patch.object(
transport_class, "create_channel"
) as grpc_create_channel:
mock_ssl_cred = mock.Mock()
grpc_ssl_channel_cred.return_value = mock_ssl_cred
mock_grpc_channel = mock.Mock()
grpc_create_channel.return_value = mock_grpc_channel
cred = ga_credentials.AnonymousCredentials()
with pytest.warns(DeprecationWarning):
with mock.patch.object(google.auth, "default") as adc:
adc.return_value = (cred, None)
transport = transport_class(
host="squid.clam.whelk",
api_mtls_endpoint="mtls.squid.clam.whelk",
client_cert_source=client_cert_source_callback,
)
adc.assert_called_once()
grpc_ssl_channel_cred.assert_called_once_with(
certificate_chain=b"cert bytes", private_key=b"key bytes"
)
grpc_create_channel.assert_called_once_with(
"mtls.squid.clam.whelk:443",
credentials=cred,
credentials_file=None,
scopes=None,
ssl_credentials=mock_ssl_cred,
quota_project_id=None,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
assert transport.grpc_channel == mock_grpc_channel
assert transport._ssl_channel_credentials == mock_ssl_cred
# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
"transport_class",
[transports.CloudCatalogGrpcTransport, transports.CloudCatalogGrpcAsyncIOTransport],
)
def test_cloud_catalog_transport_channel_mtls_with_adc(transport_class):
mock_ssl_cred = mock.Mock()
with mock.patch.multiple(
"google.auth.transport.grpc.SslCredentials",
__init__=mock.Mock(return_value=None),
ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
):
with mock.patch.object(
transport_class, "create_channel"
) as grpc_create_channel:
mock_grpc_channel = mock.Mock()
grpc_create_channel.return_value = mock_grpc_channel
mock_cred = mock.Mock()
with pytest.warns(DeprecationWarning):
transport = transport_class(
host="squid.clam.whelk",
credentials=mock_cred,
api_mtls_endpoint="mtls.squid.clam.whelk",
client_cert_source=None,
)
grpc_create_channel.assert_called_once_with(
"mtls.squid.clam.whelk:443",
credentials=mock_cred,
credentials_file=None,
scopes=None,
ssl_credentials=mock_ssl_cred,
quota_project_id=None,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
assert transport.grpc_channel == mock_grpc_channel
def test_service_path():
service = "squid"
expected = "services/{service}".format(
service=service,
)
actual = CloudCatalogClient.service_path(service)
assert expected == actual
def test_parse_service_path():
expected = {
"service": "clam",
}
path = CloudCatalogClient.service_path(**expected)
# Check that the path construction is reversible.
actual = CloudCatalogClient.parse_service_path(path)
assert expected == actual
def test_sku_path():
service = "whelk"
sku = "octopus"
expected = "services/{service}/skus/{sku}".format(
service=service,
sku=sku,
)
actual = CloudCatalogClient.sku_path(service, sku)
assert expected == actual
def test_parse_sku_path():
expected = {
"service": "oyster",
"sku": "nudibranch",
}
path = CloudCatalogClient.sku_path(**expected)
# Check that the path construction is reversible.
actual = CloudCatalogClient.parse_sku_path(path)
assert expected == actual
def test_common_billing_account_path():
billing_account = "cuttlefish"
expected = "billingAccounts/{billing_account}".format(
billing_account=billing_account,
)
actual = CloudCatalogClient.common_billing_account_path(billing_account)
assert expected == actual
def test_parse_common_billing_account_path():
expected = {
"billing_account": "mussel",
}
path = CloudCatalogClient.common_billing_account_path(**expected)
# Check that the path construction is reversible.
actual = CloudCatalogClient.parse_common_billing_account_path(path)
assert expected == actual
def test_common_folder_path():
folder = "winkle"
expected = "folders/{folder}".format(
folder=folder,
)
actual = CloudCatalogClient.common_folder_path(folder)
assert expected == actual
def test_parse_common_folder_path():
expected = {
"folder": "nautilus",
}
path = CloudCatalogClient.common_folder_path(**expected)
# Check that the path construction is reversible.
actual = CloudCatalogClient.parse_common_folder_path(path)
assert expected == actual
def test_common_organization_path():
organization = "scallop"
expected = "organizations/{organization}".format(
organization=organization,
)
actual = CloudCatalogClient.common_organization_path(organization)
assert expected == actual
def test_parse_common_organization_path():
expected = {
"organization": "abalone",
}
path = CloudCatalogClient.common_organization_path(**expected)
# Check that the path construction is reversible.
actual = CloudCatalogClient.parse_common_organization_path(path)
assert expected == actual
def test_common_project_path():
project = "squid"
expected = "projects/{project}".format(
project=project,
)
actual = CloudCatalogClient.common_project_path(project)
assert expected == actual
def test_parse_common_project_path():
expected = {
"project": "clam",
}
path = CloudCatalogClient.common_project_path(**expected)
# Check that the path construction is reversible.
actual = CloudCatalogClient.parse_common_project_path(path)
assert expected == actual
def test_common_location_path():
project = "whelk"
location = "octopus"
expected = "projects/{project}/locations/{location}".format(
project=project,
location=location,
)
actual = CloudCatalogClient.common_location_path(project, location)
assert expected == actual
def test_parse_common_location_path():
expected = {
"project": "oyster",
"location": "nudibranch",
}
path = CloudCatalogClient.common_location_path(**expected)
# Check that the path construction is reversible.
actual = CloudCatalogClient.parse_common_location_path(path)
assert expected == actual
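# The path helpers exercised above are plain string templates; a quick usage sketch
# (both IDs are placeholders) showing that building and parsing round-trip:
def _example_resource_paths():
    name = CloudCatalogClient.sku_path("6F81-5844-456A", "0013-863C-A2FF")
    # name == "services/6F81-5844-456A/skus/0013-863C-A2FF"
    parts = CloudCatalogClient.parse_sku_path(name)
    return parts["service"], parts["sku"]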
def test_client_with_default_client_info():
client_info = gapic_v1.client_info.ClientInfo()
with mock.patch.object(
transports.CloudCatalogTransport, "_prep_wrapped_messages"
) as prep:
client = CloudCatalogClient(
credentials=ga_credentials.AnonymousCredentials(),
client_info=client_info,
)
prep.assert_called_once_with(client_info)
with mock.patch.object(
transports.CloudCatalogTransport, "_prep_wrapped_messages"
) as prep:
transport_class = CloudCatalogClient.get_transport_class()
transport = transport_class(
credentials=ga_credentials.AnonymousCredentials(),
client_info=client_info,
)
prep.assert_called_once_with(client_info)
@pytest.mark.asyncio
async def test_transport_close_async():
client = CloudCatalogAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc_asyncio",
)
with mock.patch.object(
type(getattr(client.transport, "grpc_channel")), "close"
) as close:
async with client:
close.assert_not_called()
close.assert_called_once()
def test_transport_close():
transports = {
"grpc": "_grpc_channel",
}
for transport, close_name in transports.items():
client = CloudCatalogClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport
)
with mock.patch.object(
type(getattr(client.transport, close_name)), "close"
) as close:
with client:
close.assert_not_called()
close.assert_called_once()
def test_client_ctx():
transports = [
"grpc",
]
for transport in transports:
client = CloudCatalogClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport
)
# Test client calls underlying transport.
with mock.patch.object(type(client.transport), "close") as close:
close.assert_not_called()
with client:
pass
close.assert_called()
@pytest.mark.parametrize(
"client_class,transport_class",
[
(CloudCatalogClient, transports.CloudCatalogGrpcTransport),
(CloudCatalogAsyncClient, transports.CloudCatalogGrpcAsyncIOTransport),
],
)
def test_api_key_credentials(client_class, transport_class):
with mock.patch.object(
google.auth._default, "get_api_key_credentials", create=True
) as get_api_key_credentials:
mock_cred = mock.Mock()
get_api_key_credentials.return_value = mock_cred
options = client_options.ClientOptions()
options.api_key = "api_key"
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=mock_cred,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
api_audience=None,
)
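# For reference, the api_key path tested above maps to this caller-side setup
# (a sketch; the key value is a placeholder and the API must accept key-based auth):
def _example_api_key_client():
    options = client_options.ClientOptions()
    options.api_key = "YOUR_API_KEY"
    return CloudCatalogClient(client_options=options)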
|
{
"content_hash": "57848211bcda8ab7795a44f8aac515d3",
"timestamp": "",
"source": "github",
"line_count": 2087,
"max_line_length": 112,
"avg_line_length": 35.69477719214183,
"alnum_prop": 0.6165380226860863,
"repo_name": "googleapis/python-billing",
"id": "830e11c3133d45250f158d5c1f0a8f99e1b56fc5",
"size": "75095",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "tests/unit/gapic/billing_v1/test_cloud_catalog.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2050"
},
{
"name": "Python",
"bytes": "604403"
},
{
"name": "Shell",
"bytes": "30663"
}
],
"symlink_target": ""
}
|
import order
import report_lunch_order
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
{
"content_hash": "65ff8491fb4a9aa45218312e33de1dc6",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 65,
"avg_line_length": 26.5,
"alnum_prop": 0.8207547169811321,
"repo_name": "ntiufalara/openerp7",
"id": "4a56869bf832a0720c9d85fe000e731c64d35dca",
"size": "1087",
"binary": false,
"copies": "433",
"ref": "refs/heads/master",
"path": "openerp/addons/lunch/report/__init__.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "9611"
},
{
"name": "C#",
"bytes": "93691"
},
{
"name": "C++",
"bytes": "108790"
},
{
"name": "CSS",
"bytes": "583265"
},
{
"name": "Groff",
"bytes": "8138"
},
{
"name": "HTML",
"bytes": "125159"
},
{
"name": "JavaScript",
"bytes": "5109152"
},
{
"name": "Makefile",
"bytes": "14036"
},
{
"name": "NSIS",
"bytes": "14114"
},
{
"name": "PHP",
"bytes": "14033"
},
{
"name": "Python",
"bytes": "9373763"
},
{
"name": "Ruby",
"bytes": "220"
},
{
"name": "Shell",
"bytes": "6430"
},
{
"name": "XSLT",
"bytes": "156761"
}
],
"symlink_target": ""
}
|
def _jupyter_labextension_paths():
return [{"src": "labextension", "dest": "jupyterlab-plotly"}]
def _jupyter_nbextension_paths():
return [
{
"section": "notebook",
"src": "nbextension",
"dest": "jupyterlab-plotly",
"require": "jupyterlab-plotly/extension",
}
]
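# How these hooks are consumed, roughly (a sketch of the Jupyter extension convention,
# not plotly-specific behavior): the extension tooling imports this package, calls the
# functions above, and installs/serves the assets found under each "src" directory
# (relative to this package) under the corresponding "dest" name.
if __name__ == "__main__":
    # Printing the declared paths can help when debugging a broken extension install.
    print(_jupyter_labextension_paths())
    print(_jupyter_nbextension_paths())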
|
{
"content_hash": "996a6c09231d1e1347bf6b92c939415d",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 65,
"avg_line_length": 26.153846153846153,
"alnum_prop": 0.5323529411764706,
"repo_name": "plotly/plotly.py",
"id": "9dbaebe9b17264cd5fa9e3950ccddd2cd8612a4d",
"size": "340",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/python/plotly/jupyterlab_plotly/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "545"
},
{
"name": "JavaScript",
"bytes": "2074"
},
{
"name": "PostScript",
"bytes": "565328"
},
{
"name": "Python",
"bytes": "31506317"
},
{
"name": "TypeScript",
"bytes": "71337"
}
],
"symlink_target": ""
}
|
import os
import sys
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "test_settings")
if __name__ == "__main__":
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
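# Typical invocation, for reference (assumes a test_settings module is importable from
# the working directory, which is what the setdefault above relies on):
#   python manage.py test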
|
{
"content_hash": "d061af367be5db59271aca46b82a63e2",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 64,
"avg_line_length": 27.5,
"alnum_prop": 0.7227272727272728,
"repo_name": "mozilla/mozilla-badges",
"id": "574a065cd5688e3bf25c60a802d9bcb533286daa",
"size": "242",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "vendor-local/src/django-valet-keys/manage.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "5364"
},
{
"name": "Perl",
"bytes": "6953"
},
{
"name": "Puppet",
"bytes": "6677"
},
{
"name": "Python",
"bytes": "258788"
},
{
"name": "Shell",
"bytes": "3065"
}
],
"symlink_target": ""
}
|
"""Tests for the Config Entry Flow helper."""
import pytest
from homeassistant import config_entries, data_entry_flow, setup
from homeassistant.config import async_process_ha_core_config
from homeassistant.helpers import config_entry_flow
from tests.async_mock import Mock, patch
from tests.common import (
MockConfigEntry,
MockModule,
mock_entity_platform,
mock_integration,
)
@pytest.fixture
def discovery_flow_conf(hass):
"""Register a handler."""
handler_conf = {"discovered": False}
async def has_discovered_devices(hass):
"""Mock if we have discovered devices."""
return handler_conf["discovered"]
with patch.dict(config_entries.HANDLERS):
config_entry_flow.register_discovery_flow(
"test", "Test", has_discovered_devices, config_entries.CONN_CLASS_LOCAL_POLL
)
yield handler_conf
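# For context, a hedged sketch of how an integration's config_flow.py would use the
# same helper (names like "my_domain" and my_discovery_library are illustrative only;
# the signature mirrors the registration in the fixture above):
#
#     async def _async_has_devices(hass):
#         return bool(await my_discovery_library.async_scan(hass))
#
#     config_entry_flow.register_discovery_flow(
#         "my_domain", "My Integration", _async_has_devices,
#         config_entries.CONN_CLASS_LOCAL_POLL,
#     )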
@pytest.fixture
def webhook_flow_conf(hass):
"""Register a handler."""
with patch.dict(config_entries.HANDLERS):
config_entry_flow.register_webhook_flow("test_single", "Test Single", {}, False)
config_entry_flow.register_webhook_flow(
"test_multiple", "Test Multiple", {}, True
)
yield {}
async def test_single_entry_allowed(hass, discovery_flow_conf):
"""Test only a single entry is allowed."""
flow = config_entries.HANDLERS["test"]()
flow.hass = hass
flow.context = {}
MockConfigEntry(domain="test").add_to_hass(hass)
result = await flow.async_step_user()
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "single_instance_allowed"
async def test_user_no_devices_found(hass, discovery_flow_conf):
"""Test if no devices found."""
flow = config_entries.HANDLERS["test"]()
flow.hass = hass
flow.context = {"source": config_entries.SOURCE_USER}
result = await flow.async_step_confirm(user_input={})
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "no_devices_found"
async def test_user_has_confirmation(hass, discovery_flow_conf):
"""Test user requires confirmation to setup."""
discovery_flow_conf["discovered"] = True
mock_entity_platform(hass, "config_flow.test", None)
result = await hass.config_entries.flow.async_init(
"test", context={"source": config_entries.SOURCE_USER}, data={}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "confirm"
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
@pytest.mark.parametrize("source", ["discovery", "ssdp", "zeroconf"])
async def test_discovery_single_instance(hass, discovery_flow_conf, source):
"""Test we not allow duplicates."""
flow = config_entries.HANDLERS["test"]()
flow.hass = hass
flow.context = {}
MockConfigEntry(domain="test").add_to_hass(hass)
result = await getattr(flow, f"async_step_{source}")({})
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "single_instance_allowed"
@pytest.mark.parametrize("source", ["discovery", "ssdp", "zeroconf"])
async def test_discovery_confirmation(hass, discovery_flow_conf, source):
"""Test we ask for confirmation via discovery."""
flow = config_entries.HANDLERS["test"]()
flow.hass = hass
flow.context = {"source": source}
result = await getattr(flow, f"async_step_{source}")({})
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "confirm"
result = await flow.async_step_confirm({})
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
async def test_multiple_discoveries(hass, discovery_flow_conf):
"""Test we only create one instance for multiple discoveries."""
mock_entity_platform(hass, "config_flow.test", None)
result = await hass.config_entries.flow.async_init(
"test", context={"source": config_entries.SOURCE_DISCOVERY}, data={}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
# Second discovery
result = await hass.config_entries.flow.async_init(
"test", context={"source": config_entries.SOURCE_DISCOVERY}, data={}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
async def test_only_one_in_progress(hass, discovery_flow_conf):
"""Test a user initialized one will finish and cancel discovered one."""
mock_entity_platform(hass, "config_flow.test", None)
# Discovery starts flow
result = await hass.config_entries.flow.async_init(
"test", context={"source": config_entries.SOURCE_DISCOVERY}, data={}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
# User starts flow
result = await hass.config_entries.flow.async_init(
"test", context={"source": config_entries.SOURCE_USER}, data={}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
# Discovery flow has not been aborted
assert len(hass.config_entries.flow.async_progress()) == 2
# Discovery should be aborted once user confirms
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert len(hass.config_entries.flow.async_progress()) == 0
async def test_import_abort_discovery(hass, discovery_flow_conf):
"""Test import will finish and cancel discovered one."""
mock_entity_platform(hass, "config_flow.test", None)
# Discovery starts flow
result = await hass.config_entries.flow.async_init(
"test", context={"source": config_entries.SOURCE_DISCOVERY}, data={}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
# Start import flow
result = await hass.config_entries.flow.async_init(
"test", context={"source": config_entries.SOURCE_IMPORT}, data={}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
# Discovery flow has been aborted
assert len(hass.config_entries.flow.async_progress()) == 0
async def test_import_no_confirmation(hass, discovery_flow_conf):
"""Test import requires no confirmation to set up."""
flow = config_entries.HANDLERS["test"]()
flow.hass = hass
flow.context = {}
discovery_flow_conf["discovered"] = True
result = await flow.async_step_import(None)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
async def test_import_single_instance(hass, discovery_flow_conf):
"""Test import doesn't create second instance."""
flow = config_entries.HANDLERS["test"]()
flow.hass = hass
flow.context = {}
discovery_flow_conf["discovered"] = True
MockConfigEntry(domain="test").add_to_hass(hass)
result = await flow.async_step_import(None)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
async def test_ignored_discoveries(hass, discovery_flow_conf):
"""Test we can ignore discovered entries."""
mock_entity_platform(hass, "config_flow.test", None)
result = await hass.config_entries.flow.async_init(
"test", context={"source": config_entries.SOURCE_DISCOVERY}, data={}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
flow = next(
(
flw
for flw in hass.config_entries.flow.async_progress()
if flw["flow_id"] == result["flow_id"]
),
None,
)
# Ignore it.
await hass.config_entries.flow.async_init(
flow["handler"],
context={"source": config_entries.SOURCE_IGNORE},
data={"unique_id": flow["context"]["unique_id"]},
)
# Second discovery should be aborted
result = await hass.config_entries.flow.async_init(
"test", context={"source": config_entries.SOURCE_DISCOVERY}, data={}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
async def test_webhook_single_entry_allowed(hass, webhook_flow_conf):
"""Test only a single entry is allowed."""
flow = config_entries.HANDLERS["test_single"]()
flow.hass = hass
MockConfigEntry(domain="test_single").add_to_hass(hass)
result = await flow.async_step_user()
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "one_instance_allowed"
async def test_webhook_multiple_entries_allowed(hass, webhook_flow_conf):
"""Test multiple entries are allowed when specified."""
flow = config_entries.HANDLERS["test_multiple"]()
flow.hass = hass
MockConfigEntry(domain="test_multiple").add_to_hass(hass)
hass.config.api = Mock(base_url="http://example.com")
result = await flow.async_step_user()
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
async def test_webhook_config_flow_registers_webhook(hass, webhook_flow_conf):
"""Test setting up an entry creates a webhook."""
flow = config_entries.HANDLERS["test_single"]()
flow.hass = hass
await async_process_ha_core_config(
hass, {"external_url": "https://example.com"},
)
result = await flow.async_step_user(user_input={})
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["data"]["webhook_id"] is not None
async def test_webhook_create_cloudhook(hass, webhook_flow_conf):
"""Test only a single entry is allowed."""
assert await setup.async_setup_component(hass, "cloud", {})
async_setup_entry = Mock(return_value=True)
async_unload_entry = Mock(return_value=True)
mock_integration(
hass,
MockModule(
"test_single",
async_setup_entry=async_setup_entry,
async_unload_entry=async_unload_entry,
async_remove_entry=config_entry_flow.webhook_async_remove_entry,
),
)
mock_entity_platform(hass, "config_flow.test_single", None)
result = await hass.config_entries.flow.async_init(
"test_single", context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
with patch(
"hass_nabucasa.cloudhooks.Cloudhooks.async_create",
return_value={"cloudhook_url": "https://example.com"},
) as mock_create, patch(
"homeassistant.components.cloud.async_active_subscription", return_value=True
), patch(
"homeassistant.components.cloud.async_is_logged_in", return_value=True
):
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["description_placeholders"]["webhook_url"] == "https://example.com"
assert len(mock_create.mock_calls) == 1
assert len(async_setup_entry.mock_calls) == 1
with patch(
"hass_nabucasa.cloudhooks.Cloudhooks.async_delete",
return_value={"cloudhook_url": "https://example.com"},
) as mock_delete:
result = await hass.config_entries.async_remove(result["result"].entry_id)
assert len(mock_delete.mock_calls) == 1
assert result["require_restart"] is False
|
{
"content_hash": "95c08284941f74d6fe7fa563360630ce",
"timestamp": "",
"source": "github",
"line_count": 317,
"max_line_length": 88,
"avg_line_length": 35.17981072555205,
"alnum_prop": 0.6705523672883787,
"repo_name": "titilambert/home-assistant",
"id": "7893650d420824e6e1f4467cbae52aa1f40cd679",
"size": "11152",
"binary": false,
"copies": "5",
"ref": "refs/heads/dev",
"path": "tests/helpers/test_config_entry_flow.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1488"
},
{
"name": "Python",
"bytes": "25849092"
},
{
"name": "Shell",
"bytes": "4410"
}
],
"symlink_target": ""
}
|
"""
sentry.tasks.store
~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from django.conf import settings
from sentry.tasks.base import instrumented_task
from sentry.utils.safe import safe_execute
@instrumented_task(
name='sentry.tasks.store.preprocess_event',
queue='events')
def preprocess_event(cache_key=None, data=None, **kwargs):
from sentry.app import cache
from sentry.plugins import plugins
from sentry.tasks.fetch_source import expand_javascript_source
if cache_key:
data = cache.get(cache_key)
logger = preprocess_event.get_logger()
if data is None:
logger.error('Data not available in preprocess_event (cache_key=%s)', cache_key)
return
project = data['project']
# TODO(dcramer): ideally we would know if data changed by default
has_changed = False
# TODO(dcramer): move js sourcemap processing into JS plugin
if settings.SENTRY_SCRAPE_JAVASCRIPT_CONTEXT and data.get('platform') == 'javascript':
try:
expand_javascript_source(data)
except Exception as e:
logger.exception(u'Error fetching javascript source: %r [%s]', data['event_id'], e)
else:
has_changed = True
for plugin in plugins.all(version=2):
for processor in (safe_execute(plugin.get_event_preprocessors) or ()):
result = safe_execute(processor, data)
if result:
data = result
has_changed = True
assert data['project'] == project, 'Project cannot be mutated by preprocessor'
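    # Write the (possibly mutated) event back to the cache so save_event picks up
    # the preprocessed data; entries are kept for an hour.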
if has_changed and cache_key:
cache.set(cache_key, data, 3600)
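    # When the payload is cached, pass only the key through the broker instead of
    # the full event body.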
if cache_key:
data = None
save_event.delay(cache_key=cache_key, data=data)
@instrumented_task(
name='sentry.tasks.store.save_event',
queue='events')
def save_event(cache_key=None, data=None, **kwargs):
"""
Saves an event to the database.
"""
from sentry.app import cache
from sentry.event_manager import EventManager
if cache_key:
data = cache.get(cache_key)
if data is None:
return
project = data.pop('project')
try:
manager = EventManager(data)
manager.save(project)
finally:
if cache_key:
cache.delete(cache_key)
|
{
"content_hash": "c8d1c4798fcab2d996735f901b199108",
"timestamp": "",
"source": "github",
"line_count": 88,
"max_line_length": 95,
"avg_line_length": 27.477272727272727,
"alnum_prop": 0.6443341604631927,
"repo_name": "camilonova/sentry",
"id": "9a09a27ffc3d76a9b924c51159e9c17103f2336e",
"size": "2418",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/sentry/tasks/store.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "575532"
},
{
"name": "JavaScript",
"bytes": "608497"
},
{
"name": "Makefile",
"bytes": "7296"
},
{
"name": "Python",
"bytes": "4505014"
},
{
"name": "Shell",
"bytes": "4106"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import
import os
import itertools
from copy import deepcopy
from mock import Mock, patch
from pickle import loads, dumps
from kombu import Exchange
from celery import shared_task, current_app
from celery import app as _app
from celery import _state
from celery.app import base as _appbase
from celery.app import defaults
from celery.exceptions import ImproperlyConfigured
from celery.five import items
from celery.loaders.base import BaseLoader
from celery.platforms import pyimplementation
from celery.utils.serialization import pickle
from celery.tests.case import (
CELERY_TEST_CONFIG,
AppCase,
depends_on_current_app,
mask_modules,
platform_pyimp,
sys_platform,
pypy_version,
with_environ,
)
from celery.utils import uuid
from celery.utils.mail import ErrorMail
THIS_IS_A_KEY = 'this is a value'
class ObjectConfig(object):
FOO = 1
BAR = 2
object_config = ObjectConfig()
dict_config = dict(FOO=10, BAR=20)
class Object(object):
def __init__(self, **kwargs):
for key, value in items(kwargs):
setattr(self, key, value)
def _get_test_config():
return deepcopy(CELERY_TEST_CONFIG)
test_config = _get_test_config()
class test_module(AppCase):
def test_default_app(self):
self.assertEqual(_app.default_app, _state.default_app)
def test_bugreport(self):
self.assertTrue(_app.bugreport(app=self.app))
class test_App(AppCase):
def setup(self):
self.app.add_defaults(test_config)
def test_task(self):
with self.Celery('foozibari') as app:
def fun():
pass
fun.__module__ = '__main__'
task = app.task(fun)
self.assertEqual(task.name, app.main + '.fun')
def test_with_config_source(self):
with self.Celery(config_source=ObjectConfig) as app:
self.assertEqual(app.conf.FOO, 1)
self.assertEqual(app.conf.BAR, 2)
@depends_on_current_app
def test_task_windows_execv(self):
prev, _appbase._EXECV = _appbase._EXECV, True
try:
@self.app.task(shared=False)
def foo():
pass
self.assertTrue(foo._get_current_object()) # is proxy
finally:
_appbase._EXECV = prev
assert not _appbase._EXECV
def test_task_takes_no_args(self):
with self.assertRaises(TypeError):
@self.app.task(1)
def foo():
pass
def test_add_defaults(self):
self.assertFalse(self.app.configured)
_conf = {'FOO': 300}
conf = lambda: _conf
self.app.add_defaults(conf)
self.assertIn(conf, self.app._pending_defaults)
self.assertFalse(self.app.configured)
self.assertEqual(self.app.conf.FOO, 300)
self.assertTrue(self.app.configured)
self.assertFalse(self.app._pending_defaults)
# defaults not pickled
appr = loads(dumps(self.app))
with self.assertRaises(AttributeError):
appr.conf.FOO
# add more defaults after configured
conf2 = {'FOO': 'BAR'}
self.app.add_defaults(conf2)
self.assertEqual(self.app.conf.FOO, 'BAR')
self.assertIn(_conf, self.app.conf.defaults)
self.assertIn(conf2, self.app.conf.defaults)
def test_connection_or_acquire(self):
with self.app.connection_or_acquire(block=True):
self.assertTrue(self.app.pool._dirty)
with self.app.connection_or_acquire(pool=False):
self.assertFalse(self.app.pool._dirty)
def test_maybe_close_pool(self):
cpool = self.app._pool = Mock()
ppool = self.app.amqp._producer_pool = Mock()
self.app._maybe_close_pool()
cpool.force_close_all.assert_called_with()
ppool.force_close_all.assert_called_with()
self.assertIsNone(self.app._pool)
self.assertIsNone(self.app.amqp._producer_pool)
self.app._pool = Mock()
self.app._maybe_close_pool()
self.app._maybe_close_pool()
def test_using_v1_reduce(self):
self.app._using_v1_reduce = True
self.assertTrue(loads(dumps(self.app)))
def test_autodiscover_tasks(self):
self.app.conf.CELERY_FORCE_BILLIARD_LOGGING = True
with patch('celery.app.base.ensure_process_aware_logger') as ep:
self.app.loader.autodiscover_tasks = Mock()
self.app.autodiscover_tasks(['proj.A', 'proj.B'])
ep.assert_called_with()
self.app.loader.autodiscover_tasks.assert_called_with(
['proj.A', 'proj.B'], 'tasks',
)
with patch('celery.app.base.ensure_process_aware_logger') as ep:
self.app.conf.CELERY_FORCE_BILLIARD_LOGGING = False
self.app.autodiscover_tasks(['proj.A', 'proj.B'])
self.assertFalse(ep.called)
@with_environ('CELERY_BROKER_URL', '')
def test_with_broker(self):
with self.Celery(broker='foo://baribaz') as app:
self.assertEqual(app.conf.BROKER_URL, 'foo://baribaz')
def test_repr(self):
self.assertTrue(repr(self.app))
def test_custom_task_registry(self):
with self.Celery(tasks=self.app.tasks) as app2:
self.assertIs(app2.tasks, self.app.tasks)
def test_include_argument(self):
with self.Celery(include=('foo', 'bar.foo')) as app:
self.assertEqual(app.conf.CELERY_IMPORTS, ('foo', 'bar.foo'))
def test_set_as_current(self):
current = _state._tls.current_app
try:
app = self.Celery(set_as_current=True)
self.assertIs(_state._tls.current_app, app)
finally:
_state._tls.current_app = current
def test_current_task(self):
@self.app.task
def foo(shared=False):
pass
_state._task_stack.push(foo)
try:
self.assertEqual(self.app.current_task.name, foo.name)
finally:
_state._task_stack.pop()
def test_task_not_shared(self):
with patch('celery.app.base.shared_task') as sh:
@self.app.task(shared=False)
def foo():
pass
self.assertFalse(sh.called)
def test_task_compat_with_filter(self):
with self.Celery(accept_magic_kwargs=True) as app:
check = Mock()
def filter(task):
check(task)
return task
@app.task(filter=filter, shared=False)
def foo():
pass
check.assert_called_with(foo)
def test_task_with_filter(self):
with self.Celery(accept_magic_kwargs=False) as app:
check = Mock()
def filter(task):
check(task)
return task
assert not _appbase._EXECV
@app.task(filter=filter, shared=False)
def foo():
pass
check.assert_called_with(foo)
def test_task_sets_main_name_MP_MAIN_FILE(self):
from celery import utils as _utils
_utils.MP_MAIN_FILE = __file__
try:
with self.Celery('xuzzy') as app:
@app.task
def foo():
pass
self.assertEqual(foo.name, 'xuzzy.foo')
finally:
_utils.MP_MAIN_FILE = None
def test_annotate_decorator(self):
from celery.app.task import Task
class adX(Task):
abstract = True
def run(self, y, z, x):
return y, z, x
check = Mock()
def deco(fun):
def _inner(*args, **kwargs):
check(*args, **kwargs)
return fun(*args, **kwargs)
return _inner
self.app.conf.CELERY_ANNOTATIONS = {
adX.name: {'@__call__': deco}
}
adX.bind(self.app)
self.assertIs(adX.app, self.app)
i = adX()
i(2, 4, x=3)
check.assert_called_with(i, 2, 4, x=3)
i.annotate()
i.annotate()
def test_apply_async_has__self__(self):
@self.app.task(__self__='hello', shared=False)
def aawsX():
pass
with patch('celery.app.amqp.TaskProducer.publish_task') as dt:
aawsX.apply_async((4, 5))
args = dt.call_args[0][1]
self.assertEqual(args, ('hello', 4, 5))
def test_apply_async_adds_children(self):
from celery._state import _task_stack
@self.app.task(shared=False)
def a3cX1(self):
pass
@self.app.task(shared=False)
def a3cX2(self):
pass
_task_stack.push(a3cX1)
try:
a3cX1.push_request(called_directly=False)
try:
res = a3cX2.apply_async(add_to_parent=True)
self.assertIn(res, a3cX1.request.children)
finally:
a3cX1.pop_request()
finally:
_task_stack.pop()
def test_pickle_app(self):
changes = dict(THE_FOO_BAR='bars',
THE_MII_MAR='jars')
self.app.conf.update(changes)
saved = pickle.dumps(self.app)
self.assertLess(len(saved), 2048)
restored = pickle.loads(saved)
self.assertDictContainsSubset(changes, restored.conf)
def test_worker_main(self):
from celery.bin import worker as worker_bin
class worker(worker_bin.worker):
def execute_from_commandline(self, argv):
return argv
prev, worker_bin.worker = worker_bin.worker, worker
try:
ret = self.app.worker_main(argv=['--version'])
self.assertListEqual(ret, ['--version'])
finally:
worker_bin.worker = prev
def test_config_from_envvar(self):
os.environ['CELERYTEST_CONFIG_OBJECT'] = 'celery.tests.app.test_app'
self.app.config_from_envvar('CELERYTEST_CONFIG_OBJECT')
self.assertEqual(self.app.conf.THIS_IS_A_KEY, 'this is a value')
def test_config_from_object(self):
class Object(object):
LEAVE_FOR_WORK = True
MOMENT_TO_STOP = True
CALL_ME_BACK = 123456789
WANT_ME_TO = False
UNDERSTAND_ME = True
self.app.config_from_object(Object())
self.assertTrue(self.app.conf.LEAVE_FOR_WORK)
self.assertTrue(self.app.conf.MOMENT_TO_STOP)
self.assertEqual(self.app.conf.CALL_ME_BACK, 123456789)
self.assertFalse(self.app.conf.WANT_ME_TO)
self.assertTrue(self.app.conf.UNDERSTAND_ME)
def test_config_from_cmdline(self):
cmdline = ['.always_eager=no',
'.result_backend=/dev/null',
'celeryd.prefetch_multiplier=368',
'.foobarstring=(string)300',
'.foobarint=(int)300',
'.result_engine_options=(dict){"foo": "bar"}']
self.app.config_from_cmdline(cmdline, namespace='celery')
self.assertFalse(self.app.conf.CELERY_ALWAYS_EAGER)
self.assertEqual(self.app.conf.CELERY_RESULT_BACKEND, '/dev/null')
self.assertEqual(self.app.conf.CELERYD_PREFETCH_MULTIPLIER, 368)
self.assertEqual(self.app.conf.CELERY_FOOBARSTRING, '300')
self.assertEqual(self.app.conf.CELERY_FOOBARINT, 300)
self.assertDictEqual(self.app.conf.CELERY_RESULT_ENGINE_OPTIONS,
{'foo': 'bar'})
def test_compat_setting_CELERY_BACKEND(self):
self.app.config_from_object(Object(CELERY_BACKEND='set_by_us'))
self.assertEqual(self.app.conf.CELERY_RESULT_BACKEND, 'set_by_us')
def test_setting_BROKER_TRANSPORT_OPTIONS(self):
_args = {'foo': 'bar', 'spam': 'baz'}
self.app.config_from_object(Object())
self.assertEqual(self.app.conf.BROKER_TRANSPORT_OPTIONS, {})
self.app.config_from_object(Object(BROKER_TRANSPORT_OPTIONS=_args))
self.assertEqual(self.app.conf.BROKER_TRANSPORT_OPTIONS, _args)
def test_Windows_log_color_disabled(self):
self.app.IS_WINDOWS = True
self.assertFalse(self.app.log.supports_color(True))
def test_compat_setting_CARROT_BACKEND(self):
self.app.config_from_object(Object(CARROT_BACKEND='set_by_us'))
self.assertEqual(self.app.conf.BROKER_TRANSPORT, 'set_by_us')
def test_WorkController(self):
x = self.app.WorkController
self.assertIs(x.app, self.app)
def test_Worker(self):
x = self.app.Worker
self.assertIs(x.app, self.app)
@depends_on_current_app
def test_AsyncResult(self):
x = self.app.AsyncResult('1')
self.assertIs(x.app, self.app)
r = loads(dumps(x))
# not set as current, so ends up as default app after reduce
self.assertIs(r.app, current_app._get_current_object())
def test_get_active_apps(self):
self.assertTrue(list(_state._get_active_apps()))
app1 = self.Celery()
appid = id(app1)
self.assertIn(app1, _state._get_active_apps())
app1.close()
del(app1)
# weakref removed from list when app goes out of scope.
with self.assertRaises(StopIteration):
next(app for app in _state._get_active_apps() if id(app) == appid)
def test_config_from_envvar_more(self, key='CELERY_HARNESS_CFG1'):
self.assertFalse(self.app.config_from_envvar('HDSAJIHWIQHEWQU',
silent=True))
with self.assertRaises(ImproperlyConfigured):
self.app.config_from_envvar('HDSAJIHWIQHEWQU', silent=False)
os.environ[key] = __name__ + '.object_config'
self.assertTrue(self.app.config_from_envvar(key))
self.assertEqual(self.app.conf['FOO'], 1)
self.assertEqual(self.app.conf['BAR'], 2)
os.environ[key] = 'unknown_asdwqe.asdwqewqe'
with self.assertRaises(ImportError):
self.app.config_from_envvar(key, silent=False)
self.assertFalse(self.app.config_from_envvar(key, silent=True))
os.environ[key] = __name__ + '.dict_config'
self.assertTrue(self.app.config_from_envvar(key))
self.assertEqual(self.app.conf['FOO'], 10)
self.assertEqual(self.app.conf['BAR'], 20)
@patch('celery.bin.celery.CeleryCommand.execute_from_commandline')
def test_start(self, execute):
self.app.start()
self.assertTrue(execute.called)
def test_mail_admins(self):
class Loader(BaseLoader):
def mail_admins(*args, **kwargs):
return args, kwargs
self.app.loader = Loader(app=self.app)
self.app.conf.ADMINS = None
self.assertFalse(self.app.mail_admins('Subject', 'Body'))
self.app.conf.ADMINS = [('George Costanza', 'george@vandelay.com')]
self.assertTrue(self.app.mail_admins('Subject', 'Body'))
def test_amqp_get_broker_info(self):
self.assertDictContainsSubset(
{'hostname': 'localhost',
'userid': 'guest',
'password': 'guest',
'virtual_host': '/'},
self.app.connection('pyamqp://').info(),
)
self.app.conf.BROKER_PORT = 1978
self.app.conf.BROKER_VHOST = 'foo'
self.assertDictContainsSubset(
{'port': 1978, 'virtual_host': 'foo'},
self.app.connection('pyamqp://:1978/foo').info(),
)
conn = self.app.connection('pyamqp:////value')
self.assertDictContainsSubset({'virtual_host': '/value'},
conn.info())
def test_amqp_failover_strategy_selection(self):
# Test passing in a string and make sure the string
# gets there untouched
self.app.conf.BROKER_FAILOVER_STRATEGY = 'foo-bar'
self.assertEquals(
self.app.connection('amqp:////value').failover_strategy,
'foo-bar',
)
# Try passing in None
self.app.conf.BROKER_FAILOVER_STRATEGY = None
self.assertEquals(
self.app.connection('amqp:////value').failover_strategy,
itertools.cycle,
)
# Test passing in a method
def my_failover_strategy(it):
yield True
self.app.conf.BROKER_FAILOVER_STRATEGY = my_failover_strategy
self.assertEquals(
self.app.connection('amqp:////value').failover_strategy,
my_failover_strategy,
)
def test_BROKER_BACKEND_alias(self):
self.assertEqual(self.app.conf.BROKER_BACKEND,
self.app.conf.BROKER_TRANSPORT)
def test_after_fork(self):
p = self.app._pool = Mock()
self.app._after_fork(self.app)
p.force_close_all.assert_called_with()
self.assertIsNone(self.app._pool)
self.app._after_fork(self.app)
def test_pool_no_multiprocessing(self):
with mask_modules('multiprocessing.util'):
pool = self.app.pool
self.assertIs(pool, self.app._pool)
def test_bugreport(self):
self.assertTrue(self.app.bugreport())
def test_send_task_sent_event(self):
class Dispatcher(object):
sent = []
def publish(self, type, fields, *args, **kwargs):
self.sent.append((type, fields))
conn = self.app.connection()
chan = conn.channel()
try:
for e in ('foo_exchange', 'moo_exchange', 'bar_exchange'):
chan.exchange_declare(e, 'direct', durable=True)
chan.queue_declare(e, durable=True)
chan.queue_bind(e, e, e)
finally:
chan.close()
assert conn.transport_cls == 'memory'
prod = self.app.amqp.TaskProducer(
conn, exchange=Exchange('foo_exchange'),
send_sent_event=True,
)
dispatcher = Dispatcher()
self.assertTrue(prod.publish_task('footask', (), {},
exchange='moo_exchange',
routing_key='moo_exchange',
event_dispatcher=dispatcher))
self.assertTrue(dispatcher.sent)
self.assertEqual(dispatcher.sent[0][0], 'task-sent')
self.assertTrue(prod.publish_task('footask', (), {},
event_dispatcher=dispatcher,
exchange='bar_exchange',
routing_key='bar_exchange'))
def test_error_mail_sender(self):
x = ErrorMail.subject % {'name': 'task_name',
'id': uuid(),
'exc': 'FOOBARBAZ',
'hostname': 'lana'}
self.assertTrue(x)
def test_error_mail_disabled(self):
task = Mock()
x = ErrorMail(task)
x.should_send = Mock()
x.should_send.return_value = False
x.send(Mock(), Mock())
self.assertFalse(task.app.mail_admins.called)
class test_defaults(AppCase):
def test_str_to_bool(self):
for s in ('false', 'no', '0'):
self.assertFalse(defaults.strtobool(s))
for s in ('true', 'yes', '1'):
self.assertTrue(defaults.strtobool(s))
with self.assertRaises(TypeError):
defaults.strtobool('unsure')
class test_debugging_utils(AppCase):
def test_enable_disable_trace(self):
try:
_app.enable_trace()
self.assertEqual(_app.app_or_default, _app._app_or_default_trace)
_app.disable_trace()
self.assertEqual(_app.app_or_default, _app._app_or_default)
finally:
_app.disable_trace()
class test_pyimplementation(AppCase):
def test_platform_python_implementation(self):
with platform_pyimp(lambda: 'Xython'):
self.assertEqual(pyimplementation(), 'Xython')
def test_platform_jython(self):
with platform_pyimp():
with sys_platform('java 1.6.51'):
self.assertIn('Jython', pyimplementation())
def test_platform_pypy(self):
with platform_pyimp():
with sys_platform('darwin'):
with pypy_version((1, 4, 3)):
self.assertIn('PyPy', pyimplementation())
with pypy_version((1, 4, 3, 'a4')):
self.assertIn('PyPy', pyimplementation())
def test_platform_fallback(self):
with platform_pyimp():
with sys_platform('darwin'):
with pypy_version():
self.assertEqual('CPython', pyimplementation())
class test_shared_task(AppCase):
def test_registers_to_all_apps(self):
with self.Celery('xproj', set_as_current=True) as xproj:
xproj.finalize()
@shared_task
def foo():
return 42
@shared_task()
def bar():
return 84
self.assertIs(foo.app, xproj)
self.assertIs(bar.app, xproj)
self.assertTrue(foo._get_current_object())
with self.Celery('yproj', set_as_current=True) as yproj:
self.assertIs(foo.app, yproj)
self.assertIs(bar.app, yproj)
@shared_task()
def baz():
return 168
self.assertIs(baz.app, yproj)
|
{
"content_hash": "ceea35b058666762d8ccd9eff3aa6fac",
"timestamp": "",
"source": "github",
"line_count": 660,
"max_line_length": 78,
"avg_line_length": 32.49545454545454,
"alnum_prop": 0.5744393155219845,
"repo_name": "sivaprakashniet/push_pull",
"id": "49b9fdd037e7d122735901961f7ca4e6940b1890",
"size": "21447",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "p2p/lib/python2.7/site-packages/celery/tests/app/test_app.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "33347"
},
{
"name": "CSS",
"bytes": "111284"
},
{
"name": "CoffeeScript",
"bytes": "21"
},
{
"name": "HTML",
"bytes": "933220"
},
{
"name": "JavaScript",
"bytes": "260224"
},
{
"name": "Nginx",
"bytes": "4758"
},
{
"name": "Python",
"bytes": "9725308"
},
{
"name": "Roff",
"bytes": "17679"
},
{
"name": "Shell",
"bytes": "6008"
}
],
"symlink_target": ""
}
|
"""Support for UK Met Office weather service."""
from datetime import timedelta
import logging
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
ATTR_ATTRIBUTION,
CONF_API_KEY,
CONF_LATITUDE,
CONF_LONGITUDE,
CONF_MONITORED_CONDITIONS,
CONF_NAME,
TEMP_CELSIUS,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
ATTR_LAST_UPDATE = "last_update"
ATTR_SENSOR_ID = "sensor_id"
ATTR_SITE_ID = "site_id"
ATTR_SITE_NAME = "site_name"
ATTRIBUTION = "Data provided by the Met Office"
CONDITION_CLASSES = {
"cloudy": ["7", "8"],
"fog": ["5", "6"],
"hail": ["19", "20", "21"],
"lightning": ["30"],
"lightning-rainy": ["28", "29"],
"partlycloudy": ["2", "3"],
"pouring": ["13", "14", "15"],
"rainy": ["9", "10", "11", "12"],
"snowy": ["22", "23", "24", "25", "26", "27"],
"snowy-rainy": ["16", "17", "18"],
"sunny": ["0", "1"],
"windy": [],
"windy-variant": [],
"exceptional": [],
}
DEFAULT_NAME = "Met Office"
VISIBILITY_CLASSES = {
"VP": "<1",
"PO": "1-4",
"MO": "4-10",
"GO": "10-20",
"VG": "20-40",
"EX": ">40",
}
MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=35)
# Sensor types are defined like: Name, units
SENSOR_TYPES = {
"name": ["Station Name", None],
"weather": ["Weather", None],
"temperature": ["Temperature", TEMP_CELSIUS],
"feels_like_temperature": ["Feels Like Temperature", TEMP_CELSIUS],
"wind_speed": ["Wind Speed", "mph"],
"wind_direction": ["Wind Direction", None],
"wind_gust": ["Wind Gust", "mph"],
"visibility": ["Visibility", None],
"visibility_distance": ["Visibility Distance", "km"],
"uv": ["UV", None],
"precipitation": ["Probability of Precipitation", "%"],
"humidity": ["Humidity", "%"],
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_API_KEY): cv.string,
vol.Required(CONF_MONITORED_CONDITIONS, default=[]): vol.All(
cv.ensure_list, [vol.In(SENSOR_TYPES)]
),
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Inclusive(
CONF_LATITUDE, "coordinates", "Latitude and longitude must exist together"
): cv.latitude,
vol.Inclusive(
CONF_LONGITUDE, "coordinates", "Latitude and longitude must exist together"
): cv.longitude,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Met Office sensor platform."""
import datapoint as dp
api_key = config.get(CONF_API_KEY)
latitude = config.get(CONF_LATITUDE, hass.config.latitude)
longitude = config.get(CONF_LONGITUDE, hass.config.longitude)
name = config.get(CONF_NAME)
datapoint = dp.connection(api_key=api_key)
if None in (latitude, longitude):
_LOGGER.error("Latitude or longitude not set in Home Assistant config")
return
try:
site = datapoint.get_nearest_site(latitude=latitude, longitude=longitude)
except dp.exceptions.APIException as err:
_LOGGER.error("Received error from Met Office Datapoint: %s", err)
return
if not site:
_LOGGER.error("Unable to get nearest Met Office forecast site")
return
data = MetOfficeCurrentData(hass, datapoint, site)
data.update()
if data.data is None:
return
sensors = []
for variable in config[CONF_MONITORED_CONDITIONS]:
sensors.append(MetOfficeCurrentSensor(site, data, variable, name))
add_entities(sensors, True)
class MetOfficeCurrentSensor(Entity):
"""Implementation of a Met Office current sensor."""
def __init__(self, site, data, condition, name):
"""Initialize the sensor."""
self._condition = condition
self.data = data
self._name = name
self.site = site
@property
def name(self):
"""Return the name of the sensor."""
return "{} {}".format(self._name, SENSOR_TYPES[self._condition][0])
@property
def state(self):
"""Return the state of the sensor."""
if self._condition == "visibility_distance" and hasattr(
self.data.data, "visibility"
):
return VISIBILITY_CLASSES.get(self.data.data.visibility.value)
if hasattr(self.data.data, self._condition):
variable = getattr(self.data.data, self._condition)
if self._condition == "weather":
return [
k
for k, v in CONDITION_CLASSES.items()
if self.data.data.weather.value in v
][0]
return variable.value
return None
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return SENSOR_TYPES[self._condition][1]
@property
def device_state_attributes(self):
"""Return the state attributes of the device."""
attr = {}
attr[ATTR_ATTRIBUTION] = ATTRIBUTION
attr[ATTR_LAST_UPDATE] = self.data.data.date
attr[ATTR_SENSOR_ID] = self._condition
attr[ATTR_SITE_ID] = self.site.id
attr[ATTR_SITE_NAME] = self.site.name
return attr
def update(self):
"""Update current conditions."""
self.data.update()
class MetOfficeCurrentData:
"""Get data from Datapoint."""
def __init__(self, hass, datapoint, site):
"""Initialize the data object."""
self._datapoint = datapoint
self._site = site
self.data = None
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
"""Get the latest data from Datapoint."""
import datapoint as dp
try:
forecast = self._datapoint.get_forecast_for_site(self._site.id, "3hourly")
self.data = forecast.now()
except (ValueError, dp.exceptions.APIException) as err:
_LOGGER.error("Check Met Office %s", err.args)
self.data = None
|
{
"content_hash": "6c186f11fa6cb936acf2715a751e5af1",
"timestamp": "",
"source": "github",
"line_count": 203,
"max_line_length": 87,
"avg_line_length": 30.192118226600986,
"alnum_prop": 0.6040137053352912,
"repo_name": "Cinntax/home-assistant",
"id": "3ca55533ce31c5639db17defb8177d6b50c4016f",
"size": "6129",
"binary": false,
"copies": "2",
"ref": "refs/heads/dev",
"path": "homeassistant/components/metoffice/sensor.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "17374056"
},
{
"name": "Shell",
"bytes": "6792"
}
],
"symlink_target": ""
}
|
import sys
sys.path.append("../Sketcher")
from Whiteboard.Canvas import Canvas
import pygame
from Pen import *
from PreProcessing import *
from Analyser import *
from Grammar import *
from Kamaelia.Chassis.Graphline import Graphline
from Kamaelia.Chassis.Pipeline import Pipeline
def StrokeRecogniser():
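    """Wire stroke points through quantise -> normalise -> analyse -> grammar;
    'drawing' output from the quantise and analyse stages is forwarded so the
    canvas can show feedback."""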
return Graphline( QUANTISE = QuantiseStroke(),
NORMALISE = Normalise(),
ANALYSE = Analyse(),
GRAMMAR = StrokeGrammar(),
linkages = {
("self", "inbox" ) : ("QUANTISE", "inbox"),
("QUANTISE", "outbox") : ("NORMALISE", "inbox"),
("NORMALISE", "outbox") : ("ANALYSE", "inbox"),
("ANALYSE", "outbox") : ("GRAMMAR", "inbox"),
("GRAMMAR", "outbox") : ("self", "outbox"),
("QUANTISE","drawing") : ("self", "drawing"),
("ANALYSE", "drawing") : ("self", "drawing"),
}
)
if __name__ == "__main__":
from Kamaelia.UI.Pygame.Display import PygameDisplay
from Kamaelia.Util.Console import ConsoleEchoer
width = 1024
height = 384
pgd = PygameDisplay( width=width, height=height ).activate()
PygameDisplay.setDisplayService(pgd)
Graphline( CANVAS = Canvas( position=(0,0),size=(width,height) ),
PEN = Pen(),
STROKER = StrokeRecogniser(),
OUTPUT = ConsoleEchoer(),
linkages = {
("CANVAS", "eventsOut") : ("PEN", "inbox"),
("PEN", "outbox") : ("CANVAS", "inbox"),
("PEN", "points") : ("STROKER", "inbox"),
("STROKER", "outbox") : ("OUTPUT", "inbox"),
("STROKER", "drawing") : ("CANVAS", "inbox"),
},
).run()
|
{
"content_hash": "8bc677523d818f4b56be37333fc9a45e",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 76,
"avg_line_length": 35.06896551724138,
"alnum_prop": 0.4591937069813176,
"repo_name": "sparkslabs/kamaelia",
"id": "efa8025cf9ac34974f90003c808f4a775d3f0471",
"size": "3286",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "Sketches/MH/GestureRecognition/StrokeRecogniser.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "3814"
},
{
"name": "C",
"bytes": "212854"
},
{
"name": "C++",
"bytes": "327546"
},
{
"name": "CSS",
"bytes": "114434"
},
{
"name": "ChucK",
"bytes": "422"
},
{
"name": "HTML",
"bytes": "1288960"
},
{
"name": "Java",
"bytes": "31832"
},
{
"name": "JavaScript",
"bytes": "829491"
},
{
"name": "M4",
"bytes": "12224"
},
{
"name": "Makefile",
"bytes": "150947"
},
{
"name": "NSIS",
"bytes": "18867"
},
{
"name": "OCaml",
"bytes": "643"
},
{
"name": "PHP",
"bytes": "49059"
},
{
"name": "Perl",
"bytes": "504"
},
{
"name": "Processing",
"bytes": "2885"
},
{
"name": "Python",
"bytes": "18900785"
},
{
"name": "Ruby",
"bytes": "4165"
},
{
"name": "Shell",
"bytes": "707588"
}
],
"symlink_target": ""
}
|
"""
Migrate contacts to custom_roles
Create Date: 2017-04-26 22:01:48.029793
"""
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name
from ggrc.migrations.utils.migrate_contacts import migrate_contacts
# revision identifiers, used by Alembic.
revision = '1ac595e94a23'
down_revision = '7371f62ceb3'
def upgrade():
"""Upgrade database schema and/or data, creating a new revision."""
tables = [
('AccessGroup', 'access_groups'),
('Assessment', 'assessments'),
('Clause', 'clauses'),
('Contract', 'directives'),
('Control', 'controls'),
('DataAsset', 'data_assets'),
('Facility', 'facilities'),
('Issue', 'issues'),
('Market', 'markets'),
('Objective', 'objectives'),
('OrgGroup', 'org_groups'),
('Policy', 'directives'),
('Process', 'systems'),
('Product', 'products'),
('Project', 'projects'),
('Program', 'programs'),
('Regulation', 'directives'),
('Section', 'sections'),
('Standard', 'directives'),
('System', 'systems'),
('Vendor', 'vendors'),
]
for type_, table_type in tables:
migrate_contacts(type_, table_type)
if type_ == 'Control':
# Controls have assessors in addition to contacts
extra_control_mappings = ({
'name': 'Principal Assignees',
'column': 'principal_assessor_id',
}, {
'name': 'Secondary Assignees',
'column': 'secondary_assessor_id',
})
migrate_contacts(type_, table_type, extra_control_mappings)
def downgrade():
"""Downgrade database schema and/or data back to the previous revision."""
pass
|
{
"content_hash": "da5a2099588296d417052e4e40ac30d1",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 79,
"avg_line_length": 29.771929824561404,
"alnum_prop": 0.6075427224513847,
"repo_name": "AleksNeStu/ggrc-core",
"id": "c01669cfd26777417c207a192cf2d655499be2a3",
"size": "1810",
"binary": false,
"copies": "1",
"ref": "refs/heads/release/0.10-Raspberry",
"path": "src/ggrc/migrations/versions/20170426220148_1ac595e94a23_migrate_contacts_to_custom_roles.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "221201"
},
{
"name": "HTML",
"bytes": "1055542"
},
{
"name": "JavaScript",
"bytes": "1872353"
},
{
"name": "Makefile",
"bytes": "7044"
},
{
"name": "Mako",
"bytes": "4320"
},
{
"name": "Python",
"bytes": "2700938"
},
{
"name": "Shell",
"bytes": "31273"
}
],
"symlink_target": ""
}
|
from empymod.scripts import tmtemod, fdesign
__all__ = ['tmtemod', 'fdesign']
|
{
"content_hash": "352096c448cbc77604e0b7d89937e501",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 44,
"avg_line_length": 26.333333333333332,
"alnum_prop": 0.6962025316455697,
"repo_name": "prisae/empymod",
"id": "096e5bdf4fa0ab75e36f6176038fc0d907954d80",
"size": "706",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "empymod/scripts/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "650728"
},
{
"name": "Shell",
"bytes": "689"
}
],
"symlink_target": ""
}
|
import os
import time
import traceback
import boto.ec2
ec2_instance_types = [
't1.micro',
'm1.small',
'm1.medium',
'm1.large',
'm1.xlarge',
'm3.xlarge',
'm3.2xlarge',
'm2.xlarge',
'm2.2xlarge',
'm2.4xlarge',
'c1.medium',
'c1.xlarge',
'hi1.4xlarge',
'hs1.8xlarge']
class EC2Inst(object):
def __init__(self, itype, image, region, user, ssh_key, sec_group):
self.itype = itype
self.image = image
self.region = region
self.remote_ip = None
self.user = user
self.key_name = os.path.basename(ssh_key)[0:-4] if ssh_key.endswith('.pem') else os.path.basename(ssh_key)
self.ssh_key = ssh_key
self.sec_group = sec_group
self.cloud = 'ec2'
self._vol = None
if image in ['ami-3fec7956', 'ami-f2191786', 'ami-883714cd', 'ami-4ac9437a']:
self.distr = 'debian'
if image in ['ami-05355a6c', 'ami-3ffed17a', 'ami-0358ce33', 'ami-c7c0d6b3']:
self.distr = 'centos'
def launch(self, disk_size=None):
print '[LAUNCHING] %s | %s | %s | %s' % (self.itype, self.image, self.region, self.user)
self._conn = boto.ec2.connect_to_region(self.region)
reservation = self._conn.run_instances(
self.image,
key_name=self.key_name,
instance_type=self.itype,
security_groups=[self.sec_group])
self._inst = reservation.instances[0]
self.remote_ip = self._inst.ip_address
if disk_size:
try:
print '[CREATING DISK]'
self._vol = self._conn.create_volume(disk_size, self._inst.placement)
print '[CREATE DISK] ok'
print '[ATTACHING DISK]'
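                # Wait up to ~2 minutes (24 polls, 5 s apart) for the instance to be
                # running before attaching the volume as /dev/sdb.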
for _ in range(24):
self.update()
if self._inst.state == 'running':
self._vol.attach(self._inst.id, '/dev/sdb')
print '[ATTACH DISK] ok'
break
else:
time.sleep(5)
else:
print '[ATTACH DISK] timeout'
raise SystemExit
except:
self.terminate()
raise
def update(self):
self._inst.update()
self.remote_ip = self._inst.ip_address
def terminate(self):
if self._vol:
try:
print '[DETACHING DISK]'
self._vol.update()
if self._vol.detach(force=True):
print '[DETACH DISK] ok'
else:
print '[DETACH DISK] failed'
except:
print '[DETACH DISK] failed'
print '[TERMINATING] %s | %s | %s | %s' % (self.itype, self.image, self.region, self.user)
self._conn.terminate_instances(instance_ids=[self._inst.id])
print '[TERMINATED]'
try:
print '[DELETING DISK]'
while self._vol.status != 'available':
self._vol.update()
time.sleep(5)
if self._conn.delete_volume(self._vol.id):
print '[DELETE DISK] ok'
else:
print '[DELETE DISK] failed'
except:
print '[DELETE DISK] failed'
traceback.print_exc()
self._vol = None
else:
print '[TERMINATING] %s | %s | %s | %s' % (self.itype, self.image, self.region, self.user)
self._conn.terminate_instances(instance_ids=[self._inst.id])
print '[TERMINATED]'
|
{
"content_hash": "088fa5d1fc804a23e4456b99b678b753",
"timestamp": "",
"source": "github",
"line_count": 119,
"max_line_length": 114,
"avg_line_length": 31.630252100840337,
"alnum_prop": 0.47901168969181723,
"repo_name": "Scalr/perf-benchmarks",
"id": "c50837cc1a77a0031234719b135642241a15f1dd",
"size": "3765",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/ec2.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "51006"
},
{
"name": "Shell",
"bytes": "2022"
}
],
"symlink_target": ""
}
|
'''
Covenant Add-on
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re,urllib,urlparse,json,base64
from resources.lib.modules import cleantitle
from resources.lib.modules import client
from resources.lib.modules import directstream
class source:
def __init__(self):
self.priority = 1
self.language = ['en']
self.domains = ['tunemovie.com', 'tunemovie.tv']
self.base_link = 'https://tunemovie.com'
self.search_link = '/search/%s.html'
def movie(self, imdb, title, localtitle, aliases, year):
try:
query = urlparse.urljoin(self.base_link, self.search_link)
query = query % urllib.quote_plus(title)
t = cleantitle.get(title)
r = client.request(query)
r = client.parseDOM(r, 'div', attrs = {'class': 'thumb'})
r = [(client.parseDOM(i, 'a', ret='href'), client.parseDOM(i, 'a', ret='title'), re.findall('(\d{4})', i)) for i in r]
r = [(i[0][0], i[1][0], i[2][0]) for i in r if len(i[0]) > 0 and len(i[1]) > 0 and len(i[2]) > 0]
url = [i[0] for i in r if t in cleantitle.get(i[1]) and year == i[2]][0]
return url
except:
return
def tvshow(self, imdb, tvdb, tvshowtitle, localtvshowtitle, aliases, year):
try:
url = {'imdb': imdb, 'tvdb': tvdb, 'tvshowtitle': tvshowtitle, 'year': year}
url = urllib.urlencode(url)
return url
except:
return
def episode(self, url, imdb, tvdb, title, premiered, season, episode):
try:
data = urlparse.parse_qs(url)
data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])
query = urlparse.urljoin(self.base_link, self.search_link)
query = query % urllib.quote_plus(data['tvshowtitle'])
t = cleantitle.get(data['tvshowtitle'])
r = client.request(query)
r = client.parseDOM(r, 'div', attrs = {'class': 'thumb'})
r = [(client.parseDOM(i, 'a', ret='href'), client.parseDOM(i, 'a', ret='title'), re.findall('(\d{4})', i)) for i in r]
r = [(i[0][0], i[1][0], i[2][0]) for i in r if len(i[0]) > 0 and len(i[1]) > 0 and len(i[2]) > 0]
url = [i[0] for i in r if t in cleantitle.get(i[1]) and ('Season %s' % season) in i[1]][0]
url += '?episode=%01d' % int(episode)
return url
except:
return
def sources(self, url, hostDict, hostprDict):
try:
sources = []
if url == None: return sources
url = urlparse.urljoin(self.base_link, url)
try:
url, episode = re.findall('(.+?)\?episode=(\d*)$', url)[0]
except:
episode = None
ref = url
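            # The page sometimes comes back empty, so retry the fetch up to three times.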
for i in range(3):
result = client.request(url)
if not result == None: break
if not episode == None:
result = client.parseDOM(result, 'div', attrs = {'id': 'ip_episode'})[0]
ep_url = client.parseDOM(result, 'a', attrs = {'data-name': str(episode)}, ret='href')[0]
for i in range(3):
result = client.request(ep_url)
if not result == None: break
r = client.parseDOM(result, 'div', attrs = {'class': '[^"]*server_line[^"]*'})
for u in r:
try:
url = urlparse.urljoin(self.base_link, '/ip.file/swf/plugins/ipplugins.php')
p1 = client.parseDOM(u, 'a', ret='data-film')[0]
p2 = client.parseDOM(u, 'a', ret='data-server')[0]
p3 = client.parseDOM(u, 'a', ret='data-name')[0]
post = {'ipplugins': 1, 'ip_film': p1, 'ip_server': p2, 'ip_name': p3}
post = urllib.urlencode(post)
for i in range(3):
result = client.request(url, post=post, XHR=True, referer=ref, timeout='10')
if not result == None: break
result = json.loads(result)
u = result['s']
s = result['v']
url = urlparse.urljoin(self.base_link, '/ip.file/swf/ipplayer/ipplayer.php')
for n in range(3):
try:
post = {'u': u, 'w': '100%', 'h': '420', 's': s, 'n': n}
post = urllib.urlencode(post)
result = client.request(url, post=post, XHR=True, referer=ref)
src = json.loads(result)['data']
if type(src) is list:
src = [i['files'] for i in src]
for i in src:
try:
sources.append({'source': 'gvideo', 'quality': directstream.googletag(i)[0]['quality'], 'language': 'en', 'url': i, 'direct': True, 'debridonly': False})
except:
pass
else:
src = client.request(src)
src = client.parseDOM(src, 'source', ret='src', attrs = {'type': 'video.+?'})[0]
src += '|%s' % urllib.urlencode({'User-agent': client.randomagent()})
sources.append({'source': 'cdn', 'quality': 'HD', 'language': 'en', 'url': src, 'direct': False, 'debridonly': False})
except:
pass
except:
pass
return sources
except:
return sources
def resolve(self, url):
return directstream.googlepass(url)
|
{
"content_hash": "27578bb4d960d531720d63c1e82728e8",
"timestamp": "",
"source": "github",
"line_count": 164,
"max_line_length": 193,
"avg_line_length": 39.63414634146341,
"alnum_prop": 0.4916923076923077,
"repo_name": "TheWardoctor/Wardoctors-repo",
"id": "e41b59721059f192d26e678ad6ff072102b69b64",
"size": "6541",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "script.module.uncoded/lib/resources/lib/sources/en/to_be_fixed/sitedown/tunemovie.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "3208"
},
{
"name": "JavaScript",
"bytes": "115722"
},
{
"name": "Python",
"bytes": "34405207"
},
{
"name": "Shell",
"bytes": "914"
}
],
"symlink_target": ""
}
|
from tests.providers.google.cloud.utils.gcp_authenticator import GOOGLE_CAMPAIGN_MANAGER_KEY
from tests.providers.google.marketing_platform.operators.test_campaign_manager_system_helper import (
GoogleCampaignManagerTestHelper,
)
from tests.test_utils.gcp_system_helpers import provide_gcp_context, skip_gcp_system
from tests.test_utils.system_tests_class import SystemTest
# Required scopes
SCOPES = [
'https://www.googleapis.com/auth/dfatrafficking',
'https://www.googleapis.com/auth/dfareporting',
'https://www.googleapis.com/auth/ddmconversions'
]
@skip_gcp_system(GOOGLE_CAMPAIGN_MANAGER_KEY)
class CampaignManagerSystemTest(SystemTest):
helper = GoogleCampaignManagerTestHelper()
@provide_gcp_context(GOOGLE_CAMPAIGN_MANAGER_KEY)
def setUp(self):
super().setUp()
self.helper.create_bucket()
@provide_gcp_context(GOOGLE_CAMPAIGN_MANAGER_KEY)
def tearDown(self):
self.helper.delete_bucket()
super().tearDown()
@provide_gcp_context(GOOGLE_CAMPAIGN_MANAGER_KEY, scopes=SCOPES)
def test_run_example_dag(self):
self.run_dag('example_campaign_manager', "airflow/providers/google/marketing_platform/example_dags")
|
{
"content_hash": "d78238638c15cad302d8c414cfc3f092",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 108,
"avg_line_length": 37.5625,
"alnum_prop": 0.7504159733777038,
"repo_name": "wileeam/airflow",
"id": "adb4817c3cc4bcf3c0d1d2ab743061ccbc179b84",
"size": "1990",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tests/providers/google/marketing_platform/operators/test_campaign_manager_system.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "13715"
},
{
"name": "Dockerfile",
"bytes": "17179"
},
{
"name": "HTML",
"bytes": "148281"
},
{
"name": "JavaScript",
"bytes": "25233"
},
{
"name": "Jupyter Notebook",
"bytes": "2933"
},
{
"name": "Mako",
"bytes": "1339"
},
{
"name": "Python",
"bytes": "9763694"
},
{
"name": "Shell",
"bytes": "221331"
},
{
"name": "TSQL",
"bytes": "879"
}
],
"symlink_target": ""
}
|
import os
import sys
import string
from optparse import OptionParser
PROG_ROOT = os.path.dirname(os.path.abspath( __file__ ))
from file_system import File, Folder
def main(argv):
parser = OptionParser(usage="usage: %prog [options]", version="%prog 0.1a")
parser.add_option("-a", "--app",
dest = "app",
help = "The application to build. Required.")
parser.add_option("-p", "--path",
dest = "path", default = '.',
help = "Conche root path. Default: Current Working Directory. Optional. Default: `%default`")
(options, args) = parser.parse_args()
if len(args):
parser.error("Unexpected arguments encountered.")
if not options.app:
parser.error("You must specify an application.")
path = options.path
if path == ".":
path = os.getcwdu()
target = Folder(path)
if not target.exists:
target.make()
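    # Copy the bundled project template into the target folder, then stamp the
    # chosen app name into apps.yaml.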
source = Folder(PROG_ROOT).child_folder('template')
target.copy_contents_of(source, incremental=True)
apps = File(target.child('apps.yaml'))
appsString = apps.read_all()
appsString = string.Template(appsString).safe_substitute(init_app = options.app)
apps.write(appsString)
if __name__ == "__main__":
main(sys.argv[1:])
|
{
"content_hash": "973b2798bbc3cc43acd31f88fa6b35f0",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 117,
"avg_line_length": 32.651162790697676,
"alnum_prop": 0.5662393162393162,
"repo_name": "lakshmivyas/conche",
"id": "122e34bd71ce40c7e4e12f50bd32594024bc5088",
"size": "1426",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "conche_init.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Objective-C",
"bytes": "5745"
},
{
"name": "Python",
"bytes": "36760"
}
],
"symlink_target": ""
}
|
"""URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.0/topics/http/urls/
"""
from django.conf import settings
from django.conf.urls.static import static
from django.contrib import admin
from fastsitemaps import views as sitemap_views
from django.urls import path, re_path
from django.views.decorators.cache import cache_control, never_cache
from django.views.generic.base import TemplateView, RedirectView
from .feedback.views import submit_feedback
from .papers.feeds import LatestPapersFeed
from .papers.views import (
HomeView,
PaperListView,
paper_detail,
paper_convert,
paper_render_state,
render_update_state,
stats,
)
from .scraper.arxiv_ids import ARXIV_ID_PATTERN
from .sitemaps import sitemaps
urlpatterns = [
path("", HomeView.as_view(), name="home"),
path(
"papers/",
PaperListView.as_view(),
name="paper_list",
),
path("papers/feed/", LatestPapersFeed(), name="paper_feed"),
re_path(
fr"papers/(?P<arxiv_id>{ARXIV_ID_PATTERN})/$", paper_detail, name="paper_detail"
),
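    # Legacy arXiv-style URLs (abs/, format/, pdf/) redirect to the canonical paper page.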
re_path(
fr"abs/(?P<arxiv_id>{ARXIV_ID_PATTERN})/",
RedirectView.as_view(pattern_name="paper_detail"),
),
re_path(
fr"format/(?P<arxiv_id>{ARXIV_ID_PATTERN})/",
RedirectView.as_view(pattern_name="paper_detail"),
),
re_path(
fr"pdf/(?P<arxiv_id>{ARXIV_ID_PATTERN})(\.pdf)?/",
RedirectView.as_view(pattern_name="paper_detail"),
),
re_path(
fr"papers/(?P<arxiv_id>{ARXIV_ID_PATTERN})/render-state/",
paper_render_state,
name="paper_render_state",
),
path(
"renders/<int:pk>/update-state/",
render_update_state,
name="render_update_state",
),
path("convert/", paper_convert, name="paper_convert"),
path("submit-feedback/", submit_feedback),
path("stats/", stats),
path("admin/", admin.site.urls),
path(
"robots.txt",
TemplateView.as_view(template_name="robots.txt", content_type="text/plain"),
),
path(
"sitemap.xml",
cache_control(public=True, max_age=7 * 24 * 60 * 60)(sitemap_views.index),
{"sitemaps": sitemaps},
),
path(
"sitemap-<section>.xml",
cache_control(public=True, max_age=7 * 24 * 60 * 60)(sitemap_views.sitemap),
{"sitemaps": sitemaps},
name="fastsitemaps.views.sitemap",
),
]
# Serve uploaded files in development
if settings.DEBUG and settings.MEDIA_URL:
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
{
"content_hash": "cf71d5f776482d2b94c45ccc17448130",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 88,
"avg_line_length": 31.93975903614458,
"alnum_prop": 0.6427763108261033,
"repo_name": "arxiv-vanity/arxiv-vanity",
"id": "b3ad77cbe2aab36332e85650f8a68612120ffa17",
"size": "2651",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "arxiv_vanity/urls.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "5333"
},
{
"name": "Dockerfile",
"bytes": "622"
},
{
"name": "HTML",
"bytes": "28804"
},
{
"name": "JavaScript",
"bytes": "7439"
},
{
"name": "Python",
"bytes": "151784"
},
{
"name": "Shell",
"bytes": "405"
}
],
"symlink_target": ""
}
|
from thrift.Thrift import *
from ttypes import *
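# Source identifiers stamped on notes created by the web clipper or the mail gateways.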
EDAM_NOTE_SOURCE_WEB_CLIP = "web.clip"
EDAM_NOTE_SOURCE_MAIL_CLIP = "mail.clip"
EDAM_NOTE_SOURCE_MAIL_SMTP_GATEWAY = "mail.smtp"
|
{
"content_hash": "c70b511cb33f45237912515c2474e4d5",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 48,
"avg_line_length": 20.22222222222222,
"alnum_prop": 0.7307692307692307,
"repo_name": "vinodc/evernote",
"id": "55727a22299885edc5620a9a031148414ca85062",
"size": "282",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/evernote/edam/type/constants.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "1105618"
}
],
"symlink_target": ""
}
|
"""
Unit Tests for `pdb_selelem`.
"""
import os
import sys
import unittest
from config import data_dir
from utils import OutputCapture
class TestTool(unittest.TestCase):
"""
Generic class for testing tools.
"""
def setUp(self):
# Dynamically import the module
name = 'pdbtools.pdb_selelem'
self.module = __import__(name, fromlist=[''])
def exec_module(self):
"""
Execs module.
"""
with OutputCapture() as output:
try:
self.module.main()
except SystemExit as e:
self.retcode = e.code
self.stdout = output.stdout
self.stderr = output.stderr
return
def test_one_option(self):
"""$ pdb_selelem -C data/dummy.pdb"""
# Simulate input
# pdb_selelem dummy.pdb
sys.argv = ['', '-C', os.path.join(data_dir, 'dummy.pdb')]
# Execute the script
self.exec_module()
# Validate results
self.assertEqual(self.retcode, 0) # ensure the program exited OK.
self.assertEqual(len(self.stdout), 60) # selected C
self.assertEqual(len(self.stderr), 0) # no errors
def test_multiple(self):
"""
$ pdb_selelem -C,O data/dummy.pdb
"""
sys.argv = ['', '-C,O', os.path.join(data_dir, 'dummy.pdb')]
self.exec_module()
self.assertEqual(self.retcode, 0)
self.assertEqual(len(self.stdout), 75)
self.assertEqual(len(self.stderr), 0)
def test_file_not_found(self):
"""$ pdb_selelem not_existing.pdb"""
afile = os.path.join(data_dir, 'not_existing.pdb')
sys.argv = ['', afile]
self.exec_module()
self.assertEqual(self.retcode, 1) # exit code is 1 (error)
self.assertEqual(len(self.stdout), 0) # nothing written to stdout
self.assertEqual(self.stderr[0][:22],
"ERROR!! File not found") # proper error message
@unittest.skipIf(os.getenv('SKIP_TTY_TESTS'), 'skip on GHA - no TTY')
def test_file_missing(self):
"""$ pdb_selelem -C"""
sys.argv = ['', '-C']
self.exec_module()
self.assertEqual(self.retcode, 1)
self.assertEqual(len(self.stdout), 0) # no output
self.assertEqual(self.stderr[0],
"ERROR!! No data to process!")
@unittest.skipIf(os.getenv('SKIP_TTY_TESTS'), 'skip on GHA - no TTY')
def test_helptext(self):
"""$ pdb_selelem"""
sys.argv = ['']
self.exec_module()
self.assertEqual(self.retcode, 1) # ensure the program exited gracefully.
self.assertEqual(len(self.stdout), 0) # no output
self.assertEqual(self.stderr, self.module.__doc__.split("\n")[:-1])
def test_invalid_option(self):
"""$ pdb_selelem data/dummy.pdb"""
sys.argv = ['', os.path.join(data_dir, 'dummy.pdb')]
self.exec_module()
self.assertEqual(self.retcode, 1)
self.assertEqual(len(self.stdout), 0)
self.assertEqual(self.stderr[0][:37],
"ERROR!! Element set cannot be empty")
def test_invalid_option_2(self):
"""$ pdb_selelem -ABC data/dummy.pdb"""
sys.argv = ['', '-ABC', os.path.join(data_dir, 'dummy.pdb')]
self.exec_module()
self.assertEqual(self.retcode, 1)
self.assertEqual(len(self.stdout), 0)
self.assertEqual(self.stderr[0][:38],
"ERROR!! Element name is invalid: 'ABC'")
def test_not_an_option(self):
"""$ pdb_selelem 20 data/dummy.pdb"""
sys.argv = ['', '20', os.path.join(data_dir, 'dummy.pdb')]
self.exec_module()
self.assertEqual(self.retcode, 1)
self.assertEqual(len(self.stdout), 0)
self.assertEqual(self.stderr[0],
"ERROR! First argument is not an option: '20'")
if __name__ == '__main__':
from config import test_dir
mpath = os.path.abspath(os.path.join(test_dir, '..'))
sys.path.insert(0, mpath) # so we load dev files before any installation
unittest.main()
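# Illustrative ways to run this file directly (SKIP_TTY_TESTS is the environment
# variable checked by the @unittest.skipIf decorators above):
#
#     python tests/test_pdb_selelem.py
#     SKIP_TTY_TESTS=1 python tests/test_pdb_selelem.py   # skip the TTY-dependent cases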
|
{
"content_hash": "d46120cf2d112c93734d70d60a362586",
"timestamp": "",
"source": "github",
"line_count": 148,
"max_line_length": 82,
"avg_line_length": 28,
"alnum_prop": 0.5639478764478765,
"repo_name": "haddocking/pdb-tools",
"id": "8923bdd2105358f7e0c8c69a6e7d9a269f589400",
"size": "4777",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/test_pdb_selelem.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "578989"
},
{
"name": "TeX",
"bytes": "980"
}
],
"symlink_target": ""
}
|
import rumps
window = rumps.Window('Nothing...', 'ALERTZ')
window.title = 'WINDOWS jk'
window.message = 'Something.'
window.default_text = 'eh'
response = window.run()
print (response)
window.add_buttons('One', 'Two', 'Three')
print (window.run())
|
{
"content_hash": "36b50c0c6121d34beb3ef17d3c293f7a",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 45,
"avg_line_length": 19.384615384615383,
"alnum_prop": 0.6865079365079365,
"repo_name": "jaredks/rumps",
"id": "1eb0b0cf59b7f98ce38d6b9baae8fffd64c1174b",
"size": "252",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "examples/example_windows.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "104"
},
{
"name": "Python",
"bytes": "82841"
}
],
"symlink_target": ""
}
|
from colorsys import hsv_to_rgb
from muntjac.addon.colorpicker.color import Color
from muntjac.ui.custom_component import CustomComponent
from muntjac.ui.vertical_layout import VerticalLayout
from muntjac.ui.select import Select
from muntjac.data.property import IValueChangeListener
from muntjac.addon.colorpicker.color_picker_grid import ColorPickerGrid
from muntjac.addon.colorpicker.color_selector import IColorSelector
class ColorPickerSelect(CustomComponent, IColorSelector, IValueChangeListener):
"""The Class ColorPickerSelect.
@author: John Ahlroos / ITMill Oy LTd 2010
@author: Richard Lincoln
"""
def __init__(self):
"""Instantiates a new color picker select.
@param rows
the rows
@param columns
the columns
"""
super(ColorPickerSelect, self).__init__()
layout = VerticalLayout()
self.setCompositionRoot(layout)
self.setStyleName('colorselect')
self.setWidth('220px')
self._range = Select()
        self._range.setImmediate(True)
self._range.setNullSelectionAllowed(False)
self._range.setNewItemsAllowed(False)
self._range.setWidth('220px')
self._range.addListener(self, IValueChangeListener)
for Id in ColorRangePropertyId.values():
self._range.addItem(Id)
layout.addComponent(self._range)
self._grid = ColorPickerGrid(self.createAllColors(14, 10))
self._grid.setWidth('220px')
self._grid.setHeight('270px')
layout.addComponent(self._grid)
self._range.select(ColorRangePropertyId.ALL)
def createAllColors(self, rows, columns):
"""Creates the all colors.
@param rows:
the rows
@param columns:
the columns
@return: the color[][]
"""
colors = [([None] * columns) for _ in range(rows)]
for row in range(rows):
for col in range(columns):
# Create the color grid by varying the saturation and value
if row < rows - 1:
# Calculate new hue value
# The last row should have the black&white gradient
hue = col / columns
saturation = 1.0
value = 1.0
# For the upper half use value=1 and variable saturation
if row < rows / 2:
saturation = (row + 1.0) / rows / 2.0
else:
value = 1.0 - ((row - (rows / 2.0)) / rows / 2.0)
colors[row][col] = \
Color(*hsv_to_rgb(hue, saturation, value))
else:
hue = 0.0
saturation = 0.0
value = 1.0 - (col / columns)
colors[row][col] = \
Color(*hsv_to_rgb(hue, saturation, value))
return colors
def createColor(self, color, rows, columns):
"""Creates the color.
@param color:
the color
@param rows:
the rows
@param columns:
the columns
@return: the color[][]
"""
colors = [([None] * columns) for _ in range(rows)]
hsv = color.getHSV()
hue = hsv[0]
saturation = 1.0
value = 1.0
for row in range(rows):
for col in range(columns):
index = (row * columns) + col
saturation = 1.0
value = 1.0
if index <= (rows * columns) / 2:
saturation = index / (rows * columns) / 2.0
else:
index -= (rows * columns) / 2
value = 1.0 - (index / (rows * columns) / 2.0)
colors[row][col] = Color(*hsv_to_rgb(hue, saturation, value))
return colors
def addListener(self, listener, iface=None):
self._grid.addListener(listener, iface)
def removeListener(self, listener, iface=None):
self._grid.removeListener(listener, iface)
def getColor(self):
return self._grid.getColor()
    def setColor(self, color):
        self._grid.setColor(color)
def valueChange(self, event):
if self._grid is None:
return
if event.getProperty().getValue() == ColorRangePropertyId.ALL:
self._grid.setColorGrid(self.createAllColors(14, 10))
elif event.getProperty().getValue() == ColorRangePropertyId.RED:
self._grid.setColorGrid(self.createColor(Color(255, 0, 0), 14, 10))
elif event.getProperty().getValue() == ColorRangePropertyId.GREEN:
self._grid.setColorGrid(self.createColor(Color(0, 255, 0), 14, 10))
elif event.getProperty().getValue() == ColorRangePropertyId.BLUE:
self._grid.setColorGrid(self.createColor(Color(0, 0, 255), 14, 10))
class ColorRangePropertyId(object):
"""The Enum ColorRangePropertyId."""
ALL = None
RED = None
GREEN = None
BLUE = None
def __init__(self, caption):
"""Instantiates a new color range property id.
@param caption:
the caption
"""
self._caption = caption
def __str__(self):
return self._caption
@classmethod
def values(cls):
return [cls.ALL, cls.RED, cls.GREEN, cls.BLUE]
ColorRangePropertyId.ALL = ColorRangePropertyId('All colors')
ColorRangePropertyId.RED = ColorRangePropertyId('Red colors')
ColorRangePropertyId.GREEN = ColorRangePropertyId('Green colors')
ColorRangePropertyId.BLUE = ColorRangePropertyId('Blue colors')
|
{
"content_hash": "4ce13a65186610741c51354a757688ac",
"timestamp": "",
"source": "github",
"line_count": 199,
"max_line_length": 79,
"avg_line_length": 29.07537688442211,
"alnum_prop": 0.5629104735568614,
"repo_name": "rwl/muntjac",
"id": "27cc6564a561e0a114fb4ddb0c9d4a34be51294a",
"size": "5829",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "muntjac/addon/colorpicker/color_picker_select.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "8602"
},
{
"name": "Java",
"bytes": "2243"
},
{
"name": "JavaScript",
"bytes": "32438"
},
{
"name": "Python",
"bytes": "3212361"
}
],
"symlink_target": ""
}
|
from flask import send_from_directory, render_template, request
import logging
import os
from project.application import create_app
from project.bl import init_resource_registry
init_resource_registry()
app = create_app()
@app.route('/media/<path:path>')
def get_file(path):
return send_from_directory(app.config['UPLOAD_FOLDER'], path)
# noinspection PyUnusedLocal
@app.errorhandler(413)
def request_entity_too_large(error):
return render_template("errors/413.html", error=error), 413
@app.errorhandler(404)
def page_not_found(error):
path = request.path
# go through each blueprint to find the prefix that matches the path
# can't use request.blueprint since the routing didn't match anything
for bp_name, bp in app.blueprints.items():
        if bp.url_prefix and path.startswith(bp.url_prefix):
# get the 404 handler registered by the blueprint
handler = app.error_handler_spec.get(bp_name, {}).get(404)
if handler is not None:
                # if a handler was found, return its response
return handler(error)
# return a default response
return render_template("errors/404.html", error=error), 404
@app.errorhandler(500)
def internal_server_error(error):
return render_template("errors/500.html", error=error), 500
if os.environ.get("WERKZEUG_RUN_MAIN") == "true":
logging.info(app.url_map)
|
{
"content_hash": "8876a9353f8eb5a9dcd0d59c5ef125f7",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 73,
"avg_line_length": 32.13953488372093,
"alnum_prop": 0.7011577424023154,
"repo_name": "uaprom-summer-2015/Meowth",
"id": "f6a0bb17455d4d4383b4ab3a1b2c73237aacc4bb",
"size": "1382",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "project/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Dockerfile",
"bytes": "1065"
},
{
"name": "HTML",
"bytes": "29007"
},
{
"name": "JavaScript",
"bytes": "39136"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "131524"
},
{
"name": "Shell",
"bytes": "115"
},
{
"name": "Stylus",
"bytes": "15356"
}
],
"symlink_target": ""
}
|
import io
import json
import os
import urllib.request as url_lib
import zipfile
from packaging.version import parse as version_parser
EXTENSION_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
DEBUGGER_DEST = os.path.join(EXTENSION_ROOT, "pythonFiles", "lib", "python")
DEBUGGER_PACKAGE = "debugpy"
DEBUGGER_PYTHON_ABI_VERSIONS = ("cp39",)
DEBUGGER_VERSION = "1.6.2" # can also be "latest"
def _contains(s, parts=()):
return any(p for p in parts if p in s)
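# Illustration of the helper above (the wheel file names are hypothetical):
#   _contains("debugpy-1.6.2-cp39-cp39-win_amd64.whl", ("cp39",))  -> True
#   _contains("debugpy-1.6.2-py3-none-any.whl", ("cp39",))         -> False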
def _get_package_data():
json_uri = "https://pypi.org/pypi/{0}/json".format(DEBUGGER_PACKAGE)
# Response format: https://warehouse.readthedocs.io/api-reference/json/#project
# Release metadata format: https://github.com/pypa/interoperability-peps/blob/master/pep-0426-core-metadata.rst
with url_lib.urlopen(json_uri) as response:
return json.loads(response.read())
def _get_debugger_wheel_urls(data, version):
return list(
r["url"]
for r in data["releases"][version]
if _contains(r["url"], DEBUGGER_PYTHON_ABI_VERSIONS)
)
def _download_and_extract(root, url, version):
root = os.getcwd() if root is None or root == "." else root
print(url)
with url_lib.urlopen(url) as response:
data = response.read()
with zipfile.ZipFile(io.BytesIO(data), "r") as wheel:
for zip_info in wheel.infolist():
# Ignore dist info since we are merging multiple wheels
if ".dist-info/" in zip_info.filename:
continue
print("\t" + zip_info.filename)
wheel.extract(zip_info.filename, root)
def main(root):
data = _get_package_data()
if DEBUGGER_VERSION == "latest":
use_version = max(data["releases"].keys(), key=version_parser)
else:
use_version = DEBUGGER_VERSION
for url in _get_debugger_wheel_urls(data, use_version):
_download_and_extract(root, url, use_version)
if __name__ == "__main__":
main(DEBUGGER_DEST)
|
{
"content_hash": "ffb544df54c88b15ec58c42ac576b365",
"timestamp": "",
"source": "github",
"line_count": 63,
"max_line_length": 115,
"avg_line_length": 31.904761904761905,
"alnum_prop": 0.645273631840796,
"repo_name": "DonJayamanne/pythonVSCode",
"id": "593580d6a211a317a8f672c7c2c1b8a7d0108788",
"size": "2105",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "pythonFiles/install_debugpy.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "569"
},
{
"name": "JavaScript",
"bytes": "28707"
},
{
"name": "Jupyter Notebook",
"bytes": "10520"
},
{
"name": "Python",
"bytes": "2602995"
},
{
"name": "Roff",
"bytes": "108"
},
{
"name": "Shell",
"bytes": "76"
},
{
"name": "TypeScript",
"bytes": "5178987"
}
],
"symlink_target": ""
}
|
import Queue
import threading
import time
import json
import commands
import sys, os
sys.path.insert(1, os.path.join(sys.path[0], '..'))
from nbNet.nbNet import *
ff_conf = []
num = 0
def cmdRunner(input):
    global num  # use the module-level counter defined above
    num += 1
    return str(num)
class execThread(threading.Thread):
def __init__(self, host, port):
threading.Thread.__init__(self)
self.host = host
self.port = port
def run(self):
server = nbNet(self.host, self.port, cmdRunner)
server.run()
def startTh():
execTh = execThread("0.0.0.0", 50002)
execTh.start()
print "start"
execTh.join()
if __name__ == "__main__":
startTh()
|
{
"content_hash": "67a15880d6a6127cea10a5f5d1456216",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 55,
"avg_line_length": 19.78787878787879,
"alnum_prop": 0.6079632465543645,
"repo_name": "selboo/simpleMon",
"id": "a0c42ea4c5ed29ff0b38fdfc81604d3076ce9a92",
"size": "671",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "counter/framework.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "104"
},
{
"name": "Groff",
"bytes": "19018"
},
{
"name": "Groovy",
"bytes": "1229"
},
{
"name": "HTML",
"bytes": "36119"
},
{
"name": "JavaScript",
"bytes": "718019"
},
{
"name": "Python",
"bytes": "58071"
}
],
"symlink_target": ""
}
|
import os
import re
import six
import jsonschema
from st2common import log as logging
from st2common.constants.meta import ALLOWED_EXTS
from st2common.bootstrap.base import ResourceRegistrar
from st2common.persistence.action import Action
from st2common.models.api.action import ActionAPI
from st2common.models.system.common import ResourceReference
import st2common.content.utils as content_utils
import st2common.util.action_db as action_utils
import st2common.validators.api.action as action_validator
__all__ = [
'ActionsRegistrar',
'register_actions'
]
LOG = logging.getLogger(__name__)
class ActionsRegistrar(ResourceRegistrar):
ALLOWED_EXTENSIONS = ALLOWED_EXTS
def register_actions_from_packs(self, base_dirs):
"""
Discover all the packs in the provided directory and register actions from all of the
discovered packs.
:return: Number of actions registered.
:rtype: ``int``
"""
# Register packs first
self.register_packs(base_dirs=base_dirs)
registered_count = 0
content = self._pack_loader.get_content(base_dirs=base_dirs,
content_type='actions')
for pack, actions_dir in six.iteritems(content):
if not actions_dir:
LOG.debug('Pack %s does not contain actions.', pack)
continue
try:
LOG.debug('Registering actions from pack %s:, dir: %s', pack, actions_dir)
actions = self._get_actions_from_pack(actions_dir)
count = self._register_actions_from_pack(pack=pack, actions=actions)
registered_count += count
except Exception as e:
if self._fail_on_failure:
raise e
LOG.exception('Failed registering all actions from pack: %s', actions_dir)
return registered_count
def register_actions_from_pack(self, pack_dir):
"""
Register all the actions from the provided pack.
:return: Number of actions registered.
:rtype: ``int``
"""
pack_dir = pack_dir[:-1] if pack_dir.endswith('/') else pack_dir
_, pack = os.path.split(pack_dir)
actions_dir = self._pack_loader.get_content_from_pack(pack_dir=pack_dir,
content_type='actions')
# Register pack first
self.register_pack(pack_name=pack, pack_dir=pack_dir)
registered_count = 0
if not actions_dir:
return registered_count
LOG.debug('Registering actions from pack %s:, dir: %s', pack, actions_dir)
try:
actions = self._get_actions_from_pack(actions_dir=actions_dir)
registered_count = self._register_actions_from_pack(pack=pack, actions=actions)
except Exception as e:
if self._fail_on_failure:
raise e
LOG.exception('Failed registering all actions from pack: %s', actions_dir)
return registered_count
def _get_actions_from_pack(self, actions_dir):
actions = self.get_resources_from_pack(resources_dir=actions_dir)
# Exclude global actions configuration file
config_files = ['actions/config' + ext for ext in self.ALLOWED_EXTENSIONS]
for config_file in config_files:
actions = [file_path for file_path in actions if config_file not in file_path]
return actions
def _register_action(self, pack, action):
content = self._meta_loader.load(action)
pack_field = content.get('pack', None)
if not pack_field:
content['pack'] = pack
pack_field = pack
if pack_field != pack:
raise Exception('Model is in pack "%s" but field "pack" is different: %s' %
(pack, pack_field))
action_api = ActionAPI(**content)
try:
action_api.validate()
except jsonschema.ValidationError as e:
# We throw a more user-friendly exception on invalid parameter name
msg = str(e)
is_invalid_parameter_name = 'Additional properties are not allowed' in msg
is_invalid_parameter_name &= 'in schema[\'properties\'][\'parameters\']' in msg
if is_invalid_parameter_name:
parameter_name = re.search('\'(.+?)\' was unexpected', msg).groups()[0]
new_msg = ('Parameter name "%s" is invalid. Valid characters for parameter name '
                           'are [a-zA-Z0-9_].' % (parameter_name))
new_msg += '\n\n' + msg
raise jsonschema.ValidationError(new_msg)
raise e
action_validator.validate_action(action_api)
model = ActionAPI.to_model(action_api)
action_ref = ResourceReference.to_string_reference(pack=pack, name=str(content['name']))
existing = action_utils.get_action_by_ref(action_ref)
if not existing:
LOG.debug('Action %s not found. Creating new one with: %s', action_ref, content)
else:
LOG.debug('Action %s found. Will be updated from: %s to: %s',
action_ref, existing, model)
model.id = existing.id
try:
model = Action.add_or_update(model)
extra = {'action_db': model}
LOG.audit('Action updated. Action %s from %s.', model, action, extra=extra)
except Exception:
LOG.exception('Failed to write action to db %s.', model.name)
raise
def _register_actions_from_pack(self, pack, actions):
registered_count = 0
for action in actions:
try:
LOG.debug('Loading action from %s.', action)
self._register_action(pack, action)
except Exception as e:
if self._fail_on_failure:
raise e
LOG.exception('Unable to register action: %s', action)
continue
else:
registered_count += 1
return registered_count
def register_actions(packs_base_paths=None, pack_dir=None, use_pack_cache=True,
fail_on_failure=False):
if packs_base_paths:
assert isinstance(packs_base_paths, list)
if not packs_base_paths:
packs_base_paths = content_utils.get_packs_base_paths()
registrar = ActionsRegistrar(use_pack_cache=use_pack_cache,
fail_on_failure=fail_on_failure)
if pack_dir:
result = registrar.register_actions_from_pack(pack_dir=pack_dir)
else:
result = registrar.register_actions_from_packs(base_dirs=packs_base_paths)
return result
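# A minimal usage sketch (the pack directory below is hypothetical, shown only
# for illustration): register every action from one pack and let errors propagate.
#
#     count = register_actions(pack_dir='/opt/stackstorm/packs/examples',
#                              fail_on_failure=True)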
|
{
"content_hash": "945ffa1e9cf45028b58cc93ad6108da3",
"timestamp": "",
"source": "github",
"line_count": 187,
"max_line_length": 97,
"avg_line_length": 36.20855614973262,
"alnum_prop": 0.5950376606114312,
"repo_name": "emedvedev/st2",
"id": "3ea2cc9ca702d8a1a2aa61ebdc0e39bb1f08e3d6",
"size": "7551",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "st2common/st2common/bootstrap/actionsregistrar.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "198"
},
{
"name": "Makefile",
"bytes": "41694"
},
{
"name": "PowerShell",
"bytes": "299"
},
{
"name": "Python",
"bytes": "3717722"
},
{
"name": "Shell",
"bytes": "38637"
},
{
"name": "Slash",
"bytes": "677"
}
],
"symlink_target": ""
}
|
"""Identity v3 Domain action implementations"""
import six
import sys
from keystoneauth1 import exceptions as ks_exc
from eclcli.common import command
from eclcli.common import utils
from eclcli.i18n import _ # noqa
class CreateDomain(command.ShowOne):
"""Create new domain"""
def get_parser(self, prog_name):
parser = super(CreateDomain, self).get_parser(prog_name)
parser.add_argument(
'name',
metavar='<domain-name>',
help='New domain name',
)
parser.add_argument(
'--description',
metavar='<description>',
help='New domain description',
)
enable_group = parser.add_mutually_exclusive_group()
enable_group.add_argument(
'--enable',
action='store_true',
help='Enable domain (default)',
)
enable_group.add_argument(
'--disable',
action='store_true',
help='Disable domain',
)
parser.add_argument(
'--or-show',
action='store_true',
help=_('Return existing domain'),
)
return parser
def take_action(self, parsed_args):
identity_client = self.app.client_manager.identity
enabled = True
if parsed_args.disable:
enabled = False
try:
domain = identity_client.domains.create(
name=parsed_args.name,
description=parsed_args.description,
enabled=enabled,
)
except ks_exc.Conflict as e:
if parsed_args.or_show:
domain = utils.find_resource(identity_client.domains,
parsed_args.name)
self.log.info('Returning existing domain %s', domain.name)
else:
raise e
domain._info.pop('links')
return zip(*sorted(six.iteritems(domain._info)))
class DeleteDomain(command.Command):
"""Delete domain"""
def get_parser(self, prog_name):
parser = super(DeleteDomain, self).get_parser(prog_name)
parser.add_argument(
'domain',
metavar='<domain>',
help='Domain to delete (name or ID)',
)
return parser
def take_action(self, parsed_args):
identity_client = self.app.client_manager.identity
domain = utils.find_resource(identity_client.domains,
parsed_args.domain)
identity_client.domains.delete(domain.id)
class ListDomain(command.Lister):
"""List domains"""
def take_action(self, parsed_args):
columns = ('ID', 'Name', 'Enabled', 'Description')
data = self.app.client_manager.identity.domains.list()
return (columns,
(utils.get_item_properties(
s, columns,
formatters={},
) for s in data))
class SetDomain(command.Command):
"""Set domain properties"""
def get_parser(self, prog_name):
parser = super(SetDomain, self).get_parser(prog_name)
parser.add_argument(
'domain',
metavar='<domain>',
help='Domain to modify (name or ID)',
)
parser.add_argument(
'--name',
metavar='<name>',
help='New domain name',
)
parser.add_argument(
'--description',
metavar='<description>',
help='New domain description',
)
enable_group = parser.add_mutually_exclusive_group()
enable_group.add_argument(
'--enable',
action='store_true',
help='Enable domain',
)
enable_group.add_argument(
'--disable',
action='store_true',
help='Disable domain',
)
return parser
def take_action(self, parsed_args):
identity_client = self.app.client_manager.identity
domain = utils.find_resource(identity_client.domains,
parsed_args.domain)
kwargs = {}
if parsed_args.name:
kwargs['name'] = parsed_args.name
if parsed_args.description:
kwargs['description'] = parsed_args.description
if parsed_args.enable:
kwargs['enabled'] = True
if parsed_args.disable:
kwargs['enabled'] = False
if not kwargs:
sys.stdout.write("Domain not updated, no arguments present")
return
identity_client.domains.update(domain.id, **kwargs)
class ShowDomain(command.ShowOne):
"""Display domain details"""
def get_parser(self, prog_name):
parser = super(ShowDomain, self).get_parser(prog_name)
parser.add_argument(
'domain',
metavar='<domain>',
help='Domain to display (name or ID)',
)
return parser
def take_action(self, parsed_args):
identity_client = self.app.client_manager.identity
domain = utils.find_resource(identity_client.domains,
parsed_args.domain)
domain._info.pop('links')
return zip(*sorted(six.iteritems(domain._info)))
|
{
"content_hash": "ecffc75ad0b6abe785e1da6cfb1943a3",
"timestamp": "",
"source": "github",
"line_count": 175,
"max_line_length": 74,
"avg_line_length": 30.245714285714286,
"alnum_prop": 0.5446816550160589,
"repo_name": "nttcom/eclcli",
"id": "a6976a7467889c1d59a64c5f621f6dae0463d15a",
"size": "5906",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "eclcli/identity/v3/domain.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "2087533"
}
],
"symlink_target": ""
}
|
import vim
import re
from glob import glob
from os import walk
from os.path import join, getmtime, isfile, isdir, exists
from subprocess import Popen, PIPE
from vim_pad.utils import get_save_dir
from vim_pad.pad import PadInfo
from vim_pad.timestamps import natural_timestamp
# globals (caches) {{{1
cached_data = []
cached_timestamps = []
cached_filenames = []
def open_pad(path=None, first_line="", query=''): # {{{1
"""Creates or opens a note.
path: a valid path for a note.
first_line: a string to insert to a new note, if given.
query: the previous search, if any.
"""
# we require self.save_dir_set to be set to a valid path
if get_save_dir() == "":
vim.command('echom "vim-pad: IMPORTANT: please set g:pad#dir to a valid path in your vimrc."')
return
# if no path is provided, we create one using the current time
if not path:
path = join(get_save_dir(),
PadInfo([first_line]).id + vim.eval("g:pad#default_file_extension"))
path = path.replace(" ", "\ ")
def split_for_pad():
if vim.eval('g:pad#position["pads"]') == 'right':
vim.command("silent! rightbelow"
+ str(vim.eval("g:pad#window_width")) + "vsplit " + path)
else:
vim.command("silent! botright"
+ str(vim.eval("g:pad#window_height")) + "split " + path)
if bool(int(vim.eval("g:pad#open_in_split"))):
split_for_pad()
else:
awa = int(vim.eval("&autowriteall"))
if bool(int(vim.eval("&modified"))):
reply = vim.eval('input("vim-pad: the current file has unsaved changes. do you want to save? [Yn] ", "y")')
if reply == "y":
vim.command("set autowriteall")
vim.command("silent! edit " + path)
if awa == 0:
vim.command("set noautowriteall")
else:
vim.command('echom "vim-pad: will have to open pad in a split"')
split_for_pad()
vim.command("redraw!")
else:
vim.command("silent! edit " + path)
# we don't keep the buffer when we hide it
vim.command("set bufhidden=wipe")
# set the filetype to our default
if vim.eval('&filetype') in ('', 'conf'):
vim.command("set filetype=" + vim.eval("g:pad#default_format"))
# map the local commands
if bool(int(vim.eval('has("gui_running")'))):
vim.command("noremap <silent> <buffer> <localleader><delete> :call pad#DeleteThis()<cr>")
else:
vim.command("noremap <silent> <buffer> <localleader>dd :call pad#DeleteThis()<cr>")
vim.command("noremap <silent> <buffer> <localleader>+m :call pad#AddModeline()<cr>")
vim.command("noremap <silent> <buffer> <localleader>+f :call pad#MoveToFolder()<cr>")
vim.command("noremap <silent> <buffer> <localleader>-f :call pad#MoveToSaveDir()<cr>")
vim.command("noremap <silent> <buffer> <localleader>+a :call pad#Archive()<cr>")
vim.command("noremap <silent> <buffer> <localleader>-a :call pad#Unarchive()<cr>")
# insert the text in first_line to the buffer, if provided
if first_line:
vim.current.buffer.append(first_line, 0)
vim.command("normal! j")
# highlight query and jump to it?
if query != '':
if vim.eval('g:pad#highlight_query') == '1':
vim.command("call matchadd('PadQuery', '\c"+query+"')")
if vim.eval('g:pad#jumpto_query') == '1':
vim.command("call search('\c"+query+"')")
def new_pad(text=None): #{{{1
path = join(get_save_dir(), PadInfo([text]).id + vim.eval("g:pad#default_file_extension"))
with open(path, 'w') as new_note:
new_note.write(text)
def listdir_recursive_nohidden(path, archive): # {{{1
matches = []
for root, dirnames, filenames in walk(path, topdown=True):
for dirname in dirnames:
if dirname.startswith('.'):
dirnames.remove(dirname)
if archive != "!":
if dirname == "archive":
dirnames.remove(dirname)
matches += [join(root, f) for f in filenames if not f.startswith('.')]
return matches
def listdir_external(path, archive, query): # {{{1
search_backend = vim.eval("g:pad#search_backend")
if search_backend == "grep":
# we use Perl mode for grep (-P), because it is really fast
command = ["grep", "-P", "-n", "-r", "-l", query, path + "/"]
if archive != "!":
command.append("--exclude-dir=archive")
command.append('--exclude=.*')
command.append("--exclude-dir=.git")
elif search_backend == "ack":
if vim.eval("executable('ack')") == "1":
ack_path = "ack"
else:
ack_path = "/usr/bin/vendor_perl/ack"
command = [ack_path, query, path, "--noheading", "-l"]
if archive != "!":
command.append("--ignore-dir=archive")
command.append('--ignore-file=match:/\./')
elif search_backend == "ag":
if vim.eval("executable('ag')") == "1":
command = ["ag", query, path, "--noheading", "-l"]
if archive != "!":
command.append("--ignore-dir=archive")
elif search_backend == "pt":
if vim.eval("executable('pt')") == "1":
command = ["pt", "-l", "--nogroup"]
if archive != "!":
command.append("--ignore=archive")
command.append(query)
command.append(path)
if bool(int(vim.eval("g:pad#search_ignorecase"))):
command.append("-i")
command.append("--max-count=1")
cmd_output = Popen(command, stdout=PIPE, stderr=PIPE).communicate()[0].split("\n")
return list(filter(lambda i: i != "", cmd_output))
def get_filelist(query=None, archive=None): # {{{1
""" __get_filelist(query) -> list_of_notes
Returns a list of notes. If no query is provided, all the valid filenames
in self.save_dir are returned in a list, otherwise, return the results of
grep or ack search for query in self.save_dir.
"""
local_path = vim.eval("getcwd(). '/'. g:pad#local_dir")
use_local_dir = vim.eval('g:pad#local_dir') != '' and local_path != get_save_dir()
if not query or query == "":
files = listdir_recursive_nohidden(get_save_dir(), archive)
if use_local_dir:
files.extend(listdir_recursive_nohidden(local_path, archive))
else:
files = listdir_external(get_save_dir(), archive, query)
if bool(int(vim.eval("g:pad#query_filenames"))):
matches = filter(lambda i: not isdir(i) and i not in files, glob(join(get_save_dir(), "*"+query+"*")))
files.extend(matches)
if bool(int(vim.eval("g:pad#query_dirnames"))):
matching_dirs = filter(isdir, glob(join(get_save_dir(), "*"+ query+"*")))
for mdir in matching_dirs:
files.extend(filter(lambda x: x not in files, listdir_recursive_nohidden(mdir, archive)))
if use_local_dir:
files.extend(listdir_external(local_path, archive, query))
return files
def fill_list(files, queried=False, custom_order=False): # {{{1
""" Writes the list of notes to the __pad__ buffer.
files: a list of files to process.
queried: whether files is the result of a query or not.
custom_order: whether we should keep the order of the list given (implies queried=True).
Keeps a cache so we only read the notes when the files have been modified.
"""
global cached_filenames, cached_timestamps, cached_data
# we won't want to touch the cache
if custom_order:
queried = True
files = filter(exists, [join(get_save_dir(), f) for f in files])
timestamps = [getmtime(join(get_save_dir(), f)) for f in files]
# we will have a new list only on the following cases
if queried or files != cached_filenames or timestamps != cached_timestamps:
lines = []
if not custom_order:
files = reversed(sorted(files, key=lambda i: getmtime(join(get_save_dir(), i))))
for pad in files:
pad_path = join(get_save_dir(), pad)
if isfile(pad_path):
pad_path = join(get_save_dir(), pad)
with open(pad_path) as pad_file:
info = PadInfo(pad_file)
if info.isEmpty:
if bool(int(vim.eval("g:pad#show_dir"))):
tail = info.folder + u'\u2e25 '.encode('utf-8') + "[EMPTY]"
else:
tail = "[EMPTY]"
else:
if bool(int(vim.eval("g:pad#show_dir"))):
tail = info.folder + u'\u2e25 '.encode('utf-8') + u'\u21b2'.encode('utf-8').join((info.summary, info.body))
else:
tail = u'\u21b2'.encode('utf-8').join((info.summary, info.body))
lines.append(pad + " @ " + tail)
else:
pass
# we only update the cache if we are not queried, to preserve the global cache
if not queried:
cached_data = lines
cached_timestamps = timestamps
cached_filenames = files
# update natural timestamps
def add_natural_timestamp(matchobj):
id_string = matchobj.group("id")
mtime = str(int(getmtime(join(get_save_dir(), matchobj.group("id")))*1000000))
return id_string + " @ " + natural_timestamp(mtime).ljust(19) + " │"
if not queried: # we use the cache
lines = [re.sub("(?P<id>^.*?) @", add_natural_timestamp, line) for line in cached_data]
else: # we use the new values in lines
lines = [re.sub("(?P<id>^.*?) @", add_natural_timestamp, line) for line in lines]
# we now show the list
if vim.eval('&modifiable') != '1':
vim.current.buffer.options['modifiable'] = True
del vim.current.buffer[:] # clear the buffer
vim.current.buffer.append(list(lines))
vim.command("normal! dd")
def display(query, archive): # {{{1
""" Shows a list of notes.
query: a string representing a regex search. Can be "".
Builds a list of files for query and then processes it to show the list in the pad format.
"""
if get_save_dir() == "":
vim.command('let tmp = confirm("IMPORTANT:\n'\
'Please set g:pad#dir to a valid path in your vimrc.", "OK", 1, "Error")')
return
pad_files = get_filelist(query, archive)
if len(pad_files) > 0:
if vim.eval("bufexists('__pad__')") == "1":
vim.command("bw __pad__")
if vim.eval('g:pad#position["list"]') == "right":
vim.command("silent! rightbelow " + str(vim.eval('g:pad#window_width')) + "vnew __pad__")
else:
vim.command("silent! botright " + str(vim.eval("g:pad#window_height")) + "new __pad__")
fill_list(pad_files, query != "")
if query != "":
vim.command("let b:pad_query = '" + query + "'")
vim.command("set filetype=pad")
vim.command("setlocal nomodifiable")
vim.command("setlocal statusline=%#PreCondit#\ vim-pad%=%#Comment#" + \
"%#Special#q%#Comment#:close\ %#Special#dd%#Comment#:delete\ " + \
"%#Special#[-+]a%#Comment#:[un]archive\ %#Special#[-+]f%#Comment#:move\ [from\|to]\ " + \
"%#Special#<s-f>%#Comment#:search\ %#Special#<s-s>%#Comment#:sort\ ")
else:
print "vim-pad: no pads"
def search_pads(): # {{{1
""" Aks for a query and lists the matching notes.
"""
if get_save_dir() == "":
vim.command('let tmp = confirm("IMPORTANT:\n'\
'Please set g:pad#dir to a valid path in your vimrc.", "OK", 1, "Error")')
return
query = vim.eval('input(">>> ")')
display(query, "")
vim.command("redraw!")
def global_incremental_search(should_open=True): # {{{1
""" Provides incremental search in normal mode without opening the list.
"""
query = ""
should_create_on_enter = False
vim.command("echohl None")
vim.command('echo ">> "')
while True:
raw_char = vim.eval("getchar()")
if raw_char in ("13", "27"):
if raw_char == "13":
if should_create_on_enter:
if should_open == True:
open_pad(first_line=query)
else:
print "new_pad"
new_pad(text=query)
vim.command("echohl None")
else:
display(query, True)
vim.command("redraw!")
break
else:
try: # if we can convert to an int, we have a regular key
int(raw_char) # we bring up an error on nr2char
last_char = vim.eval("nr2char(" + raw_char + ")")
query = query + last_char
except: # if we don't, we have some special key
keycode = unicode(raw_char, errors="ignore")
if keycode == "kb": # backspace
query = query[:-len(last_char)]
pad_files = get_filelist(query)
if pad_files != []:
info = ""
vim.command("echohl None")
should_create_on_enter = False
else: # we will create a new pad
info = "[NEW] "
vim.command("echohl WarningMsg")
should_create_on_enter = True
vim.command("redraw")
vim.command('echo ">> ' + info + query + '"')
|
{
"content_hash": "8aa53e34c81b2d39cb20c8e0e5baafd7",
"timestamp": "",
"source": "github",
"line_count": 338,
"max_line_length": 135,
"avg_line_length": 40.31065088757396,
"alnum_prop": 0.5546422018348623,
"repo_name": "vim-scripts/vim-pad",
"id": "ca92abf5b2145ff8df2f09f958ba5684d06f320a",
"size": "13688",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pythonx/vim_pad/handler.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "28267"
},
{
"name": "VimL",
"bytes": "11617"
}
],
"symlink_target": ""
}
|
"""Contains the sound config player class"""
from collections import namedtuple
from copy import deepcopy
from typing import Dict, List
from mpfmc.core.mc_config_player import McConfigPlayer
SoundBlock = namedtuple("SoundBlock", ["priority", "context"])
class McSoundPlayer(McConfigPlayer):
"""Base class for the Sound Player that runs on the mpf-mc side of things.
It receives all of its instructions via BCP from a MpfSoundPlayer instance
running as part of MPF.
The sound_player: section of a config file (either the machine-wide or
a mode-specific config) looks like this:
sound_player:
<event_name>:
<sound_name>:
<sound_settings>: ...
The express config just puts a sound_name next to an event.
sound_player:
some_event: sound_name_to_play
If you want to control other settings (such as track, priority, volume,
loops, etc.), enter the sound name on the next line and the settings
indented under it, like this:
sound_player:
some_event:
sound_name_to_play:
volume: 0.35
max_queue_time: 1 sec
Here are several various examples:
sound_player:
some_event:
sound1:
volume: 0.65
some_event2:
sound2:
volume: -4.5 db
priority: 100
max_queue_time: 500 ms
some_event3: sound3
"""
config_file_section = 'sound_player'
show_section = 'sounds'
machine_collection_name = 'sounds'
def __init__(self, machine) -> None:
"""Initialise variable player."""
super().__init__(machine)
self.blocks = {} # type: Dict[str, List[SoundBlock]]
# pylint: disable=invalid-name,too-many-branches
def play(self, settings, context, calling_context, priority=0, **kwargs): # noqa: MC0001
"""Plays a validated sounds: section from a sound_player: section of a
config file or the sounds: section of a show.
The config must be validated. Validated config looks like this:
<sound_name>:
<settings>: ...
<settings> can be:
action:
priority:
volume:
loops:
max_queue_time:
block:
Notes:
Ducking settings and markers cannot currently be specified/overridden in the
sound_player (they must be specified in the sounds section of a config file).
"""
settings = deepcopy(settings)
if 'sounds' in settings:
settings = settings['sounds']
for sound_name, s in settings.items():
if self.check_delayed_play(sound_name, s, context, calling_context, priority, **kwargs):
return
# adjust priority
try:
s['priority'] += priority
except (KeyError, TypeError):
s['priority'] = priority
# Retrieve sound asset by name
try:
sound = self.machine.sounds[sound_name]
except KeyError:
self.machine.log.error("SoundPlayer: The specified sound "
"does not exist ('{}').".format(sound_name))
return
s.update(kwargs)
action = s['action'].lower()
del s['action']
# assign output track
track = self.machine.sound_system.audio_interface.get_track_by_name(s.get('track') or sound.track)
if track is None:
self.machine.log.error("SoundPlayer: The specified track ('{}') "
"does not exist. Unable to perform '{}' action "
"on sound '{}'."
.format(s['track'], action, sound_name))
return
# a block will block any other lower priority sound from being triggered by the same event
# the calling_context contains the name of the triggering event
block_item = str(calling_context)
if self._is_blocked(block_item, context, priority):
continue
if s['block']:
if block_item not in self.blocks:
self.blocks[block_item] = []
if SoundBlock(priority, context) not in self.blocks[block_item]:
self.blocks[block_item].append(SoundBlock(priority, context))
# Determine action to perform
if action == 'play':
track.play_sound(sound, context, s)
elif action == 'stop':
if 'fade_out' in s:
track.stop_sound(sound, s['fade_out'])
else:
track.stop_sound(sound)
elif action == 'stop_looping':
track.stop_sound_looping(sound)
elif action == 'load':
sound.load()
elif action == 'unload':
sound.unload()
else:
self.machine.log.error("SoundPlayer: The specified action "
"is not valid ('{}').".format(action))
def _is_blocked(self, block_item: str, context: str, priority: int) -> bool:
"""Determine if event should be blocked."""
if block_item not in self.blocks or not self.blocks[block_item]:
return False
priority_sorted = sorted(self.blocks[block_item], reverse=True)
first_element = priority_sorted[0]
return first_element.priority > priority and first_element.context != context
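    # Illustration (hypothetical values): if self.blocks["some_event"] holds
    # SoundBlock(priority=200, context="mode_a"), a play request at priority 100
    # from context "mode_b" is blocked (200 > 100 and the contexts differ), while
    # a request from "mode_a" itself, or one at priority >= 200, is not.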
def get_express_config(self, value):
"""Express config for sounds is simply a string (sound name) with an optional block."""
if not isinstance(value, str):
block = False
else:
try:
value, block_str = value.split('|')
except ValueError:
block = False
else:
if block_str != "block":
raise ValueError("Invalid action in sound_player entry: {}".format(value), 6)
block = True
return {value: {"block": block}}
# pylint: disable=too-many-branches
def validate_config(self, config):
"""Validates the sound_player: section of a config file (either a
machine-wide config or a mode config).
Args:
config: A dict of the contents of the sound_player section
from the config file. It's assumed that keys are event names, and
values are settings for what the sound_player should do when that
event is posted.
Returns: A dict a validated entries.
This method overrides the base method since the sound_player has
unique options.
"""
# first, we're looking to see if we have a string, a list, or a dict.
# if it's a dict, we look to see whether we have the name of some sound
validated_config = dict()
for event, settings in config.items():
validated_config[event] = dict()
validated_config[event]['sounds'] = dict()
if isinstance(settings, str):
settings = self.get_express_config(settings)
if not isinstance(settings, dict):
settings = {settings: dict()}
if 'track' in settings:
track = settings['track']
if self.machine.sound_system.audio_interface.get_track_type(track) != "standard":
raise ValueError("SoundPlayer: An invalid audio track '{}' is specified for event '{}' "
"(only standard audio tracks are supported).".format(track, event))
for sound, sound_settings in settings.items():
# Now check to see if all the settings are valid
# sound settings. If not, assume it's a single sound settings.
if isinstance(sound_settings, dict):
for key in sound_settings:
if key not in self.machine.config_validator.get_config_spec()['sound_player']:
break
validated_config[event]['sounds'].update(
self._validate_config_item(sound, sound_settings))
return validated_config
def clear_context(self, context):
"""Stop all sounds from this context."""
self.machine.log.debug("SoundPlayer: Clearing context - applying mode_end_action for all active sounds")
for index in range(self.machine.sound_system.audio_interface.get_track_count()):
track = self.machine.sound_system.audio_interface.get_track(index)
if track.type == "standard":
track.clear_context(context)
# clear blocks
for item in self.blocks:
for entry, s in enumerate(self.blocks[item]):
if s.context == context:
del self.blocks[item][entry]
McPlayerCls = McSoundPlayer
|
{
"content_hash": "f9f658c37aae2249f71315313db90d8f",
"timestamp": "",
"source": "github",
"line_count": 252,
"max_line_length": 112,
"avg_line_length": 36.051587301587304,
"alnum_prop": 0.5608145294441387,
"repo_name": "missionpinball/mpf-mc",
"id": "26685d96db6ae3123a380871934a6978eddd0572",
"size": "9085",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "mpfmc/config_players/sound_player.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "3434"
},
{
"name": "Cython",
"bytes": "44814"
},
{
"name": "Dockerfile",
"bytes": "1441"
},
{
"name": "Makefile",
"bytes": "262"
},
{
"name": "Python",
"bytes": "1198826"
},
{
"name": "Shell",
"bytes": "829"
}
],
"symlink_target": ""
}
|
import kivy
kivy.require('1.7.0')
from kivy.core.window import Window
from kivy.app import App
import codecs
from kivy.uix.floatlayout import FloatLayout
from kivy.factory import Factory
from kivy.uix.scrollview import ScrollView
from kivy.uix.label import Label
from kivy.properties import ObjectProperty, StringProperty
from kivy.uix.textinput import TextInput
from kivy.uix.gridlayout import GridLayout
from kivy.uix.popup import Popup
from kivy.config import Config
import sys; print sys.getdefaultencoding()
import os
class MainWidget(FloatLayout):
manager = ObjectProperty(None)
class OpenDialog(FloatLayout):
load = ObjectProperty(None)
cancel = ObjectProperty(None)
class SaveDialog(FloatLayout):
save = ObjectProperty(None)
text_input = ObjectProperty(None)
cancel = ObjectProperty(None)
class Root(FloatLayout):
text_input = ObjectProperty(None)
savefile = ObjectProperty(None)
loadfile = ObjectProperty(None)
label_wid = ObjectProperty()
label_modified = ObjectProperty()
info = StringProperty()
def set_file(self, text):
self.label_wid.text = text
def set_modified(self, text):
self.label_modified.text = text
def exit(self):
self._popup.dismiss()
def save_dialog(self):
content = SaveDialog(save=self.save, cancel=self.exit)
self._popup = Popup(title="Salvar...", content=content, )
self._popup.open()
def open_dialog(self):
content = OpenDialog(load=self.load, cancel=self.exit)
self._popup = Popup(title="Selecione o arquivo", content=content, size_hint=(0.9, 0.9))
self._popup.open()
def load(self, path, filename):
with codecs.open(os.path.join(path, filename[0] or ''), 'r','utf-8') as stream:
self.text_input.text = stream.read()
print(filename)
self.set_file(filename[0].encode("ascii"))
self.set_modified('Modified: NO')
self.exit()
def save(self, path, fname):
with codecs.open(os.path.join(path, fname or ''), 'w', 'utf8') as stream:
stream.write(self.text_input.text)
self.set_file(fname)
self.set_modified('Modified: NO')
self.exit()
class myapp(App):
def build(self):
self.title = 'pyHGEditor'
pass
Factory.register('Root', cls=Root)
Factory.register('OpenDialog', cls=OpenDialog)
Factory.register('SaveDialog', cls=SaveDialog)
if __name__ == '__main__':
myapp().run()
|
{
"content_hash": "58b2480b08c3f81953dee70b675b25fe",
"timestamp": "",
"source": "github",
"line_count": 89,
"max_line_length": 95,
"avg_line_length": 27.97752808988764,
"alnum_prop": 0.6730923694779116,
"repo_name": "guihknx/pyHGEditor",
"id": "6513cbadd78c771d65613e8cf534ce0cdf75a3ec",
"size": "2512",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "main.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "2512"
}
],
"symlink_target": ""
}
|
import shortuuid
import six
import sys
from django.db.models import CharField
class ShortUUIDField(CharField):
"""
A field which stores a Short UUID value in base57 format. This may also have
the Boolean attribute 'auto' which will set the value on initial save to a
new UUID value (calculated using shortuuid's default (uuid4)). Note that while all
UUIDs are expected to be unique we enforce this with a DB constraint.
"""
def __init__(self, auto=True, *args, **kwargs):
self.auto = auto
# We store UUIDs in base57 format, which is fixed at 22 characters.
kwargs['max_length'] = 22
if auto:
# Do not let the user edit UUIDs if they are auto-assigned.
kwargs['editable'] = False
kwargs['blank'] = True
# kwargs['unique'] = True # if you want to be paranoid, set unique=True in your instantiation of the field.
super(ShortUUIDField, self).__init__(*args, **kwargs)
def pre_save(self, model_instance, add):
"""
This is used to ensure that we auto-set values if required.
See CharField.pre_save
"""
value = super(ShortUUIDField, self).pre_save(model_instance, add)
if self.auto and not value:
# Assign a new value for this attribute if required.
value = six.text_type(shortuuid.uuid())
setattr(model_instance, self.attname, value)
return value
def formfield(self, **kwargs):
if self.auto:
return None
return super(ShortUUIDField, self).formfield(**kwargs)
try:
from south.modelsinspector import add_introspection_rules
add_introspection_rules([], [r"^shortuuidfield\.fields\.ShortUUIDField"])
except ImportError:
pass
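# A minimal usage sketch (assumptions: a Django project with this package on the
# path; "Ticket" and "reference" are hypothetical names used only for illustration):
#
#     from django.db import models
#     from shortuuidfield.fields import ShortUUIDField
#
#     class Ticket(models.Model):
#         # auto=True (the default) assigns a 22-character base57 UUID whenever
#         # the field is empty at save time
#         reference = ShortUUIDField(unique=True)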
|
{
"content_hash": "bfa52fad4ea3954ad38ce88268f062c3",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 119,
"avg_line_length": 37.041666666666664,
"alnum_prop": 0.6456692913385826,
"repo_name": "nebstrebor/django-shortuuidfield",
"id": "c10c7df2d1299eade1875a982a771ba75a359d77",
"size": "1778",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "shortuuidfield/fields.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "2755"
}
],
"symlink_target": ""
}
|
from zerver.lib.test_classes import WebhookTestCase
from zerver.webhooks.gosquared.view import CHAT_MESSAGE_TEMPLATE
class GoSquaredHookTests(WebhookTestCase):
STREAM_NAME = "gosquared"
URL_TEMPLATE = "/api/v1/external/gosquared?stream={stream}&api_key={api_key}"
WEBHOOK_DIR_NAME = "gosquared"
# Note: Include a test function per each distinct message condition your integration supports
def test_traffic_message(self) -> None:
expected_topic = "GoSquared - requestb.in"
expected_message = (
"[requestb.in](https://www.gosquared.com/now/GSN-595854-T) has 33 visitors online."
)
self.check_webhook(
"traffic_spike",
expected_topic,
expected_message,
content_type="application/x-www-form-urlencoded",
)
def test_chat_message(self) -> None:
expected_topic = "Live chat session - Zulip Chat"
expected_message = CHAT_MESSAGE_TEMPLATE.format(
status="visitor",
name="John Smith",
content="Zulip is awesome!",
)
self.check_webhook(
"chat_message",
expected_topic,
expected_message,
content_type="application/x-www-form-urlencoded",
)
|
{
"content_hash": "06e123284beebf8dd82cb3658a459565",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 97,
"avg_line_length": 34.648648648648646,
"alnum_prop": 0.6201248049921997,
"repo_name": "hackerkid/zulip",
"id": "8a8bb851fa46c62a1b4d4ba0bf377dcf367e4ae4",
"size": "1282",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "zerver/webhooks/gosquared/tests.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "397271"
},
{
"name": "Dockerfile",
"bytes": "2939"
},
{
"name": "Emacs Lisp",
"bytes": "158"
},
{
"name": "HTML",
"bytes": "717106"
},
{
"name": "JavaScript",
"bytes": "3079595"
},
{
"name": "Perl",
"bytes": "398763"
},
{
"name": "Puppet",
"bytes": "71210"
},
{
"name": "Python",
"bytes": "6876664"
},
{
"name": "Ruby",
"bytes": "6110"
},
{
"name": "Shell",
"bytes": "119833"
},
{
"name": "TypeScript",
"bytes": "14645"
}
],
"symlink_target": ""
}
|
import json
import posixpath
import string
import uuid
from copy import copy
from datetime import datetime
from django.apps import apps
from django.conf import settings
from django.core.urlresolvers import reverse
from django.db import models
import commonware.log
import jinja2
from tower import ugettext as _
import mkt
from lib.crypto import generate_key
from lib.pay_server import client
from mkt.access.models import Group
from mkt.constants.payments import ACCESS_SIMULATE
from mkt.constants.payments import PROVIDER_BANGO, PROVIDER_CHOICES
from mkt.ratings.models import Review
from mkt.site.models import ManagerBase, ModelBase
from mkt.tags.models import Tag
from mkt.users.models import UserForeignKey, UserProfile
from mkt.versions.models import Version
from mkt.webapps.models import Webapp
from mkt.websites.models import Website
log = commonware.log.getLogger('z.devhub')
class CantCancel(Exception):
pass
class SolitudeSeller(ModelBase):
# TODO: When Solitude allows for it, this should be updated to be 1:1 with
# users.
user = UserForeignKey()
uuid = models.CharField(max_length=255, unique=True)
resource_uri = models.CharField(max_length=255)
class Meta:
db_table = 'payments_seller'
@classmethod
def create(cls, user):
uuid_ = str(uuid.uuid4())
res = client.api.generic.seller.post(data={'uuid': uuid_})
uri = res['resource_uri']
obj = cls.objects.create(user=user, uuid=uuid_, resource_uri=uri)
log.info('[User:%s] Created Solitude seller (uuid:%s)' %
(user, uuid_))
return obj
class PaymentAccount(ModelBase):
user = UserForeignKey()
name = models.CharField(max_length=64)
agreed_tos = models.BooleanField(default=False)
solitude_seller = models.ForeignKey(SolitudeSeller)
# These two fields can go away when we're not 1:1 with SolitudeSellers.
seller_uri = models.CharField(max_length=255, unique=True)
uri = models.CharField(max_length=255, unique=True)
# A soft-delete so we can talk to Solitude asynchronously.
inactive = models.BooleanField(default=False)
# The id for this account from the provider.
account_id = models.CharField(max_length=255)
# Each account will be for a particular provider.
provider = models.IntegerField(choices=PROVIDER_CHOICES,
default=PROVIDER_BANGO)
shared = models.BooleanField(default=False)
class Meta:
db_table = 'payment_accounts'
unique_together = ('user', 'uri')
def cancel(self, disable_refs=False):
"""Cancels the payment account.
If `disable_refs` is set, existing apps that use this payment account
will be set to STATUS_NULL.
"""
account_refs = AddonPaymentAccount.objects.filter(account_uri=self.uri)
if self.shared and account_refs:
# With sharing a payment account comes great responsibility. It
# would be really mean to create a payment account, share it
# and have lots of apps use it. Then one day you remove it and
# make a whole pile of apps in the marketplace get removed from
# the store, or have in-app payments fail.
#
# For the moment I'm just stopping this completely, if this ever
# happens, we'll have to go through a deprecation phase.
# - let all the apps that use it know
# - when they have all stopped sharing it
# - re-run this
log.error('Cannot cancel a shared payment account that has '
'apps using it.')
raise CantCancel('You cannot cancel a shared payment account.')
self.update(inactive=True)
log.info('Soft-deleted payment account (uri: %s)' % self.uri)
for acc_ref in account_refs:
if (disable_refs and
not acc_ref.addon.has_multiple_payment_accounts()):
log.info('Changing app status to NULL for app: {0}'
'because of payment account deletion'.format(
acc_ref.addon_id))
acc_ref.addon.update(status=mkt.STATUS_NULL)
log.info('Deleting AddonPaymentAccount for app: {0} because of '
'payment account deletion'.format(acc_ref.addon_id))
acc_ref.delete()
def get_provider(self):
"""Returns an instance of the payment provider for this account."""
# TODO: fix circular import. Providers imports models which imports
# forms which imports models.
from mkt.developers.providers import get_provider
return get_provider(id=self.provider)
def __unicode__(self):
date = self.created.strftime('%m/%y')
if not self.shared:
return u'%s - %s' % (date, self.name)
# L10n: {0} is the name of the account.
return _(u'Donate to {0}'.format(self.name))
def get_agreement_url(self):
return reverse('mkt.developers.provider.agreement', args=[self.pk])
class AddonPaymentAccount(ModelBase):
addon = models.ForeignKey(
'webapps.Webapp', related_name='app_payment_accounts')
payment_account = models.ForeignKey(PaymentAccount)
account_uri = models.CharField(max_length=255)
product_uri = models.CharField(max_length=255, unique=True)
class Meta:
db_table = 'addon_payment_account'
@property
def user(self):
return self.payment_account.user
class UserInappKey(ModelBase):
solitude_seller = models.ForeignKey(SolitudeSeller)
seller_product_pk = models.IntegerField(unique=True)
def secret(self):
return self._product().get()['secret']
def public_id(self):
return self._product().get()['public_id']
def reset(self):
self._product().patch(data={'secret': generate_key(48)})
@classmethod
def create(cls, user, public_id=None, secret=None, access_type=None):
if public_id is None:
public_id = str(uuid.uuid4())
if secret is None:
secret = generate_key(48)
if access_type is None:
access_type = ACCESS_SIMULATE
sel = SolitudeSeller.create(user)
prod = client.api.generic.product.post(data={
'seller': sel.resource_uri, 'secret': secret,
'external_id': str(uuid.uuid4()), 'public_id': public_id,
'access': access_type,
})
log.info(u'User %s created an in-app payments dev key product=%s '
u'with %s' % (unicode(user), prod['resource_pk'], sel))
return cls.objects.create(solitude_seller=sel,
seller_product_pk=prod['resource_pk'])
def _product(self):
return client.api.generic.product(self.seller_product_pk)
class Meta:
db_table = 'user_inapp_keys'
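# Illustrative sketch (not part of the original module): provision an in-app
# payments key for a developer; the backing product is created in Solitude.
# `some_user` below is a hypothetical UserProfile instance.
#   key = UserInappKey.create(some_user)
#   key.public_id(), key.secret()   # credentials fetched from Solitude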
class PreloadTestPlan(ModelBase):
addon = models.ForeignKey('webapps.Webapp')
last_submission = models.DateTimeField(auto_now_add=True)
filename = models.CharField(max_length=60)
status = models.PositiveSmallIntegerField(default=mkt.STATUS_PUBLIC)
class Meta:
db_table = 'preload_test_plans'
ordering = ['-last_submission']
@property
def preload_test_plan_url(self):
host = (settings.PRIVATE_MIRROR_URL if self.addon.is_disabled
else settings.LOCAL_MIRROR_URL)
return posixpath.join(host, str(self.addon.id), self.filename)
# When an app is deleted we need to remove the preload test plan.
def preload_cleanup(*args, **kwargs):
instance = kwargs.get('instance')
PreloadTestPlan.objects.filter(addon=instance).delete()
models.signals.post_delete.connect(preload_cleanup, sender=Webapp,
dispatch_uid='webapps_preload_cleanup')
class AppLog(ModelBase):
"""
This table is for indexing the activity log by app.
"""
addon = models.ForeignKey('webapps.Webapp', db_constraint=False)
activity_log = models.ForeignKey('ActivityLog')
class Meta:
db_table = 'log_activity_app'
ordering = ('-created',)
class CommentLog(ModelBase):
"""
This table is for indexing the activity log by comment.
"""
activity_log = models.ForeignKey('ActivityLog')
comments = models.TextField()
class Meta:
db_table = 'log_activity_comment'
ordering = ('-created',)
class VersionLog(ModelBase):
"""
This table is for indexing the activity log by version.
"""
activity_log = models.ForeignKey('ActivityLog')
version = models.ForeignKey(Version)
class Meta:
db_table = 'log_activity_version'
ordering = ('-created',)
class UserLog(ModelBase):
"""
This table is for indexing the activity log by user.
Note: This includes activity performed unto the user.
"""
activity_log = models.ForeignKey('ActivityLog')
user = models.ForeignKey(UserProfile)
class Meta:
db_table = 'log_activity_user'
ordering = ('-created',)
class GroupLog(ModelBase):
"""
This table is for indexing the activity log by access group.
"""
activity_log = models.ForeignKey('ActivityLog')
group = models.ForeignKey(Group)
class Meta:
db_table = 'log_activity_group'
ordering = ('-created',)
class ActivityLogManager(ManagerBase):
def for_apps(self, apps):
vals = (AppLog.objects.filter(addon__in=apps)
.values_list('activity_log', flat=True))
if vals:
return self.filter(pk__in=list(vals))
else:
return self.none()
def for_version(self, version):
vals = (VersionLog.objects.filter(version=version)
.values_list('activity_log', flat=True))
return self.filter(pk__in=list(vals))
def for_group(self, group):
return self.filter(grouplog__group=group)
def for_user(self, user):
vals = (UserLog.objects.filter(user=user)
.values_list('activity_log', flat=True))
return self.filter(pk__in=list(vals))
def for_developer(self):
return self.exclude(action__in=mkt.LOG_ADMINS + mkt.LOG_HIDE_DEVELOPER)
def admin_events(self):
return self.filter(action__in=mkt.LOG_ADMINS)
def editor_events(self):
return self.filter(action__in=mkt.LOG_EDITORS)
def review_queue(self, webapp=False):
qs = self._by_type(webapp)
return (qs.filter(action__in=mkt.LOG_REVIEW_QUEUE)
.exclude(user__id=settings.TASK_USER_ID))
    def total_reviews(self, webapp=False):
        """Return the top users, and their # of reviews."""
        qs = self._by_type(webapp)
return (qs.values('user', 'user__display_name', 'user__email')
.filter(action__in=mkt.LOG_REVIEW_QUEUE)
.exclude(user__id=settings.TASK_USER_ID)
.annotate(approval_count=models.Count('id'))
.order_by('-approval_count'))
def monthly_reviews(self, webapp=False):
"""Return the top users for the month, and their # of reviews."""
qs = self._by_type(webapp)
now = datetime.now()
created_date = datetime(now.year, now.month, 1)
return (qs.values('user', 'user__display_name', 'user__email')
.filter(created__gte=created_date,
action__in=mkt.LOG_REVIEW_QUEUE)
.exclude(user__id=settings.TASK_USER_ID)
.annotate(approval_count=models.Count('id'))
.order_by('-approval_count'))
def user_position(self, values_qs, user):
try:
return next(i for (i, d) in enumerate(list(values_qs))
if d.get('user') == user.id) + 1
except StopIteration:
return None
def total_reviews_user_position(self, user, webapp=False):
return self.user_position(self.total_reviews(webapp), user)
def monthly_reviews_user_position(self, user, webapp=False):
return self.user_position(self.monthly_reviews(webapp), user)
def _by_type(self, webapp=False):
qs = super(ActivityLogManager, self).get_queryset()
return qs.extra(
tables=['log_activity_app'],
where=['log_activity_app.activity_log_id=log_activity.id'])
class SafeFormatter(string.Formatter):
"""A replacement for str.format that escapes interpolated values."""
def get_field(self, *args, **kw):
# obj is the value getting interpolated into the string.
obj, used_key = super(SafeFormatter, self).get_field(*args, **kw)
return jinja2.escape(obj), used_key
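# Illustrative sketch (not part of the original module): SafeFormatter escapes
# each interpolated value, so user-supplied text cannot inject markup.
#   fmt = SafeFormatter()
#   fmt.format(u'<a href="{0}">{1}</a>', '/app/1', '<b>name</b>')
#   # -> u'<a href="/app/1">&lt;b&gt;name&lt;/b&gt;</a>'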
class ActivityLog(ModelBase):
TYPES = sorted([(value.id, key) for key, value in mkt.LOG.items()])
user = models.ForeignKey('users.UserProfile', null=True)
action = models.SmallIntegerField(choices=TYPES, db_index=True)
_arguments = models.TextField(blank=True, db_column='arguments')
_details = models.TextField(blank=True, db_column='details')
objects = ActivityLogManager()
formatter = SafeFormatter()
class Meta:
db_table = 'log_activity'
ordering = ('-created',)
def f(self, *args, **kw):
"""Calls SafeFormatter.format and returns a Markup string."""
# SafeFormatter escapes everything so this is safe.
return jinja2.Markup(self.formatter.format(*args, **kw))
@property
def arguments(self):
try:
# d is a structure:
# ``d = [{'addons.addon':12}, {'addons.addon':1}, ... ]``
d = json.loads(self._arguments)
except:
log.debug('unserializing data from addon_log failed: %s' % self.id)
return None
objs = []
for item in d:
# item has only one element.
model_name, pk = item.items()[0]
if model_name in ('str', 'int', 'null'):
objs.append(pk)
else:
(app_label, model_name) = model_name.split('.')
model = apps.get_model(app_label, model_name)
# Cope with soft deleted models.
if hasattr(model, 'with_deleted'):
objs.extend(model.with_deleted.filter(pk=pk))
else:
objs.extend(model.objects.filter(pk=pk))
return objs
@arguments.setter
def arguments(self, args=[]):
"""
Takes an object or a tuple of objects and serializes them and stores it
in the db as a json string.
"""
if args is None:
args = []
if not isinstance(args, (list, tuple)):
args = (args,)
serialize_me = []
for arg in args:
if isinstance(arg, basestring):
serialize_me.append({'str': arg})
elif isinstance(arg, (int, long)):
serialize_me.append({'int': arg})
elif isinstance(arg, tuple):
# Instead of passing an addon instance you can pass a tuple:
# (Webapp, 3) for Webapp with pk=3
serialize_me.append(dict(((unicode(arg[0]._meta), arg[1]),)))
elif arg is not None:
serialize_me.append(dict(((unicode(arg._meta), arg.pk),)))
self._arguments = json.dumps(serialize_me)
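    # Illustrative note (not in the original source): the serialized form is a
    # JSON list of one-key dicts.  Setting, e.g.,
    #   activity.arguments = [some_webapp, 'hello', 3]
    # stores something like
    #   '[{"webapps.webapp": 12}, {"str": "hello"}, {"int": 3}]'
    # which the `arguments` property above re-hydrates into model instances.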
@property
def details(self):
if self._details:
return json.loads(self._details)
@details.setter
def details(self, data):
self._details = json.dumps(data)
@property
def log(self):
return mkt.LOG_BY_ID[self.action]
def to_string(self, type_=None):
log_type = mkt.LOG_BY_ID[self.action]
if type_ and hasattr(log_type, '%s_format' % type_):
format = getattr(log_type, '%s_format' % type_)
else:
format = log_type.format
# We need to copy arguments so we can remove elements from it
# while we loop over self.arguments.
arguments = copy(self.arguments)
addon = None
review = None
version = None
collection = None
tag = None
group = None
website = None
for arg in self.arguments:
if isinstance(arg, Webapp) and not addon:
addon = self.f(u'<a href="{0}">{1}</a>',
arg.get_url_path(), arg.name)
arguments.remove(arg)
if isinstance(arg, Review) and not review:
review = self.f(u'<a href="{0}">{1}</a>',
arg.get_url_path(), _('Review'))
arguments.remove(arg)
if isinstance(arg, Version) and not version:
text = _('Version {0}')
version = self.f(text, arg.version)
arguments.remove(arg)
if isinstance(arg, Tag) and not tag:
if arg.can_reverse():
tag = self.f(u'<a href="{0}">{1}</a>',
arg.get_url_path(), arg.tag_text)
else:
tag = self.f('{0}', arg.tag_text)
if isinstance(arg, Group) and not group:
group = arg.name
arguments.remove(arg)
if isinstance(arg, Website) and not website:
website = self.f(u'<a href="{0}">{1}</a>',
arg.get_url_path(), arg.name)
arguments.remove(arg)
try:
kw = dict(addon=addon, review=review, version=version, group=group,
collection=collection, tag=tag,
user=self.user.display_name)
return self.f(format, *arguments, **kw)
except (AttributeError, KeyError, IndexError):
log.warning('%d contains garbage data' % (self.id or 0))
return 'Something magical happened.'
def __unicode__(self):
return self.to_string()
def __html__(self):
return self
|
{
"content_hash": "d69800d0295cf0254b2154139cd70a82",
"timestamp": "",
"source": "github",
"line_count": 517,
"max_line_length": 79,
"avg_line_length": 34.8936170212766,
"alnum_prop": 0.6005543237250555,
"repo_name": "tsl143/zamboni",
"id": "94623ab0bdaf38dd9c2d65d0e436426e45756a78",
"size": "18040",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mkt/developers/models.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "357511"
},
{
"name": "HTML",
"bytes": "2330810"
},
{
"name": "JavaScript",
"bytes": "536153"
},
{
"name": "Makefile",
"bytes": "4281"
},
{
"name": "Python",
"bytes": "4365804"
},
{
"name": "Shell",
"bytes": "11156"
},
{
"name": "Smarty",
"bytes": "1159"
}
],
"symlink_target": ""
}
|
__author__ = 'gca'
import os
import DNSaaSClient
class ImsDnsClient():
def __init__(self):
self.dns_api_ip = os.environ['DNSAAS_IP']
self.dns_ip = os.environ['DNS_IP']
DNSaaSClient.DNSaaSClientCore.apiurlDNSaaS='http://%s:8080' %self.dns_api_ip
self.tokenID = os.environ['OS_AUTH_TOKEN']
        if 'test' in os.environ['TOPOLOGY']:
            return
        if 'standalone' in os.environ['TOPOLOGY']:
# In case of a standalone topology we need to create also the domains
DNSaaSClient.createDomain('epc.mnc001.mcc001.3gppnetwork.org','admin@mcn.pt',self.tokenID)
            DNSaaSClient.createRecord(domain_name='epc.mnc001.mcc001.3gppnetwork.org',record_name='ns',record_type='A',record_data=self.dns_ip,tokenId=self.tokenID)
            DNSaaSClient.createRecord(domain_name='epc.mnc001.mcc001.3gppnetwork.org',record_name='dns',record_type='A',record_data=self.dns_ip,tokenId=self.tokenID)
DNSaaSClient.createRecord("epc.mnc001.mcc001.3gppnetwork.org", '', 'NAPTR', "10 50 \"s\" \"SIP+D2U\" \"\" _sip._udp", self.tokenID,priority = 10 )
DNSaaSClient.createRecord("epc.mnc001.mcc001.3gppnetwork.org", '', 'NAPTR', "20 50 \"s\" \"SIP+D2U\" \"\" _sip._udp", self.tokenID,priority = 10 )
def create_records_cscfs(self, cscfs_ip):
self.__create_records_icscf(cscfs_ip)
self.__create_records_pcscf(cscfs_ip)
self.__create_records_scscf(cscfs_ip)
def __create_records_pcscf(self, pcscf_ip):
DNSaaSClient.createRecord(domain_name='epc.mnc001.mcc001.3gppnetwork.org',record_name='pcscf',record_type='A',record_data=pcscf_ip,tokenId=self.tokenID)
DNSaaSClient.createRecord(domain_name='epc.mnc001.mcc001.3gppnetwork.org',record_name='pcscf-rx',record_type='A',record_data=pcscf_ip,tokenId=self.tokenID)
DNSaaSClient.createRecord(domain_name='epc.mnc001.mcc001.3gppnetwork.org',record_name='pcscf-rxrf',record_type='A',record_data=pcscf_ip,tokenId=self.tokenID)
DNSaaSClient.createRecord(domain_name='epc.mnc001.mcc001.3gppnetwork.org',record_name='pcscf-rf',record_type='A',record_data=pcscf_ip,tokenId=self.tokenID)
DNSaaSClient.createRecord("epc.mnc001.mcc001.3gppnetwork.org", "_sip.pcscf", "SRV", "0 4060 pcscf.epc.mnc001.mcc001.3gppnetwork.org.", tokenId=self.tokenID, priority = 1)
DNSaaSClient.createRecord("epc.mnc001.mcc001.3gppnetwork.org", "_sip._tcp.pcscf", "SRV", "0 4060 pcscf.epc.mnc001.mcc001.3gppnetwork.org.", tokenId=self.tokenID, priority = 1)
def __create_records_icscf(self, icscf_ip):
DNSaaSClient.createRecord(domain_name='epc.mnc001.mcc001.3gppnetwork.org',record_name='icscf',record_type='A',record_data=icscf_ip,tokenId=self.tokenID)
DNSaaSClient.createRecord(domain_name='epc.mnc001.mcc001.3gppnetwork.org',record_name='icscf-cx',record_type='A',record_data=icscf_ip,tokenId=self.tokenID)
DNSaaSClient.createRecord("epc.mnc001.mcc001.3gppnetwork.org", "_sip.icscf", "SRV", "0 5060 icscf.epc.mnc001.mcc001.3gppnetwork.org.", tokenId=self.tokenID, priority = 1)
DNSaaSClient.createRecord("epc.mnc001.mcc001.3gppnetwork.org", "_sip._udp.icscf", "SRV", "0 5060 icscf.epc.mnc001.mcc001.3gppnetwork.org.", tokenId=self.tokenID, priority = 1)
DNSaaSClient.createRecord("epc.mnc001.mcc001.3gppnetwork.org", "_sip._tcp.icscf", "SRV", "0 5060 icscf.epc.mnc001.mcc001.3gppnetwork.org.", tokenId=self.tokenID, priority = 1)
DNSaaSClient.createRecord("epc.mnc001.mcc001.3gppnetwork.org", "_sip._udp.epc", "SRV", "0 5060 epc.mnc001.mcc001.3gppnetwork.org.", tokenId=self.tokenID, priority = 1)
DNSaaSClient.createRecord("epc.mnc001.mcc001.3gppnetwork.org", "_sip._tcp", "SRV", "0 5060 epc.mnc001.mcc001.3gppnetwork.org.", tokenId=self.tokenID, priority = 1)
DNSaaSClient.createRecord("epc.mnc001.mcc001.3gppnetwork.org", "_sip.epc", "SRV", "0 5060 epc.mnc001.mcc001.3gppnetwork.org.", tokenId=self.tokenID, priority = 1)
# TODO change
DNSaaSClient.createRecord(domain_name='epc.mnc001.mcc001.3gppnetwork.org',record_name='',record_type='A',record_data=icscf_ip,tokenId=self.tokenID)
def __create_records_scscf(self, scscf_ip):
DNSaaSClient.createRecord(domain_name='epc.mnc001.mcc001.3gppnetwork.org',record_name='scscf',record_type='A',record_data=scscf_ip,tokenId=self.tokenID)
DNSaaSClient.createRecord(domain_name='epc.mnc001.mcc001.3gppnetwork.org',record_name='scscf-cx',record_type='A',record_data=scscf_ip,tokenId=self.tokenID)
DNSaaSClient.createRecord(domain_name='epc.mnc001.mcc001.3gppnetwork.org',record_name='scscf-cxrf',record_type='A',record_data=scscf_ip,tokenId=self.tokenID)
DNSaaSClient.createRecord("epc.mnc001.mcc001.3gppnetwork.org", "_sip.scscf", "SRV", "0 6060 scscf.epc.mnc001.mcc001.3gppnetwork.org.", tokenId=self.tokenID, priority = 1)
DNSaaSClient.createRecord("epc.mnc001.mcc001.3gppnetwork.org", "_sip._tcp.scscf", "SRV", "0 6060 scscf.epc.mnc001.mcc001.3gppnetwork.org.", tokenId=self.tokenID, priority = 1)
DNSaaSClient.createRecord("epc.mnc001.mcc001.3gppnetwork.org", "_sip._udp.scscf", "SRV", "0 6060 scscf.epc.mnc001.mcc001.3gppnetwork.org.", tokenId=self.tokenID, priority = 1)
def create_records_hss_1(self, hss_1_ip):
DNSaaSClient.createRecord(domain_name='epc.mnc001.mcc001.3gppnetwork.org',record_name='hss-1',record_type='A',record_data=hss_1_ip,tokenId=self.tokenID)
def create_records_hss_2(self, hss_2_ip):
DNSaaSClient.createRecord(domain_name='epc.mnc001.mcc001.3gppnetwork.org',record_name='hss-2',record_type='A',record_data=hss_2_ip,tokenId=self.tokenID)
def create_records_slf(self, slf_ip):
DNSaaSClient.createRecord(domain_name='epc.mnc001.mcc001.3gppnetwork.org',record_name='slf',record_type='A',record_data=slf_ip,tokenId=self.tokenID)
def create_records_test(self,test_ip):
print "testing dns entry with ip %s"%test_ip
    def configure_dns_entry(self, service_type, hostname):
        return {
            'hss': (self.create_records_hss_1 if "hss-1" in hostname
                    else self.create_records_hss_2),
            'slf': self.create_records_slf,
            'cscfs': self.create_records_cscfs,
            'test': self.create_records_test,
        }[service_type]
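# Illustrative usage sketch (not part of the original module); assumes the
# DNSAAS_IP, DNS_IP, OS_AUTH_TOKEN and topology environment variables used in
# __init__ are set and that DNSaaSClient can reach the DNSaaS API. The host
# name and IP below are hypothetical.
#   ims_dns = ImsDnsClient()
#   configure = ims_dns.configure_dns_entry('cscfs', 'cscfs-1')
#   configure('10.0.0.42')   # creates the CSCF A and SRV records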
|
{
"content_hash": "43fcf3b70d665a3ce30515e52f4d51c9",
"timestamp": "",
"source": "github",
"line_count": 77,
"max_line_length": 183,
"avg_line_length": 81.94805194805195,
"alnum_prop": 0.706973058637084,
"repo_name": "MobileCloudNetworking/maas",
"id": "10905104f708f23349d254756c593e62a13315d2",
"size": "6310",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bundle/util/IMSDNSConfigurator.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "340104"
},
{
"name": "Shell",
"bytes": "1264"
}
],
"symlink_target": ""
}
|
import tensorflow as tf
hello = tf.constant('Hello, World!')
# Start tensorflow session
with tf.Session() as sess:
# Run the operation
print(sess.run(hello))
|
{
"content_hash": "8385b43382b556ebb8eb292b5df4af52",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 36,
"avg_line_length": 23.857142857142858,
"alnum_prop": 0.7065868263473054,
"repo_name": "Wonjuseo/Project101",
"id": "a576ca642a0750a1c2bd59d741f6d50b51d0c10e",
"size": "168",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "0/0-0. HelloWorld.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "48063"
},
{
"name": "Python",
"bytes": "92597"
}
],
"symlink_target": ""
}
|
from django import forms
from django.contrib.auth.models import User
from django.core.validators import RegexValidator
from reddit.models import Submission
from users.models import RedditUser
class UserForm(forms.ModelForm):
alphanumeric = RegexValidator(r'^[0-9a-zA-Z_]*$',
'This value may contain only letters, '
'numbers and _ characters.')
username = forms.CharField(widget=forms.TextInput(
attrs=
{'class': "form-control",
'placeholder': "Username",
'required': '',
'autofocus': ''}),
max_length=12,
min_length=3,
required=True,
validators=[alphanumeric])
password = forms.CharField(widget=forms.PasswordInput(
attrs=
{'class': "form-control",
'placeholder': "Password",
'required': ''}),
min_length=4,
required=True)
class Meta:
model = User
fields = ('username', 'password')
class ProfileForm(forms.ModelForm):
first_name = forms.CharField(widget=forms.TextInput(
attrs={'class': "form-control",
'id': "first_name",
'type': "text"}),
min_length=1,
max_length=12,
required=False
)
last_name = forms.CharField(widget=forms.TextInput(
attrs={'class': "form-control",
'id': "last_name",
'type': "text"}),
min_length=1,
max_length=12,
required=False
)
email = forms.EmailField(widget=forms.EmailInput(
attrs={'class': "form-control",
'id': "email",
'type': "text"}),
required=False
)
display_picture = forms.BooleanField(required=False)
about_text = forms.CharField(widget=forms.Textarea(
attrs={'class': "form-control",
'id': "about_me",
'rows': "4",
}),
max_length=500,
required=False
)
homepage = forms.CharField(widget=forms.URLInput(
attrs={'class': "form-control",
'id': "homepage"}),
required=False
)
github = forms.CharField(widget=forms.TextInput(
attrs={'class': "form-control",
'id': "github",
'type': "text"}),
required=False,
max_length=39
)
twitter = forms.CharField(widget=forms.TextInput(
attrs={'class': "form-control",
'id': "twitter",
'type': "text"}),
required=False,
max_length=15
)
class Meta:
model = RedditUser
fields = ('first_name', 'last_name', 'email',
'display_picture', 'about_text',
'homepage', 'github', 'twitter')
class SubmissionForm(forms.ModelForm):
title = forms.CharField(widget=forms.TextInput(
attrs={'class': "form-control",
'placeholder': "Submission title"}),
required=True, min_length=1, max_length=250)
url = forms.URLField(widget=forms.URLInput(
attrs={'class': "form-control",
               'placeholder': "(Optional) http://www.example.com"}),
required=False)
text = forms.CharField(widget=forms.Textarea(
attrs={
'class': "form-control",
'rows': "3",
'placeholder': "Optional text"}),
max_length=5000,
required=False)
class Meta:
model = Submission
fields = ('title', 'url', 'text')
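# Illustrative usage sketch (not part of the original module): validate a link
# submission the same way a view would before saving it.
#   form = SubmissionForm(data={'title': 'Interesting link',
#                               'url': 'http://www.example.com'})
#   form.is_valid()   # runs the field validators declared above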
|
{
"content_hash": "b22d3c653b99aa2f129eabfc7dcb7ad3",
"timestamp": "",
"source": "github",
"line_count": 123,
"max_line_length": 73,
"avg_line_length": 28.5609756097561,
"alnum_prop": 0.5351551380586393,
"repo_name": "Nikola-K/django_reddit",
"id": "94901f56031d4ee9b6566bd7af60f55ec8b0131f",
"size": "3513",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "reddit/forms.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "2416"
},
{
"name": "HTML",
"bytes": "22856"
},
{
"name": "JavaScript",
"bytes": "6902"
},
{
"name": "Python",
"bytes": "92178"
}
],
"symlink_target": ""
}
|
from os import path
from time import time
import subprocess
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
from create import Create, FULL_MODE
base_path="/home/root/go/web_control/http/"
ip_address="192.168.0.109"
jquery='https://ajax.googleapis.com/ajax/libs/jquery/1.6.4/jquery.min.js' #'jquery.js' # with no internet
def main():
try:
vlc = setup_video()
server = HTTPServer(('', 8090), CreateHandler)
print 'started httpserver...'
server.serve_forever()
except KeyboardInterrupt:
print '^C received, shutting down server'
server.socket.close()
vlc.terminate()
vlc.wait()
exit()
def setup_video(device='/dev/video7'):
set_resolution = subprocess.Popen("v4l2-ctl -v width=160,height=120".split())
set_resolution.wait()
return subprocess.Popen(['cvlc', '--no-audio', 'v4l2://'+device+':width=160:height=120:fps=5:chroma=mjpg', '--sout=#standard{access=http,mux=mpjpeg,dst=0.0.0.0:8080}', '--sout-http-mime=multipart/x-mixed-replace;boundary=--7b3cc56e5f51db803f790dad720ed50a'])
controller = None
last_control = time()
c = Create('/dev/ttyO0', FULL_MODE)
drive_speed=0
turn_speed=0
class CreateHandler(BaseHTTPRequestHandler):
def do_GET(self):
global controller, last_control
request_client = self.client_address[0]
try:
request, commands = self.path.split("?")
except:
if self.path == '/jquery.js':
self.set_mime_type('text/javascript')
self.send_file('jquery.js')
return
self.set_mime_type('text/html')
if self.path == '/':
if controller and time() - 60 < last_control:
self.send_file('viewer.html')
else:
controller = request_client
last_control = time()
self.send_file('controller.html')
return
if controller == request_client:
last_control = time()
self.process_commands(commands)
def process_commands(self, commands):
global drive_speed, turn_speed, c
command_list = commands.split("&")
for command in command_list:
name, value = command.split("=")
value = int(value)
if name == "drive":
drive_speed = value*10
elif name == "turn":
turn_speed = value*5
        print "motor speeds are: Left - ", drive_speed-turn_speed, ", Right - ", drive_speed+turn_speed
c.driveDirect(drive_speed-turn_speed, drive_speed+turn_speed)
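    # Illustrative note (not in the original source): the controller page is
    # expected to issue requests of the form
    #   GET /?drive=10&turn=-2
    # which process_commands() above turns into differential wheel speeds.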
def send_file(self, file):
global base_path, ip_address, jquery
full_path = path.join(base_path, file)
print "Sending " + full_path
html = open(full_path)
self.wfile.write(html.read().replace("$$_IP_$$", ip_address).replace("$$_JQUERY_$$", jquery))
html.close()
    def set_mime_type(self, type_name):
        self.send_response(200)
        self.send_header('Content-type', type_name)
        self.end_headers()
# Invoke main() only after setup_video, CreateHandler and the module-level
# Create connection have been defined; calling it at the top of the module
# would raise a NameError.
if __name__ == '__main__':
    main()
|
{
"content_hash": "3561dd65336f53f7c19a25c384f17ab2",
"timestamp": "",
"source": "github",
"line_count": 87,
"max_line_length": 260,
"avg_line_length": 32.827586206896555,
"alnum_prop": 0.6519607843137255,
"repo_name": "squirly/turtlecore-web-controller",
"id": "4f5d2e023ebb9662b7a95628ef4a00c6ce96a89d",
"size": "2856",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "web_control/webserver.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "46187"
}
],
"symlink_target": ""
}
|
__author__ = 'danny'
import csv
import logging
import tablib
from datetime import datetime
from django.db.models import Model
from django.db.models.fields.files import FieldFile
from unicodedata import normalize
from django.core.exceptions import PermissionDenied
from django.http import HttpResponse
from django.template import Context, Template
from django.conf import settings
from django.core.urlresolvers import reverse
def export_as_excel(modeladmin, request, queryset):
if not request.user.is_staff:
raise PermissionDenied
opts = modeladmin.model._meta
response = HttpResponse(content_type='text/csv; charset=utf-8')
response['Content-Disposition'] = 'attachment; filename=%s.xls' % str(opts).replace('.', '_')
try:
field_names = modeladmin.model.get_csv_fields()
v_field_names = field_names
except:
field_names = [field.name for field in opts.fields]
v_field_names = [getattr(field, 'verbose_name') or field.name for field in opts.fields]
v_field_names = map(lambda x: x if x != 'ID' else 'Id', v_field_names)
    headers = v_field_names
    data = tablib.Dataset(headers=headers)
for obj in queryset:
acc = []
for field in field_names:
try:
uf = getattr(obj, field)()
except TypeError:
try:
uf = getattr(obj, field)
except:
uf = ' error obteniendo el dato'
if uf is None:
uf = ''
elif isinstance(uf, datetime):
uf = str(uf)
elif isinstance(uf, Model):
uf = str(uf)
elif isinstance(uf, FieldFile):
uf = str(uf.url)
acc.append(uf)
data.append(acc)
response.write(data.xls)
return response
export_as_excel.short_description = "Exportar como Excel"
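# Illustrative usage sketch (not part of the original module): register the
# action on a hypothetical ModelAdmin so staff can export selected rows.
#   from django.contrib import admin
#   class TrackAdmin(admin.ModelAdmin):
#       actions = [export_as_excel]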
|
{
"content_hash": "00eec184c14678aa1528cf42fb3aa3ca",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 97,
"avg_line_length": 32.21666666666667,
"alnum_prop": 0.6125193998965339,
"repo_name": "daatrujillopu/Sfotipy",
"id": "b5d647482d6ab236d5edac0e18eb18c9551d366b",
"size": "1933",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "actions.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1180"
},
{
"name": "Python",
"bytes": "15049"
}
],
"symlink_target": ""
}
|
"""
This module implements multioutput regression and classification.
The estimators provided in this module are meta-estimators: they require
a base estimator to be provided in their constructor. The meta-estimator
extends single output estimators to multioutput estimators.
"""
# Author: Tim Head <betatim@gmail.com>
# Author: Hugo Bowne-Anderson <hugobowne@gmail.com>
# Author: Chris Rivera <chris.richard.rivera@gmail.com>
# Author: Michael Williamson
# Author: James Ashton Nichols <james.ashton.nichols@gmail.com>
#
# License: BSD 3 clause
import numpy as np
from abc import ABCMeta, abstractmethod
from .base import BaseEstimator, clone, MetaEstimatorMixin
from .base import RegressorMixin, ClassifierMixin
from .utils import check_array, check_X_y
from .utils.fixes import parallel_helper
from .utils.validation import check_is_fitted, has_fit_parameter
from .utils.metaestimators import if_delegate_has_method
from .utils.multiclass import check_classification_targets
from .externals.joblib import Parallel, delayed
from .externals import six
__all__ = ["MultiOutputRegressor", "MultiOutputClassifier"]
def _fit_estimator(estimator, X, y, sample_weight=None):
estimator = clone(estimator)
if sample_weight is not None:
estimator.fit(X, y, sample_weight=sample_weight)
else:
estimator.fit(X, y)
return estimator
def _partial_fit_estimator(estimator, X, y, classes=None, sample_weight=None,
first_time=True):
if first_time:
estimator = clone(estimator)
if sample_weight is not None:
if classes is not None:
estimator.partial_fit(X, y, classes=classes,
sample_weight=sample_weight)
else:
estimator.partial_fit(X, y, sample_weight=sample_weight)
else:
if classes is not None:
estimator.partial_fit(X, y, classes=classes)
else:
estimator.partial_fit(X, y)
return estimator
class MultiOutputEstimator(six.with_metaclass(ABCMeta, BaseEstimator,
MetaEstimatorMixin)):
@abstractmethod
def __init__(self, estimator, n_jobs=1):
self.estimator = estimator
self.n_jobs = n_jobs
@if_delegate_has_method('estimator')
def partial_fit(self, X, y, classes=None, sample_weight=None):
"""Incrementally fit the model to data.
Fit a separate model for each output variable.
Parameters
----------
X : (sparse) array-like, shape (n_samples, n_features)
Data.
y : (sparse) array-like, shape (n_samples, n_outputs)
Multi-output targets.
classes : list of numpy arrays, shape (n_outputs)
Each array is unique classes for one output in str/int
Can be obtained by via
``[np.unique(y[:, i]) for i in range(y.shape[1])]``, where y is the
target matrix of the entire dataset.
This argument is required for the first call to partial_fit
and can be omitted in the subsequent calls.
Note that y doesn't need to contain all labels in `classes`.
sample_weight : array-like, shape = (n_samples) or None
Sample weights. If None, then samples are equally weighted.
Only supported if the underlying regressor supports sample
weights.
Returns
-------
self : object
Returns self.
"""
X, y = check_X_y(X, y,
multi_output=True,
accept_sparse=True)
if y.ndim == 1:
raise ValueError("y must have at least two dimensions for "
"multi-output regression but has only one.")
if (sample_weight is not None and
not has_fit_parameter(self.estimator, 'sample_weight')):
raise ValueError("Underlying estimator does not support"
" sample weights.")
first_time = not hasattr(self, 'estimators_')
self.estimators_ = Parallel(n_jobs=self.n_jobs)(
delayed(_partial_fit_estimator)(
self.estimators_[i] if not first_time else self.estimator,
X, y[:, i],
classes[i] if classes is not None else None,
sample_weight, first_time) for i in range(y.shape[1]))
return self
def fit(self, X, y, sample_weight=None):
""" Fit the model to data.
Fit a separate model for each output variable.
Parameters
----------
X : (sparse) array-like, shape (n_samples, n_features)
Data.
y : (sparse) array-like, shape (n_samples, n_outputs)
Multi-output targets. An indicator matrix turns on multilabel
estimation.
sample_weight : array-like, shape = (n_samples) or None
Sample weights. If None, then samples are equally weighted.
Only supported if the underlying regressor supports sample
weights.
Returns
-------
self : object
Returns self.
"""
if not hasattr(self.estimator, "fit"):
raise ValueError("The base estimator should implement a fit method")
X, y = check_X_y(X, y,
multi_output=True,
accept_sparse=True)
if isinstance(self, ClassifierMixin):
check_classification_targets(y)
if y.ndim == 1:
raise ValueError("y must have at least two dimensions for "
"multi-output regression but has only one.")
if (sample_weight is not None and
not has_fit_parameter(self.estimator, 'sample_weight')):
raise ValueError("Underlying estimator does not support"
" sample weights.")
self.estimators_ = Parallel(n_jobs=self.n_jobs)(
delayed(_fit_estimator)(
self.estimator, X, y[:, i], sample_weight)
for i in range(y.shape[1]))
return self
def predict(self, X):
"""Predict multi-output variable using a model
trained for each target variable.
Parameters
----------
X : (sparse) array-like, shape (n_samples, n_features)
Data.
Returns
-------
y : (sparse) array-like, shape (n_samples, n_outputs)
Multi-output targets predicted across multiple predictors.
Note: Separate models are generated for each predictor.
"""
check_is_fitted(self, 'estimators_')
if not hasattr(self.estimator, "predict"):
raise ValueError("The base estimator should implement a predict method")
X = check_array(X, accept_sparse=True)
y = Parallel(n_jobs=self.n_jobs)(
delayed(parallel_helper)(e, 'predict', X)
for e in self.estimators_)
return np.asarray(y).T
class MultiOutputRegressor(MultiOutputEstimator, RegressorMixin):
"""Multi target regression
This strategy consists of fitting one regressor per target. This is a
simple strategy for extending regressors that do not natively support
multi-target regression.
Parameters
----------
estimator : estimator object
An estimator object implementing `fit` and `predict`.
n_jobs : int, optional, default=1
The number of jobs to run in parallel for `fit`. If -1,
then the number of jobs is set to the number of cores.
When individual estimators are fast to train or predict
using `n_jobs>1` can result in slower performance due
to the overhead of spawning processes.
"""
def __init__(self, estimator, n_jobs=1):
super(MultiOutputRegressor, self).__init__(estimator, n_jobs)
@if_delegate_has_method('estimator')
def partial_fit(self, X, y, sample_weight=None):
"""Incrementally fit the model to data.
Fit a separate model for each output variable.
Parameters
----------
X : (sparse) array-like, shape (n_samples, n_features)
Data.
y : (sparse) array-like, shape (n_samples, n_outputs)
Multi-output targets.
sample_weight : array-like, shape = (n_samples) or None
Sample weights. If None, then samples are equally weighted.
Only supported if the underlying regressor supports sample
weights.
Returns
-------
self : object
Returns self.
"""
super(MultiOutputRegressor, self).partial_fit(
X, y, sample_weight=sample_weight)
def score(self, X, y, sample_weight=None):
"""Returns the coefficient of determination R^2 of the prediction.
The coefficient R^2 is defined as (1 - u/v), where u is the regression
sum of squares ((y_true - y_pred) ** 2).sum() and v is the residual
sum of squares ((y_true - y_true.mean()) ** 2).sum().
Best possible score is 1.0 and it can be negative (because the
model can be arbitrarily worse). A constant model that always
predicts the expected value of y, disregarding the input features,
would get a R^2 score of 0.0.
Notes
-----
R^2 is calculated by weighting all the targets equally using
`multioutput='uniform_average'`.
Parameters
----------
X : array-like, shape (n_samples, n_features)
Test samples.
y : array-like, shape (n_samples) or (n_samples, n_outputs)
True values for X.
sample_weight : array-like, shape [n_samples], optional
Sample weights.
Returns
-------
score : float
R^2 of self.predict(X) wrt. y.
"""
# XXX remove in 0.19 when r2_score default for multioutput changes
from .metrics import r2_score
return r2_score(y, self.predict(X), sample_weight=sample_weight,
multioutput='uniform_average')
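# Illustrative usage sketch (not part of the upstream module): wrap any
# single-output regressor to predict several targets at once.
#   from sklearn.linear_model import Ridge
#   import numpy as np
#   X = np.random.rand(20, 4)
#   Y = np.random.rand(20, 3)                    # three targets per sample
#   model = MultiOutputRegressor(Ridge()).fit(X, Y)
#   model.predict(X).shape                       # -> (20, 3)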
class MultiOutputClassifier(MultiOutputEstimator, ClassifierMixin):
"""Multi target classification
This strategy consists of fitting one classifier per target. This is a
simple strategy for extending classifiers that do not natively support
multi-target classification
Parameters
----------
estimator : estimator object
An estimator object implementing `fit`, `score` and `predict_proba`.
n_jobs : int, optional, default=1
The number of jobs to use for the computation. If -1 all CPUs are used.
If 1 is given, no parallel computing code is used at all, which is
useful for debugging. For n_jobs below -1, (n_cpus + 1 + n_jobs) are
used. Thus for n_jobs = -2, all CPUs but one are used.
The number of jobs to use for the computation.
It does each target variable in y in parallel.
Attributes
----------
estimators_ : list of `n_output` estimators
Estimators used for predictions.
"""
def __init__(self, estimator, n_jobs=1):
super(MultiOutputClassifier, self).__init__(estimator, n_jobs)
def predict_proba(self, X):
"""Probability estimates.
Returns prediction probabilites for each class of each output.
Parameters
----------
X : array-like, shape (n_samples, n_features)
Data
Returns
-------
p : array of shape = [n_samples, n_classes], or a list of n_outputs \
such arrays if n_outputs > 1.
The class probabilities of the input samples. The order of the
classes corresponds to that in the attribute `classes_`.
"""
check_is_fitted(self, 'estimators_')
if not hasattr(self.estimator, "predict_proba"):
            raise ValueError("The base estimator should implement "
                             "predict_proba method")
results = [estimator.predict_proba(X) for estimator in
self.estimators_]
return results
    def score(self, X, y):
        """Returns the mean accuracy on the given test data and labels.
Parameters
----------
X : array-like, shape [n_samples, n_features]
Test samples
y : array-like, shape [n_samples, n_outputs]
True values for X
Returns
-------
scores : float
accuracy_score of self.predict(X) versus y
"""
check_is_fitted(self, 'estimators_')
n_outputs_ = len(self.estimators_)
if y.ndim == 1:
raise ValueError("y must have at least two dimensions for "
"multi target classification but has only one")
if y.shape[1] != n_outputs_:
raise ValueError("The number of outputs of Y for fit {0} and"
" score {1} should be same".
format(n_outputs_, y.shape[1]))
y_pred = self.predict(X)
return np.mean(np.all(y == y_pred, axis=1))
|
{
"content_hash": "2b969ba8741c2786346e625c7cd6cb6a",
"timestamp": "",
"source": "github",
"line_count": 367,
"max_line_length": 84,
"avg_line_length": 35.82016348773842,
"alnum_prop": 0.5969876768598813,
"repo_name": "Vimos/scikit-learn",
"id": "bdb85ad890a97358db79d0a9eebf964afbc92e81",
"size": "13146",
"binary": false,
"copies": "10",
"ref": "refs/heads/master",
"path": "sklearn/multioutput.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "3366"
},
{
"name": "C",
"bytes": "451977"
},
{
"name": "C++",
"bytes": "140261"
},
{
"name": "Makefile",
"bytes": "1512"
},
{
"name": "PowerShell",
"bytes": "17042"
},
{
"name": "Python",
"bytes": "7000865"
},
{
"name": "Shell",
"bytes": "19388"
}
],
"symlink_target": ""
}
|
"""All methods needed to bootstrap a Home Assistant instance."""
from __future__ import annotations
import asyncio
from collections.abc import Awaitable, Generator, Iterable
import contextlib
import logging.handlers
from timeit import default_timer as timer
from types import ModuleType
from typing import Callable
from homeassistant import config as conf_util, core, loader, requirements
from homeassistant.config import async_notify_setup_error
from homeassistant.const import (
EVENT_COMPONENT_LOADED,
EVENT_HOMEASSISTANT_START,
PLATFORM_FORMAT,
)
from homeassistant.core import CALLBACK_TYPE
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.typing import ConfigType
from homeassistant.util import dt as dt_util, ensure_unique_string
# mypy: disallow-any-generics
_LOGGER = logging.getLogger(__name__)
ATTR_COMPONENT = "component"
BASE_PLATFORMS = {
"air_quality",
"alarm_control_panel",
"binary_sensor",
"camera",
"climate",
"cover",
"device_tracker",
"fan",
"humidifier",
"image_processing",
"light",
"lock",
"media_player",
"notify",
"number",
"remote",
"scene",
"select",
"sensor",
"siren",
"switch",
"tts",
"vacuum",
"water_heater",
"weather",
}
DATA_SETUP_DONE = "setup_done"
DATA_SETUP_STARTED = "setup_started"
DATA_SETUP_TIME = "setup_time"
DATA_SETUP = "setup_tasks"
DATA_DEPS_REQS = "deps_reqs_processed"
SLOW_SETUP_WARNING = 10
SLOW_SETUP_MAX_WAIT = 300
@core.callback
def async_set_domains_to_be_loaded(hass: core.HomeAssistant, domains: set[str]) -> None:
"""Set domains that are going to be loaded from the config.
This will allow us to properly handle after_dependencies.
"""
hass.data[DATA_SETUP_DONE] = {domain: asyncio.Event() for domain in domains}
def setup_component(hass: core.HomeAssistant, domain: str, config: ConfigType) -> bool:
"""Set up a component and all its dependencies."""
return asyncio.run_coroutine_threadsafe(
async_setup_component(hass, domain, config), hass.loop
).result()
async def async_setup_component(
hass: core.HomeAssistant, domain: str, config: ConfigType
) -> bool:
"""Set up a component and all its dependencies.
This method is a coroutine.
"""
if domain in hass.config.components:
return True
setup_tasks = hass.data.setdefault(DATA_SETUP, {})
if domain in setup_tasks:
return await setup_tasks[domain] # type: ignore
task = setup_tasks[domain] = hass.async_create_task(
_async_setup_component(hass, domain, config)
)
try:
return await task # type: ignore
finally:
if domain in hass.data.get(DATA_SETUP_DONE, {}):
hass.data[DATA_SETUP_DONE].pop(domain).set()
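# Illustrative sketch (not part of the upstream module): set up an integration
# and its dependencies from within an async context; "sun" is only an example
# domain with an empty config.
#   if await async_setup_component(hass, "sun", {"sun": {}}):
#       _LOGGER.info("sun integration is ready")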
async def _async_process_dependencies(
hass: core.HomeAssistant, config: ConfigType, integration: loader.Integration
) -> bool:
"""Ensure all dependencies are set up."""
dependencies_tasks = {
dep: hass.loop.create_task(async_setup_component(hass, dep, config))
for dep in integration.dependencies
if dep not in hass.config.components
}
after_dependencies_tasks = {}
to_be_loaded = hass.data.get(DATA_SETUP_DONE, {})
for dep in integration.after_dependencies:
if (
dep not in dependencies_tasks
and dep in to_be_loaded
and dep not in hass.config.components
):
after_dependencies_tasks[dep] = hass.loop.create_task(
to_be_loaded[dep].wait()
)
if not dependencies_tasks and not after_dependencies_tasks:
return True
if dependencies_tasks:
_LOGGER.debug(
"Dependency %s will wait for dependencies %s",
integration.domain,
list(dependencies_tasks),
)
if after_dependencies_tasks:
_LOGGER.debug(
"Dependency %s will wait for after dependencies %s",
integration.domain,
list(after_dependencies_tasks),
)
async with hass.timeout.async_freeze(integration.domain):
results = await asyncio.gather(
*dependencies_tasks.values(), *after_dependencies_tasks.values()
)
failed = [
domain for idx, domain in enumerate(dependencies_tasks) if not results[idx]
]
if failed:
_LOGGER.error(
"Unable to set up dependencies of %s. Setup failed for dependencies: %s",
integration.domain,
", ".join(failed),
)
return False
return True
async def _async_setup_component(
hass: core.HomeAssistant, domain: str, config: ConfigType
) -> bool:
"""Set up a component for Home Assistant.
This method is a coroutine.
"""
def log_error(msg: str, link: str | None = None) -> None:
"""Log helper."""
_LOGGER.error("Setup failed for %s: %s", domain, msg)
async_notify_setup_error(hass, domain, link)
try:
integration = await loader.async_get_integration(hass, domain)
except loader.IntegrationNotFound:
log_error("Integration not found.")
return False
if integration.disabled:
log_error(f"Dependency is disabled - {integration.disabled}")
return False
# Validate all dependencies exist and there are no circular dependencies
if not await integration.resolve_dependencies():
return False
# Process requirements as soon as possible, so we can import the component
# without requiring imports to be in functions.
try:
await async_process_deps_reqs(hass, config, integration)
except HomeAssistantError as err:
log_error(str(err), integration.documentation)
return False
# Some integrations fail on import because they call functions incorrectly.
# So we do it before validating config to catch these errors.
try:
component = integration.get_component()
except ImportError as err:
log_error(f"Unable to import component: {err}", integration.documentation)
return False
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Setup failed for %s: unknown error", domain)
return False
processed_config = await conf_util.async_process_component_config(
hass, config, integration
)
if processed_config is None:
log_error("Invalid config.", integration.documentation)
return False
start = timer()
_LOGGER.info("Setting up %s", domain)
with async_start_setup(hass, [domain]):
if hasattr(component, "PLATFORM_SCHEMA"):
# Entity components have their own warning
warn_task = None
else:
warn_task = hass.loop.call_later(
SLOW_SETUP_WARNING,
_LOGGER.warning,
"Setup of %s is taking over %s seconds.",
domain,
SLOW_SETUP_WARNING,
)
task = None
result = True
try:
if hasattr(component, "async_setup"):
task = component.async_setup(hass, processed_config) # type: ignore
elif hasattr(component, "setup"):
# This should not be replaced with hass.async_add_executor_job because
# we don't want to track this task in case it blocks startup.
task = hass.loop.run_in_executor(
None, component.setup, hass, processed_config # type: ignore
)
elif not hasattr(component, "async_setup_entry"):
log_error("No setup or config entry setup function defined.")
return False
if task:
async with hass.timeout.async_timeout(SLOW_SETUP_MAX_WAIT, domain):
result = await task
except asyncio.TimeoutError:
_LOGGER.error(
"Setup of %s is taking longer than %s seconds."
" Startup will proceed without waiting any longer",
domain,
SLOW_SETUP_MAX_WAIT,
)
return False
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Error during setup of component %s", domain)
async_notify_setup_error(hass, domain, integration.documentation)
return False
finally:
end = timer()
if warn_task:
warn_task.cancel()
_LOGGER.info("Setup of domain %s took %.1f seconds", domain, end - start)
if result is False:
log_error("Integration failed to initialize.")
return False
if result is not True:
log_error(
f"Integration {domain!r} did not return boolean if setup was "
"successful. Disabling component."
)
return False
# Flush out async_setup calling create_task. Fragile but covered by test.
await asyncio.sleep(0)
await hass.config_entries.flow.async_wait_init_flow_finish(domain)
await asyncio.gather(
*(
entry.async_setup(hass, integration=integration)
for entry in hass.config_entries.async_entries(domain)
)
)
hass.config.components.add(domain)
# Cleanup
if domain in hass.data[DATA_SETUP]:
hass.data[DATA_SETUP].pop(domain)
hass.bus.async_fire(EVENT_COMPONENT_LOADED, {ATTR_COMPONENT: domain})
return True
async def async_prepare_setup_platform(
hass: core.HomeAssistant, hass_config: ConfigType, domain: str, platform_name: str
) -> ModuleType | None:
"""Load a platform and makes sure dependencies are setup.
This method is a coroutine.
"""
platform_path = PLATFORM_FORMAT.format(domain=domain, platform=platform_name)
def log_error(msg: str) -> None:
"""Log helper."""
_LOGGER.error("Unable to prepare setup for platform %s: %s", platform_path, msg)
async_notify_setup_error(hass, platform_path)
try:
integration = await loader.async_get_integration(hass, platform_name)
except loader.IntegrationNotFound:
log_error("Integration not found")
return None
# Process deps and reqs as soon as possible, so that requirements are
# available when we import the platform.
try:
await async_process_deps_reqs(hass, hass_config, integration)
except HomeAssistantError as err:
log_error(str(err))
return None
try:
platform = integration.get_platform(domain)
except ImportError as exc:
log_error(f"Platform not found ({exc}).")
return None
# Already loaded
if platform_path in hass.config.components:
return platform
# Platforms cannot exist on their own, they are part of their integration.
# If the integration is not set up yet, and can be set up, set it up.
if integration.domain not in hass.config.components:
try:
component = integration.get_component()
except ImportError as exc:
log_error(f"Unable to import the component ({exc}).")
return None
if (
hasattr(component, "setup") or hasattr(component, "async_setup")
) and not await async_setup_component(hass, integration.domain, hass_config):
log_error("Unable to set up component.")
return None
return platform
async def async_process_deps_reqs(
hass: core.HomeAssistant, config: ConfigType, integration: loader.Integration
) -> None:
"""Process all dependencies and requirements for a module.
Module is a Python module of either a component or platform.
"""
processed = hass.data.get(DATA_DEPS_REQS)
if processed is None:
processed = hass.data[DATA_DEPS_REQS] = set()
elif integration.domain in processed:
return
if not await _async_process_dependencies(hass, config, integration):
raise HomeAssistantError("Could not set up all dependencies.")
if not hass.config.skip_pip and integration.requirements:
async with hass.timeout.async_freeze(integration.domain):
await requirements.async_get_integration_with_requirements(
hass, integration.domain
)
processed.add(integration.domain)
@core.callback
def async_when_setup(
hass: core.HomeAssistant,
component: str,
when_setup_cb: Callable[[core.HomeAssistant, str], Awaitable[None]],
) -> None:
"""Call a method when a component is setup."""
_async_when_setup(hass, component, when_setup_cb, False)
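# Illustrative sketch (not part of the upstream module): run a coroutine once a
# given integration ("http" here is only an example) has finished setting up.
#   async def _notify_ready(hass, component):
#       _LOGGER.info("%s finished setting up", component)
#   async_when_setup(hass, "http", _notify_ready)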
@core.callback
def async_when_setup_or_start(
hass: core.HomeAssistant,
component: str,
when_setup_cb: Callable[[core.HomeAssistant, str], Awaitable[None]],
) -> None:
"""Call a method when a component is setup or state is fired."""
_async_when_setup(hass, component, when_setup_cb, True)
@core.callback
def _async_when_setup(
hass: core.HomeAssistant,
component: str,
when_setup_cb: Callable[[core.HomeAssistant, str], Awaitable[None]],
start_event: bool,
) -> None:
"""Call a method when a component is setup or the start event fires."""
async def when_setup() -> None:
"""Call the callback."""
try:
await when_setup_cb(hass, component)
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Error handling when_setup callback for %s", component)
if component in hass.config.components:
hass.async_create_task(when_setup())
return
listeners: list[CALLBACK_TYPE] = []
async def _matched_event(event: core.Event) -> None:
"""Call the callback when we matched an event."""
for listener in listeners:
listener()
await when_setup()
async def _loaded_event(event: core.Event) -> None:
"""Call the callback if we loaded the expected component."""
if event.data[ATTR_COMPONENT] == component:
await _matched_event(event)
listeners.append(hass.bus.async_listen(EVENT_COMPONENT_LOADED, _loaded_event))
if start_event:
listeners.append(
hass.bus.async_listen(EVENT_HOMEASSISTANT_START, _matched_event)
)
@core.callback
def async_get_loaded_integrations(hass: core.HomeAssistant) -> set[str]:
"""Return the complete list of loaded integrations."""
integrations = set()
for component in hass.config.components:
if "." not in component:
integrations.add(component)
continue
domain, platform = component.split(".", 1)
if domain in BASE_PLATFORMS:
integrations.add(platform)
return integrations
@contextlib.contextmanager
def async_start_setup(
hass: core.HomeAssistant, components: Iterable[str]
) -> Generator[None, None, None]:
"""Keep track of when setup starts and finishes."""
setup_started = hass.data.setdefault(DATA_SETUP_STARTED, {})
started = dt_util.utcnow()
unique_components = {}
for domain in components:
unique = ensure_unique_string(domain, setup_started)
unique_components[unique] = domain
setup_started[unique] = started
yield
setup_time = hass.data.setdefault(DATA_SETUP_TIME, {})
time_taken = dt_util.utcnow() - started
for unique, domain in unique_components.items():
del setup_started[unique]
if "." in domain:
_, integration = domain.split(".", 1)
else:
integration = domain
if integration in setup_time:
setup_time[integration] += time_taken
else:
setup_time[integration] = time_taken
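# Illustrative sketch (not part of the upstream module): time a batch of setups
# so the elapsed time is recorded under hass.data[DATA_SETUP_TIME].
#   with async_start_setup(hass, ["light", "switch"]):
#       ...  # perform the actual setup work for these domains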
|
{
"content_hash": "e3f0c68c6a9a5bfbaea23af0a79d718e",
"timestamp": "",
"source": "github",
"line_count": 488,
"max_line_length": 88,
"avg_line_length": 32.31967213114754,
"alnum_prop": 0.6354298757291402,
"repo_name": "sander76/home-assistant",
"id": "95bb29c4b9d5e2f819cfe87909dc37e798cbc87c",
"size": "15772",
"binary": false,
"copies": "3",
"ref": "refs/heads/dev",
"path": "homeassistant/setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1795"
},
{
"name": "Python",
"bytes": "36548768"
},
{
"name": "Shell",
"bytes": "4910"
}
],
"symlink_target": ""
}
|
from django.test import TestCase
from restclients.mock_http import MockHTTP
from myuw.util.cache_implementation import MyUWCache
from restclients.models import CacheEntryTimed
from datetime import timedelta
CACHE = 'myuw.util.cache_implementation.MyUWCache'
class TestCustomCachePolicy(TestCase):
def test_sws_default_policies(self):
with self.settings(RESTCLIENTS_DAO_CACHE_CLASS=CACHE):
cache = MyUWCache()
ok_response = MockHTTP()
ok_response.status = 200
ok_response.data = "xx"
response = cache.getCache('sws', '/student/myuwcachetest1', {})
self.assertEquals(response, None)
cache.processResponse("sws",
"/student/myuwcachetest1",
ok_response)
response = cache.getCache('sws', '/student/myuwcachetest1', {})
self.assertEquals(response["response"].data, 'xx')
cache_entry = CacheEntryTimed.objects.get(
service="sws",
url="/student/myuwcachetest1")
# Cached response is returned after 3 hours and 58 minutes
orig_time_saved = cache_entry.time_saved
cache_entry.time_saved = (orig_time_saved -
timedelta(minutes=(60 * 4)-2))
cache_entry.save()
response = cache.getCache('sws', '/student/myuwcachetest1', {})
self.assertNotEquals(response, None)
# Cached response is not returned after 4 hours and 1 minute
cache_entry.time_saved = (orig_time_saved -
timedelta(minutes=(60 * 4)+1))
cache_entry.save()
response = cache.getCache('sws', '/student/myuwcachetest1', {})
self.assertEquals(response, None)
def test_sws_term_policy(self):
with self.settings(RESTCLIENTS_DAO_CACHE_CLASS=CACHE):
cache = MyUWCache()
ok_response = MockHTTP()
ok_response.status = 200
ok_response.data = "xx"
response = cache.getCache(
'sws', '/student/v5/term/1014,summer.json', {})
self.assertEquals(response, None)
cache.processResponse(
"sws", "/student/v5/term/1014,summer.json", ok_response)
response = cache.getCache(
'sws', '/student/v5/term/1014,summer.json', {})
self.assertEquals(response["response"].data, 'xx')
cache_entry = CacheEntryTimed.objects.get(
service="sws", url="/student/v5/term/1014,summer.json")
# Cached response is returned after 29 days
orig_time_saved = cache_entry.time_saved
cache_entry.time_saved = orig_time_saved - timedelta(days=29)
cache_entry.save()
response = cache.getCache(
'sws', '/student/v5/term/1014,summer.json', {})
self.assertNotEquals(response, None)
# Cached response is not returned after 31 days
cache_entry.time_saved = orig_time_saved - timedelta(days=31)
cache_entry.save()
response = cache.getCache(
'sws', '/student/v5/term/1014,summer.json', {})
self.assertEquals(response, None)
def test_myplan_default(self):
with self.settings(RESTCLIENTS_DAO_CACHE_CLASS=CACHE):
cache = MyUWCache()
ok_response = MockHTTP()
ok_response.status = 200
ok_response.data = "xx"
response = cache.getCache('myplan', '/api/plan/xx', {})
self.assertEquals(response, None)
cache.processResponse("myplan", "/api/plan/xx", ok_response)
response = cache.getCache('myplan', '/api/plan/xx', {})
self.assertEquals(response, None)
def test_default_policies(self):
with self.settings(RESTCLIENTS_DAO_CACHE_CLASS=CACHE):
cache = MyUWCache()
ok_response = MockHTTP()
ok_response.status = 200
ok_response.data = "xx"
response = cache.getCache('no_such', '/student/myuwcachetest1', {})
self.assertEquals(response, None)
cache.processResponse(
"no_such", "/student/myuwcachetest1", ok_response)
response = cache.getCache('no_such', '/student/myuwcachetest1', {})
self.assertEquals(response["response"].data, 'xx')
cache_entry = CacheEntryTimed.objects.get(
service="no_such", url="/student/myuwcachetest1")
# Cached response is returned after 3 hours and 58 minutes
orig_time_saved = cache_entry.time_saved
cache_entry.time_saved = (orig_time_saved -
timedelta(minutes=(60 * 4)-2))
cache_entry.save()
response = cache.getCache('no_such', '/student/myuwcachetest1', {})
self.assertNotEquals(response, None)
# Cached response is not returned after 4 hours and 1 minute
cache_entry.time_saved = (orig_time_saved -
timedelta(minutes=(60 * 4)+1))
cache_entry.save()
response = cache.getCache('no_such', '/student/myuwcachetest1', {})
self.assertEquals(response, None)
|
{
"content_hash": "771cc1e16b0bc81c9d78b81699f9fabe",
"timestamp": "",
"source": "github",
"line_count": 126,
"max_line_length": 79,
"avg_line_length": 42.65873015873016,
"alnum_prop": 0.5696744186046512,
"repo_name": "fanglinfang/myuw",
"id": "cb429ddf6cee6e783b8052274e648301a4c166d9",
"size": "5375",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "myuw/test/cache.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "54427"
},
{
"name": "HTML",
"bytes": "169387"
},
{
"name": "JavaScript",
"bytes": "226000"
},
{
"name": "Python",
"bytes": "403286"
}
],
"symlink_target": ""
}
|
from django import template
from django.contrib.staticfiles.templatetags import staticfiles
register = staticfiles.register
|
{
"content_hash": "209cedd76f4da13b5f62cc27a26a3ed7",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 63,
"avg_line_length": 31.25,
"alnum_prop": 0.864,
"repo_name": "localmed/django-assetfiles",
"id": "ed1111846604ed126c9d4c0bd72fba3fc47324d5",
"size": "125",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "assetfiles/templatetags/staticfiles.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "65851"
},
{
"name": "Ruby",
"bytes": "1673"
}
],
"symlink_target": ""
}
|
import pygtk
pygtk.require('2.0')
import gtk
import gobject
import pango
import pangocairo
from gettext import gettext as _
try:
import gst
_GST_AVAILABLE = True
except ImportError:
# Turtle Art should not fail if gst is not available
_GST_AVAILABLE = False
import os
import subprocess
import errno
from random import uniform
from math import atan2, pi
DEGTOR = 2 * pi / 360
import locale
from taconstants import (HORIZONTAL_PALETTE, VERTICAL_PALETTE, BLOCK_SCALE,
MEDIA_SHAPES, STATUS_SHAPES, OVERLAY_SHAPES,
TOOLBAR_SHAPES, TAB_LAYER, RETURN, OVERLAY_LAYER,
CATEGORY_LAYER, BLOCKS_WITH_SKIN, ICON_SIZE,
PALETTE_SCALE, PALETTE_WIDTH, SKIN_PATHS, MACROS,
TOP_LAYER, BLOCK_LAYER, OLD_NAMES, DEFAULT_TURTLE,
TURTLE_LAYER, EXPANDABLE, NO_IMPORT, TEMPLATES,
PYTHON_SKIN, PALETTE_HEIGHT, STATUS_LAYER, OLD_DOCK,
EXPANDABLE_ARGS, XO1, XO15, XO175, XO30, XO4, TITLEXY,
CONTENT_ARGS, CONSTANTS, EXPAND_SKIN, PROTO_LAYER,
EXPANDABLE_FLOW, SUFFIX)
from tapalette import (palette_names, palette_blocks, expandable_blocks,
block_names, content_blocks, default_values,
special_names, block_styles, help_strings,
hidden_proto_blocks, string_or_number_args,
make_palette, palette_name_to_index,
palette_init_on_start)
from talogo import (LogoCode, primitive_dictionary, logoerror)
from tacanvas import TurtleGraphics
from tablock import (Blocks, Block)
from taturtle import (Turtles, Turtle)
from tautils import (magnitude, get_load_name, get_save_name, data_from_file,
data_to_file, round_int, get_id, get_pixbuf_from_journal,
movie_media_type, audio_media_type, image_media_type,
save_picture, calc_image_size, get_path, hide_button_hit,
show_button_hit, chooser_dialog, arithmetic_check, xy,
find_block_to_run, find_top_block, journal_check,
find_group, find_blk_below, data_to_string,
find_start_stack, get_hardware, debug_output,
error_output, convert, find_hat, find_bot_block,
restore_clamp, collapse_clamp, data_from_string,
increment_name, get_screen_dpi)
from tasprite_factory import (SVG, svg_str_to_pixbuf, svg_from_file)
from sprites import (Sprites, Sprite)
if _GST_AVAILABLE:
from tagplay import stop_media
_MOTION_THRESHOLD = 6
_SNAP_THRESHOLD = 200
_NO_DOCK = (100, 100) # Blocks cannot be docked
_BUTTON_SIZE = 32
_MARGIN = 5
_UNFULLSCREEN_VISIBILITY_TIMEOUT = 2
_PLUGIN_SUBPATH = 'plugins'
_MACROS_SUBPATH = 'macros'
class TurtleArtWindow():
''' TurtleArt Window class abstraction '''
def __init__(self, canvas_window, path, parent=None,
mycolors=None, mynick=None, turtle_canvas=None,
running_sugar=True):
self.parent = parent
self.turtle_canvas = turtle_canvas
self._loaded_project = ''
self._sharing = False
self._timeout_tag = [0]
self.send_event = None # method to send events over the network
self.gst_available = _GST_AVAILABLE
self.running_sugar = False
self.nick = None
if isinstance(canvas_window, gtk.DrawingArea):
self.interactive_mode = True
self.window = canvas_window
self.window.set_flags(gtk.CAN_FOCUS)
self.window.show_all()
if running_sugar:
self.parent.show_all()
self.running_sugar = True
from sugar import profile
self.nick = profile.get_nick_name()
self.macros_path = os.path.join(
get_path(parent, 'data'), _MACROS_SUBPATH)
else:
# Make sure macros_path is somewhere writable
self.macros_path = os.path.join(
os.path.expanduser('~'), 'Activities',
'TurtleArt.activity', _MACROS_SUBPATH)
self._setup_events()
else:
self.interactive_mode = False
self.window = canvas_window
self.running_sugar = False
self.activity = parent
self.path = path
self.load_save_folder = os.path.join(path, 'samples')
self.py_load_save_folder = os.path.join(path, 'pysamples')
self._py_cache = {}
self.used_block_list = [] # Which blocks has the user used?
self.save_folder = None
self.save_file_name = None
self.width = gtk.gdk.screen_width()
self.height = gtk.gdk.screen_height()
self.rect = gtk.gdk.Rectangle(0, 0, 0, 0)
self.no_help = False
self.last_label = None
self._autohide_shape = True
self.keypress = ''
self.keyvalue = 0
self._focus_out_id = None
self._insert_text_id = None
self._text_to_check = False
self.mouse_flag = 0
self.mouse_x = 0
self.mouse_y = 0
self.update_counter = 0
self.running_blocks = False
self.saving_blocks = False
self.copying_blocks = False
self.sharing_blocks = False
self.deleting_blocks = False
try:
locale.setlocale(locale.LC_NUMERIC, '')
except locale.Error:
debug_output('unsupported locale', self.running_sugar)
self.decimal_point = locale.localeconv()['decimal_point']
if self.decimal_point == '' or self.decimal_point is None:
self.decimal_point = '.'
self.orientation = HORIZONTAL_PALETTE
self.hw = get_hardware()
self.lead = 1.0
if self.hw in (XO1, XO15, XO175, XO4):
self.scale = 1.0
self.entry_scale = 0.67
if self.hw == XO1:
self.color_mode = '565'
else:
self.color_mode = '888'
if self.running_sugar and not self.activity.has_toolbarbox:
self.orientation = VERTICAL_PALETTE
else:
self.scale = 1.0
self.entry_scale = 1.0
self.color_mode = '888' # TODO: Read visual mode from gtk image
self._set_screen_dpi()
self.block_scale = BLOCK_SCALE[3]
self.trash_scale = 0.5
self.myblock = {}
self.python_code = None
self.nop = 'nop'
self.loaded = 0
self.step_time = 0
self.hide = False
self.palette = True
self.coord_scale = 1
self.buddies = []
self._saved_string = ''
self._saved_action_name = ''
self._saved_box_name = ''
self.dx = 0
self.dy = 0
self.media_shapes = {}
self.cartesian = False
self.polar = False
self.metric = False
self.overlay_shapes = {}
self.toolbar_shapes = {}
self.toolbar_offset = 0
self.status_spr = None
self.status_shapes = {}
self.toolbar_spr = None
self.palette_sprs = []
self.palettes = []
self.palette_button = []
self.trash_stack = []
self.selected_palette = None
self.previous_palette = None
self.selectors = []
self.selected_selector = None
self.previous_selector = None
self.selector_shapes = []
self.selected_blk = None
self.selected_spr = None
self.selected_turtle = None
self.drag_group = None
self.drag_turtle = 'move', 0, 0
self.drag_pos = 0, 0
self.dragging_canvas = [False, 0, 0]
self.turtle_movement_to_share = None
self.paste_offset = 20 # Don't paste on top of where you copied.
self.block_list = Blocks(font_scale_factor=self.scale,
decimal_point=self.decimal_point)
if self.interactive_mode:
self.sprite_list = Sprites(self.window)
else:
self.sprite_list = None
self.canvas = TurtleGraphics(self, self.width, self.height)
if self.hw == XO175 and self.canvas.width == 1024:
self.hw = XO30
if self.interactive_mode:
self.sprite_list.set_cairo_context(self.canvas.canvas)
self.turtles = Turtles(self.sprite_list)
if self.nick is None:
self.default_turtle_name = DEFAULT_TURTLE
else:
self.default_turtle_name = self.nick
if mycolors is None:
Turtle(self.turtles, self.default_turtle_name)
else:
Turtle(self.turtles, self.default_turtle_name, mycolors.split(','))
self.active_turtle = self.turtles.get_turtle(self.default_turtle_name)
self.active_turtle.show()
self.canvas.clearscreen(False)
self._configure_cb(None)
self._icon_paths = [os.path.join(self.path, 'icons')]
self.turtleart_plugins = []
self._init_plugins()
self.lc = LogoCode(self)
from tabasics import Palettes
Palettes(self)
self._setup_plugins()
if self.interactive_mode:
self._setup_misc()
for name in palette_init_on_start:
debug_output('initing palette %s' % (name), self.running_sugar)
self.show_toolbar_palette(palette_names.index(name),
init_only=False, regenerate=True,
show=False)
self.show_toolbar_palette(0, init_only=False, regenerate=True,
show=True)
self.saved_pictures = []
self.block_operation = ''
def _set_screen_dpi(self):
dpi = get_screen_dpi()
if self.hw in (XO1, XO15, XO175, XO4):
            dpi = 133  # Tweak because of XO display peculiarities
font_map_default = pangocairo.cairo_font_map_get_default()
font_map_default.set_resolution(dpi)
def _tablet_mode(self):
return False # Sugar will autoscroll the window for me
def _get_plugin_home(self):
''' Look in the execution directory '''
path = os.path.join(self.path, _PLUGIN_SUBPATH)
if os.path.exists(path):
return path
else:
return None
def _get_plugins_from_plugins_dir(self, path):
''' Look for plugin files in plugin dir. '''
plugin_files = []
if path is not None:
candidates = os.listdir(path)
candidates.sort()
for dirname in candidates:
pname = os.path.join(path, dirname, dirname + '.py')
if os.path.exists(pname):
plugin_files.append(dirname)
return plugin_files
def _init_plugins(self):
''' Try importing plugin files from the plugin dir. '''
plist = self._get_plugins_from_plugins_dir(self._get_plugin_home())
for plugin_dir in plist:
self.init_plugin(plugin_dir)
def init_plugin(self, plugin_dir):
''' Initialize plugin in plugin_dir '''
plugin_class = plugin_dir.capitalize()
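        # Build the source of a small factory function that imports
        # plugins/<plugin_dir>/<plugin_dir>.py and instantiates the class
        # named after the directory ('foo' -> 'Foo'); it is exec'd below
        # and the instance is appended to self.turtleart_plugins.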
f = "def f(self): from plugins.%s.%s import %s; return %s(self)" \
% (plugin_dir, plugin_dir, plugin_class, plugin_class)
plugins = {}
# NOTE: When debugging plugins, it may be useful to not trap errors
try:
exec f in globals(), plugins
self.turtleart_plugins.append(plugins.values()[0](self))
debug_output('Successfully importing %s' % (plugin_class),
self.running_sugar)
# Add the icon dir to the icon_theme search path
self._add_plugin_icon_dir(os.path.join(self._get_plugin_home(),
plugin_dir))
except Exception as e:
debug_output('Failed to load %s: %s' % (plugin_class, str(e)),
self.running_sugar)
def _add_plugin_icon_dir(self, dirname):
''' If there is an icon subdir, add it to the search path. '''
icon_theme = gtk.icon_theme_get_default()
icon_path = os.path.join(dirname, 'icons')
if os.path.exists(icon_path):
icon_theme.append_search_path(icon_path)
self._icon_paths.append(icon_path)
def _get_plugin_instance(self, plugin_name):
''' Returns the plugin 'plugin_name' instance '''
list_plugins = self._get_plugins_from_plugins_dir(
self._get_plugin_home())
if plugin_name in list_plugins:
number_plugin = list_plugins.index(plugin_name)
return self.turtleart_plugins[number_plugin]
else:
return None
def _setup_plugins(self):
''' Initial setup -- called just once. '''
for plugin in self.turtleart_plugins:
plugin.setup()
def _start_plugins(self):
        ''' Start is called every time we execute blocks. '''
for plugin in self.turtleart_plugins:
plugin.start()
def stop_plugins(self):
''' Stop is called whenever we stop execution. '''
for plugin in self.turtleart_plugins:
plugin.stop()
def clear_plugins(self):
''' Clear is called from the clean block and erase button. '''
for plugin in self.turtleart_plugins:
if hasattr(plugin, 'clear'):
plugin.clear()
def background_plugins(self):
''' Background is called when we are pushed to the background. '''
for plugin in self.turtleart_plugins:
plugin.goto_background()
def foreground_plugins(self):
        ''' Foreground is called when we return from the background. '''
for plugin in self.turtleart_plugins:
plugin.return_to_foreground()
def quit_plugins(self):
''' Quit is called upon program exit. '''
for plugin in self.turtleart_plugins:
plugin.quit()
def _setup_events(self):
''' Register the events we listen to. '''
self.window.add_events(gtk.gdk.BUTTON_PRESS_MASK)
self.window.add_events(gtk.gdk.BUTTON_RELEASE_MASK)
self.window.add_events(gtk.gdk.POINTER_MOTION_MASK)
self.window.add_events(gtk.gdk.KEY_PRESS_MASK)
self.window.connect("expose-event", self._expose_cb)
self.window.connect("button-press-event", self._buttonpress_cb)
self.window.connect("button-release-event", self._buttonrelease_cb)
self.window.connect("motion-notify-event", self._move_cb)
self.window.connect("key-press-event", self._keypress_cb)
gtk.gdk.screen_get_default().connect('size-changed',
self._configure_cb)
target = [("text/plain", 0, 0)]
self.window.drag_dest_set(gtk.DEST_DEFAULT_ALL, target,
gtk.gdk.ACTION_COPY | gtk.gdk.ACTION_MOVE)
self.window.connect('drag_data_received', self._drag_data_received)
def _show_unfullscreen_button(self):
if self.activity._is_fullscreen and \
self.activity.props.enable_fullscreen_mode:
if not self.activity._unfullscreen_button.props.visible:
self.activity._unfullscreen_button.show()
# Reset the timer
if self.activity._unfullscreen_button_timeout_id is not None:
gobject.source_remove(
self.activity._unfullscreen_button_timeout_id)
self.activity._unfullscreen_button_timeout_id = None
self.activity._unfullscreen_button_timeout_id = \
gobject.timeout_add_seconds(_UNFULLSCREEN_VISIBILITY_TIMEOUT,
self.__unfullscreen_button_timeout_cb)
def __unfullscreen_button_timeout_cb(self):
self.activity._unfullscreen_button.hide()
def _drag_data_received(self, w, context, x, y, data, info, time):
''' Handle dragging of block data from clipboard to canvas. '''
debug_output(data.data, True)
if data and data.format == 8 and data.data[0:2] == '[[':
self.process_data(data_from_string(data.data),
self.paste_offset)
self.paste_offset += 20
context.finish(True, False, time)
elif data and data.format == 8 and \
self.selected_blk is not None and \
self.selected_blk.name == 'string':
bounds = self._text_buffer.get_bounds()
self._text_buffer.set_text(
self._text_buffer.get_text(bounds[0], bounds[1]) + data.data)
self.text_entry.set_buffer(self._text_buffer)
context.finish(True, False, time)
else:
context.finish(False, False, time)
def load_media_shapes(self):
''' Media shapes get positioned onto blocks '''
for name in MEDIA_SHAPES:
if name in self.media_shapes:
continue
if name[0:7] == 'journal' and not self.running_sugar:
filename = 'file' + name[7:]
else:
filename = name
# Try both images/ and plugins/*/images/
for path in SKIN_PATHS:
if os.path.exists(os.path.join(self.path, path,
filename + '.svg')):
self.media_shapes[name] = svg_str_to_pixbuf(
svg_from_file(
os.path.join(self.path, path, filename + '.svg')))
break
def _setup_misc(self):
''' Misc. sprites for status, overlays, etc. '''
self.load_media_shapes()
for i, name in enumerate(STATUS_SHAPES):
# Temporary hack to use wider shapes
if name in ['print', 'help', 'status'] and self.width > 1024:
self.status_shapes[name] = svg_str_to_pixbuf(
svg_from_file(
os.path.join(self.path, 'images', name + '1200.svg')))
else:
self.status_shapes[name] = svg_str_to_pixbuf(
svg_from_file(
os.path.join(self.path, 'images', name + '.svg')))
self.status_spr = Sprite(self.sprite_list, 0, self.height - 200,
self.status_shapes['status'])
self.status_spr.hide()
self.status_spr.type = 'status'
self._autohide_shape = True
for name in OVERLAY_SHAPES:
self.overlay_shapes[name] = Sprite(
self.sprite_list,
int(self.width / 2 - 600),
int(self.height / 2 - 450),
svg_str_to_pixbuf(
svg_from_file("%s/images/%s.svg" % (self.path, name))))
self.overlay_shapes[name].hide()
self.overlay_shapes[name].type = 'overlay'
if not self.running_sugar:
# offset = 2 * self.width - 55 * len(TOOLBAR_SHAPES)
offset = 55 * (1 + len(palette_blocks))
for i, name in enumerate(TOOLBAR_SHAPES):
self.toolbar_shapes[name] = Sprite(
self.sprite_list, i * 55 + offset, 0,
svg_str_to_pixbuf(
svg_from_file(
os.path.join(
self.path, 'icons', '%s.svg' % (name)))))
self.toolbar_shapes[name].set_layer(TAB_LAYER)
self.toolbar_shapes[name].name = name
self.toolbar_shapes[name].type = 'toolbar'
self.toolbar_shapes['stopiton'].hide()
def set_sharing(self, shared):
self._sharing = shared
def sharing(self):
return self._sharing
def is_project_empty(self):
''' Check to see if project has any blocks in use '''
return len(self.just_blocks()) == 1
def _configure_cb(self, event):
''' Screen size has changed '''
self.width = gtk.gdk.screen_width()
self.height = gtk.gdk.screen_height()
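        # Recompute the turtle-coordinate constants (origin at the canvas
        # center, scaled by coord_scale) so that constant blocks such as
        # 'leftpos' and 'width' track the new screen size.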
CONSTANTS['titlex'] = int(-(self.width * TITLEXY[0]) /
(self.coord_scale * 2))
CONSTANTS['leftx'] = int(-(self.width * TITLEXY[0]) /
(self.coord_scale * 2))
CONSTANTS['rightx'] = 0
CONSTANTS['titley'] = int((self.height * TITLEXY[1]) /
(self.coord_scale * 2))
CONSTANTS['topy'] = int((self.height * (TITLEXY[1] - 0.125)) /
(self.coord_scale * 2))
CONSTANTS['bottomy'] = 0
CONSTANTS['leftpos'] = int(-self.width / (self.coord_scale * 2))
CONSTANTS['toppos'] = int(self.height / (self.coord_scale * 2))
CONSTANTS['rightpos'] = int(self.width / (self.coord_scale * 2))
CONSTANTS['bottompos'] = int(-self.height / (self.coord_scale * 2))
CONSTANTS['width'] = int(self.width / self.coord_scale)
CONSTANTS['height'] = int(self.height / self.coord_scale)
if event is None:
return
self.activity.check_buttons_for_fit()
# If there are any constant blocks on the canvas, relabel them
for blk in self.just_blocks():
if blk.name in ['leftpos', 'toppos', 'rightpos', 'bottompos',
'width', 'height']:
blk.spr.set_label('%s = %d' % (block_names[blk.name][0],
CONSTANTS[blk.name]))
blk.resize()
def _expose_cb(self, win=None, event=None):
''' Repaint '''
self.do_expose_event(event)
return True
def do_expose_event(self, event=None):
''' Handle the expose-event by drawing '''
# Create the cairo context
cr = self.window.window.cairo_create()
# TODO: set global scale
# find_sprite needs rescaled coordinates
# sw needs new bounds set
# cr.scale(self.activity.global_x_scale, self.activity.global_y_scale)
if event is None:
cr.rectangle(self.rect.x, self.rect.y,
self.rect.width, self.rect.height)
else:
# Restrict Cairo to the exposed area; avoid extra work
cr.rectangle(event.area.x, event.area.y,
event.area.width, event.area.height)
cr.clip()
if self.turtle_canvas is not None:
cr.set_source_surface(self.turtle_canvas)
cr.paint()
# Refresh sprite list
self.sprite_list.redraw_sprites(cr=cr)
def eraser_button(self):
        ''' Eraser button (hide status block when clearing the screen). '''
if self.status_spr is not None:
self.status_spr.hide()
self._autohide_shape = True
self.lc.find_value_blocks() # Are there blocks to update?
self.lc.prim_clear()
self.display_coordinates()
def run_button(self, time, running_from_button_push=False):
''' Run turtle! '''
if self.running_sugar:
self.activity.recenter()
# Look for a 'start' block
for blk in self.just_blocks():
if find_start_stack(blk):
self.step_time = time
if self.running_sugar:
debug_output("running stack starting from %s" % (blk.name),
self.running_sugar)
if running_from_button_push:
self.selected_blk = None
else:
self.selected_blk = blk
self._run_stack(blk)
return
# If there is no 'start' block, run stacks that aren't 'def action'
for blk in self.just_blocks():
if find_block_to_run(blk):
self.step_time = time
if self.running_sugar:
debug_output("running stack starting from %s" % (blk.name),
self.running_sugar)
if running_from_button_push:
self.selected_blk = None
else:
self.selected_blk = blk
self._run_stack(blk)
return
def stop_button(self):
''' Stop button '''
self.lc.stop_logo()
def set_userdefined(self, blk=None):
''' Change icon for user-defined blocks after loading Python code. '''
if blk is not None:
if blk.name in PYTHON_SKIN:
x, y = self._calc_image_offset('pythonon', blk.spr)
blk.set_image(self.media_shapes['pythonon'], x, y)
self._resize_skin(blk)
def set_fullscreen(self):
''' Enter fullscreen mode '''
if self.running_sugar:
self.activity.fullscreen()
self.activity.recenter()
def set_cartesian(self, flag):
''' Turn on/off Cartesian coordinates '''
if self.coord_scale == 1:
self.draw_overlay('Cartesian_labeled')
else:
self.draw_overlay('Cartesian')
return
def set_polar(self, flag):
''' Turn on/off polar coordinates '''
self.draw_overlay('polar')
return
def set_metric(self, flag):
''' Turn on/off metric coordinates '''
self.draw_overlay('metric')
return
def draw_overlay(self, overlay):
''' Draw a coordinate grid onto the canvas. '''
save_heading = self.canvas.heading
self.canvas.heading = 0
w = self.overlay_shapes[overlay].rect[2]
h = self.overlay_shapes[overlay].rect[3]
self.canvas.draw_surface(
self.overlay_shapes[overlay].cached_surfaces[0],
(self.canvas.width - w) / 2.,
(self.canvas.height - h) / 2., w, h)
self.canvas.heading = save_heading
def update_overlay_position(self, widget, event):
''' Reposition the overlays when window size changes '''
self.width = event.width
self.height = event.height
for name in OVERLAY_SHAPES:
shape = self.overlay_shapes[name]
showing = False
if shape in shape._sprites.list:
shape.hide()
showing = True
self.overlay_shapes[name] = Sprite(
self.sprite_list,
int(self.width / 2 - 600),
int(self.height / 2 - 450),
svg_str_to_pixbuf(
svg_from_file("%s/images/%s.svg" % (self.path, name))))
if showing:
self.overlay_shapes[name].set_layer(OVERLAY_LAYER)
else:
self.overlay_shapes[name].hide()
self.overlay_shapes[name].type = 'overlay'
self.cartesian = False
self.polar = False
self.metric = False
self.canvas.width = self.width
self.canvas.height = self.height
self.canvas.move_turtle()
def hideshow_button(self):
''' Hide/show button '''
if not self.hide:
for blk in self.just_blocks():
blk.spr.hide()
self.hide_palette()
self.hide = True
else:
for blk in self.just_blocks():
if blk.status != 'collapsed':
blk.spr.set_layer(BLOCK_LAYER)
self.show_palette()
self.hide = False
if self.running_sugar:
self.activity.recenter()
self.inval_all()
def inval_all(self):
''' Force a refresh '''
if self.interactive_mode:
self.window.queue_draw_area(0, 0, self.width, self.height)
def hideshow_palette(self, state):
''' Hide or show palette '''
if not state:
self.palette = False
if self.running_sugar:
self.activity.do_hidepalette()
self.hide_palette()
else:
self.palette = True
if self.running_sugar:
self.activity.do_showpalette()
self.activity.recenter()
self.show_palette()
def show_palette(self, n=None):
''' Show palette. '''
if n is None:
if self.selected_palette is None:
n = 0
else:
n = self.selected_palette
self.show_toolbar_palette(n)
self.palette_button[self.orientation].set_layer(TAB_LAYER)
self.palette_button[2].set_layer(TAB_LAYER)
self._display_palette_shift_button(n)
if not self.running_sugar or not self.activity.has_toolbarbox:
self.toolbar_spr.set_layer(CATEGORY_LAYER)
self.palette = True
def hide_palette(self):
''' Hide the palette. '''
self._hide_toolbar_palette()
for button in self.palette_button:
button.hide()
if not self.running_sugar or not self.activity.has_toolbarbox:
self.toolbar_spr.hide()
self.palette = False
def move_palettes(self, x, y):
''' Move the palettes. '''
for p in self.palettes:
for blk in p:
blk.spr.move((x + blk.spr.save_xy[0], y + blk.spr.save_xy[1]))
for spr in self.palette_button:
spr.move((x + spr.save_xy[0], y + spr.save_xy[1]))
for p in self.palette_sprs:
if p[0] is not None:
p[0].move((x + p[0].save_xy[0], y + p[0].save_xy[1]))
if p[1] is not None:
p[1].move((x + p[1].save_xy[0], y + p[1].save_xy[1]))
self.status_spr.move((x + self.status_spr.save_xy[0],
y + self.status_spr.save_xy[1]))
# To do: set save_xy for blocks in Trash
for blk in self.trash_stack:
for gblk in find_group(blk):
gblk.spr.move((x + gblk.spr.save_xy[0],
y + gblk.spr.save_xy[1]))
def hideblocks(self):
''' Callback from 'hide blocks' block '''
if not self.interactive_mode:
return
self.hide = False
self.hideshow_button()
if self.running_sugar:
self.activity.do_hide_blocks()
def showblocks(self):
''' Callback from 'show blocks' block '''
if not self.interactive_mode:
return
self.hide = True
self.hideshow_button()
if self.running_sugar:
self.activity.do_show_blocks()
def resize_blocks(self, blocks=None):
        ''' Resize the given blocks, or all blocks if blocks is None '''
if blocks is None:
blocks = self.just_blocks()
# Do the resizing.
for blk in blocks:
blk.rescale(self.block_scale)
for blk in blocks:
self._adjust_dock_positions(blk)
# Resize the skins on some blocks: media content and Python
for blk in blocks:
if blk.name in BLOCKS_WITH_SKIN:
self._resize_skin(blk)
# Resize text_entry widget
if hasattr(self, '_text_entry') and len(blocks) > 0:
font_desc = pango.FontDescription('Sans')
font_desc.set_size(
int(blocks[0].font_size[0] * pango.SCALE * self.entry_scale))
self._text_entry.modify_font(font_desc)
def _shift_toolbar_palette(self, n):
''' Shift blocks on specified palette '''
x, y = self.palette_sprs[n][self.orientation].get_xy()
w, h = self.palette_sprs[n][self.orientation].get_dimensions()
bx, by = self.palettes[n][0].spr.get_xy()
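        # Toggle between the palette's home position and its shifted
        # position: if the first proto block is still at its home offset,
        # shift by the amount the palette overflows the screen; otherwise
        # shift back.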
if self.orientation == 0:
if bx != _BUTTON_SIZE:
dx = w - self.width
else:
dx = self.width - w
dy = 0
else:
dx = 0
if by != self.toolbar_offset + _BUTTON_SIZE + _MARGIN:
dy = h - self.height + ICON_SIZE
else:
dy = self.height - h - ICON_SIZE
for blk in self.palettes[n]:
if blk.get_visibility():
blk.spr.move_relative((dx, dy))
self.palette_button[self.orientation].set_layer(TOP_LAYER)
if dx < 0 or dy < 0:
self.palette_button[self.orientation + 5].set_layer(TOP_LAYER)
self.palette_button[self.orientation + 3].hide()
else:
self.palette_button[self.orientation + 5].hide()
self.palette_button[self.orientation + 3].set_layer(TOP_LAYER)
def show_toolbar_palette(self, n, init_only=False, regenerate=False,
show=True):
''' Show the toolbar palettes, creating them on init_only '''
# If we are running the 0.86+ toolbar, the selectors are already
# created, as toolbar buttons. Otherwise, we need to create them.
if (not self.running_sugar or not self.activity.has_toolbarbox) and \
self.selectors == []:
# First, create the selector buttons
self._create_the_selectors()
# Create the empty palettes that we'll then populate with prototypes.
if self.palette_sprs == []:
self._create_the_empty_palettes()
# At initialization of the program, we don't actually populate
# the palettes.
if init_only:
return
if show:
# Hide the previously displayed palette
self._hide_previous_palette()
else:
save_selected = self.selected_palette
save_previous = self.previous_palette
self.selected_palette = n
self.previous_palette = self.selected_palette
# Make sure all of the selectors are visible. (We don't need to do
# this for 0.86+ toolbars since the selectors are toolbar buttons.)
if show and \
(not self.running_sugar or not self.activity.has_toolbarbox):
self.selected_selector = self.selectors[n]
self.selectors[n].set_shape(self.selector_shapes[n][1])
for i in range(len(palette_blocks)):
self.selectors[i].set_layer(TAB_LAYER)
# Show the palette with the current orientation.
if self.palette_sprs[n][self.orientation] is not None:
self.palette_sprs[n][self.orientation].set_layer(
CATEGORY_LAYER)
self._display_palette_shift_button(n)
# Create 'proto' blocks for each palette entry
self._create_proto_blocks(n)
if show or save_selected == n:
self._layout_palette(n, regenerate=regenerate)
else:
self._layout_palette(n, regenerate=regenerate, show=False)
for blk in self.palettes[n]:
if blk.get_visibility():
if hasattr(blk.spr, 'set_layer'):
blk.spr.set_layer(PROTO_LAYER)
else:
                    debug_output('WARNING: block sprite is None: %s' %
                                 (blk.name), self.running_sugar)
else:
blk.spr.hide()
if n == palette_names.index('trash'):
for blk in self.trash_stack:
# Deprecated
for gblk in find_group(blk):
if gblk.status != 'collapsed':
gblk.spr.set_layer(TAB_LAYER)
if not show:
if not save_selected == n:
self._hide_previous_palette(palette=n)
self.selected_palette = save_selected
self.previous_palette = save_previous
def regenerate_palette(self, n):
''' Regenerate palette (used by some plugins) '''
if (not self.running_sugar or not self.activity.has_toolbarbox) and \
self.selectors == []:
return
if self.palette_sprs == []:
return
save_selected = self.selected_palette
save_previous = self.previous_palette
self.selected_palette = n
self.previous_palette = self.selected_palette
if save_selected == n:
self._layout_palette(n, regenerate=True)
else:
self._layout_palette(n, regenerate=True, show=False)
for blk in self.palettes[n]:
if blk.get_visibility():
if hasattr(blk.spr, 'set_layer'):
blk.spr.set_layer(PROTO_LAYER)
else:
                    debug_output('WARNING: block sprite is None: %s' %
                                 (blk.name), self.running_sugar)
else:
blk.spr.hide()
if not save_selected == n:
self._hide_previous_palette(palette=n)
self.selected_palette = save_selected
self.previous_palette = save_previous
def _display_palette_shift_button(self, n):
''' Palettes too wide (or tall) for the screen get a shift button '''
for i in range(4):
self.palette_button[i + 3].hide()
if self.palette_sprs[n][self.orientation].type == \
'category-shift-horizontal':
self.palette_button[3].set_layer(CATEGORY_LAYER)
elif self.palette_sprs[n][self.orientation].type == \
'category-shift-vertical':
self.palette_button[4].set_layer(CATEGORY_LAYER)
def _create_the_selectors(self):
''' Create the palette selector buttons: only when running
old-style Sugar toolbars or from GNOME '''
svg = SVG()
if self.running_sugar:
x, y = 50, 0 # positioned at the left, top
else:
x, y = 0, 0
for i, name in enumerate(palette_names):
            icon_pathname = None
            for path in self._icon_paths:
if os.path.exists(os.path.join(path, '%soff.svg' % (name))):
icon_pathname = os.path.join(path, '%soff.svg' % (name))
break
if icon_pathname is not None:
off_shape = svg_str_to_pixbuf(svg_from_file(icon_pathname))
else:
off_shape = svg_str_to_pixbuf(
svg_from_file(
os.path.join(
self._icon_paths[0], 'extrasoff.svg')))
error_output('Unable to open %soff.svg' % (name),
self.running_sugar)
            icon_pathname = None
            for path in self._icon_paths:
if os.path.exists(os.path.join(path, '%son.svg' % (name))):
icon_pathname = os.path.join(path, '%son.svg' % (name))
break
if icon_pathname is not None:
on_shape = svg_str_to_pixbuf(svg_from_file(icon_pathname))
else:
on_shape = svg_str_to_pixbuf(
svg_from_file(
os.path.join(
self._icon_paths[0], 'extrason.svg')))
error_output('Unable to open %son.svg' % (name),
self.running_sugar)
self.selector_shapes.append([off_shape, on_shape])
self.selectors.append(Sprite(self.sprite_list, x, y, off_shape))
self.selectors[i].type = 'selector'
self.selectors[i].name = name
self.selectors[i].set_layer(TAB_LAYER)
w = self.selectors[i].get_dimensions()[0]
x += int(w) # running from left to right
# Create the toolbar background for the selectors
self.toolbar_offset = ICON_SIZE
self.toolbar_spr = Sprite(self.sprite_list, 0, 0,
svg_str_to_pixbuf(svg.toolbar(2 * self.width,
ICON_SIZE)))
self.toolbar_spr.type = 'toolbar'
self.toolbar_spr.set_layer(CATEGORY_LAYER)
def _create_the_empty_palettes(self):
''' Create the empty palettes to be populated by prototype blocks. '''
if len(self.palettes) == 0:
for i in range(len(palette_blocks)):
self.palettes.append([])
# Create empty palette backgrounds
for i in palette_names:
self.palette_sprs.append([None, None])
# Create the palette orientation button
self.palette_button.append(
Sprite(
self.sprite_list,
0,
self.toolbar_offset,
svg_str_to_pixbuf(
svg_from_file(
"%s/images/palettehorizontal.svg" % (self.path)))))
self.palette_button.append(
Sprite(
self.sprite_list,
0,
self.toolbar_offset,
svg_str_to_pixbuf(
svg_from_file(
"%s/images/palettevertical.svg" % (self.path)))))
self.palette_button[0].name = _('orientation')
self.palette_button[1].name = _('orientation')
self.palette_button[0].type = 'palette'
self.palette_button[1].type = 'palette'
self.palette_button[self.orientation].set_layer(TAB_LAYER)
self.palette_button[1 - self.orientation].hide()
# Create the palette next button
self.palette_button.append(
Sprite(
self.sprite_list, 16,
self.toolbar_offset,
svg_str_to_pixbuf(
svg_from_file(
"%s/images/palettenext.svg" % (self.path)))))
self.palette_button[2].name = _('next')
self.palette_button[2].type = 'palette'
self.palette_button[2].set_layer(TAB_LAYER)
# Create the palette shift buttons
dims = self.palette_button[0].get_dimensions()
self.palette_button.append(
Sprite(
self.sprite_list,
0,
self.toolbar_offset + dims[1],
svg_str_to_pixbuf(
svg_from_file(
"%s/images/palettehshift.svg" % (self.path)))))
self.palette_button.append(
Sprite(
self.sprite_list,
dims[0],
self.toolbar_offset,
svg_str_to_pixbuf(
svg_from_file(
"%s/images/palettevshift.svg" % (self.path)))))
self.palette_button.append(
Sprite(
self.sprite_list,
0,
self.toolbar_offset + dims[1],
svg_str_to_pixbuf(
svg_from_file(
"%s/images/palettehshift2.svg" % (self.path)))))
self.palette_button.append(
Sprite(
self.sprite_list,
dims[0],
self.toolbar_offset,
svg_str_to_pixbuf(
svg_from_file(
"%s/images/palettevshift2.svg" % (self.path)))))
for i in range(4):
self.palette_button[3 + i].name = _('shift')
self.palette_button[3 + i].type = 'palette'
self.palette_button[3 + i].hide()
def _create_proto_blocks(self, n):
''' Create the protoblocks that will populate a palette. '''
# Reload the palette, but reuse the existing blocks
# If a block doesn't exist, add it
if not n < len(self.palettes):
debug_output(
'_create_proto_blocks: palette index %d is out of range' %
(n), self.running_sugar)
return
for blk in self.palettes[n]:
blk.spr.hide()
old_blocks = self.palettes[n][:]
self.palettes[n] = []
for name in palette_blocks[n]:
found_block = False
for oblk in old_blocks:
if oblk.name == name:
self.palettes[n].append(oblk)
found_block = True
break
if not found_block:
self.palettes[n].append(
Block(self.block_list, self.sprite_list, name, 0, 0,
'proto', [], PALETTE_SCALE))
if name in hidden_proto_blocks:
self.palettes[n][-1].set_visibility(False)
else:
if hasattr(self.palettes[n][-1].spr, 'set_layer'):
self.palettes[n][-1].spr.set_layer(PROTO_LAYER)
self.palettes[n][-1].unhighlight()
else:
                        debug_output('WARNING: block sprite is None: %s' %
(self.palettes[n][-1].name),
self.running_sugar)
# Some proto blocks get a skin.
if name in block_styles['box-style-media']:
self._proto_skin(name + 'small', n, -1)
elif name[:8] == 'template': # Deprecated
self._proto_skin(name[8:], n, -1)
elif name[:7] == 'picture': # Deprecated
self._proto_skin(name[7:], n, -1)
elif name in PYTHON_SKIN:
self._proto_skin('pythonsmall', n, -1)
return
def _hide_toolbar_palette(self):
''' Hide the toolbar palettes '''
self._hide_previous_palette()
if not self.running_sugar or not self.activity.has_toolbarbox:
# Hide the selectors
for i in range(len(palette_blocks)):
self.selectors[i].hide()
elif self.selected_palette is not None and \
not self.activity.has_toolbarbox:
self.activity.palette_buttons[self.selected_palette].set_icon(
palette_names[self.selected_palette] + 'off')
def _hide_previous_palette(self, palette=None):
''' Hide just the previously viewed toolbar palette '''
if palette is None:
palette = self.previous_palette
# Hide previously selected palette
if palette is not None:
if not palette < len(self.palettes):
debug_output(
'_hide_previous_palette: index %d is out of range' %
(palette), self.running_sugar)
return
for proto in self.palettes[palette]:
proto.spr.hide()
if self.palette_sprs[palette][self.orientation] is not None:
self.palette_sprs[palette][self.orientation].hide()
if not self.running_sugar or not self.activity.has_toolbarbox:
self.selectors[palette].set_shape(
self.selector_shapes[palette][0])
elif palette is not None and palette != self.selected_palette \
and not self.activity.has_toolbarbox:
self.activity.palette_buttons[palette].set_icon(
palette_names[palette] + 'off')
if palette == palette_names.index('trash'):
for blk in self.trash_stack:
for gblk in find_group(blk):
gblk.spr.hide()
def _horizontal_layout(self, x, y, blocks):
''' Position prototypes in a horizontal palette. '''
max_w = 0
for blk in blocks:
if not blk.get_visibility():
continue
w, h = self._width_and_height(blk)
if y + h > PALETTE_HEIGHT + self.toolbar_offset:
x += int(max_w + 3)
y = self.toolbar_offset + 3
max_w = 0
(bx, by) = blk.spr.get_xy()
dx = x - bx
dy = y - by
for g in find_group(blk):
g.spr.move_relative((int(dx), int(dy)))
g.spr.save_xy = g.spr.get_xy()
if self.running_sugar and not self.hw in [XO1]:
g.spr.move_relative((self.activity.hadj_value,
self.activity.vadj_value))
y += int(h + 3)
if w > max_w:
max_w = w
return x, y, max_w
def _vertical_layout(self, x, y, blocks):
''' Position prototypes in a vertical palette. '''
row = []
row_w = 0
max_h = 0
for blk in blocks:
if not blk.get_visibility():
continue
w, h = self._width_and_height(blk)
if x + w > PALETTE_WIDTH:
# Recenter row.
dx = int((PALETTE_WIDTH - row_w) / 2)
for r in row:
for g in find_group(r):
g.spr.move_relative((dx, 0))
g.spr.save_xy = (g.spr.save_xy[0] + dx,
g.spr.save_xy[1])
row = []
row_w = 0
x = 4
y += int(max_h + 3)
max_h = 0
row.append(blk)
row_w += (4 + w)
(bx, by) = blk.spr.get_xy()
dx = int(x - bx)
dy = int(y - by)
for g in find_group(blk):
g.spr.move_relative((dx, dy))
g.spr.save_xy = g.spr.get_xy()
if self.running_sugar and not self.hw in [XO1]:
g.spr.move_relative((self.activity.hadj_value,
self.activity.vadj_value))
x += int(w + 4)
if h > max_h:
max_h = h
# Recenter last row.
dx = int((PALETTE_WIDTH - row_w) / 2)
for r in row:
for g in find_group(r):
g.spr.move_relative((dx, 0))
g.spr.save_xy = (g.spr.save_xy[0] + dx, g.spr.save_xy[1])
return x, y, max_h
def _layout_palette(self, n, regenerate=False, show=True):
''' Layout prototypes in a palette. '''
if n is not None:
if self.orientation == HORIZONTAL_PALETTE:
x, y = _BUTTON_SIZE, self.toolbar_offset + _MARGIN
x, y, max_w = self._horizontal_layout(x, y, self.palettes[n])
if n == palette_names.index('trash'):
x, y, max_w = self._horizontal_layout(x + max_w, y,
self.trash_stack)
w = x + max_w + _BUTTON_SIZE + _MARGIN
self._make_palette_spr(n, 0, self.toolbar_offset,
w, PALETTE_HEIGHT, regenerate)
if show:
self.palette_button[2].move(
(w - _BUTTON_SIZE, self.toolbar_offset))
self.palette_button[4].move(
(_BUTTON_SIZE, self.toolbar_offset))
self.palette_button[6].move(
(_BUTTON_SIZE, self.toolbar_offset))
else:
x, y = _MARGIN, self.toolbar_offset + _BUTTON_SIZE + _MARGIN
x, y, max_h = self._vertical_layout(x, y, self.palettes[n])
if n == palette_names.index('trash'):
x, y, max_h = self._vertical_layout(x, y + max_h,
self.trash_stack)
h = y + max_h + _BUTTON_SIZE + _MARGIN - self.toolbar_offset
self._make_palette_spr(n, 0, self.toolbar_offset,
PALETTE_WIDTH, h, regenerate)
if show:
self.palette_button[2].move((PALETTE_WIDTH - _BUTTON_SIZE,
self.toolbar_offset))
self.palette_button[3].move(
(0, self.toolbar_offset + _BUTTON_SIZE))
self.palette_button[5].move(
(0, self.toolbar_offset + _BUTTON_SIZE))
if show:
self.palette_button[2].save_xy = \
self.palette_button[2].get_xy()
if self.running_sugar and not self.hw in [XO1]:
self.palette_button[2].move_relative(
(self.activity.hadj_value, self.activity.vadj_value))
self.palette_sprs[n][self.orientation].set_layer(
CATEGORY_LAYER)
self._display_palette_shift_button(n)
def _make_palette_spr(self, n, x, y, w, h, regenerate=False):
''' Make the background for the palette. '''
if regenerate and not self.palette_sprs[n][self.orientation] is None:
self.palette_sprs[n][self.orientation].hide()
self.palette_sprs[n][self.orientation] = None
if self.palette_sprs[n][self.orientation] is None:
svg = SVG()
self.palette_sprs[n][self.orientation] = \
Sprite(self.sprite_list, x, y, svg_str_to_pixbuf(
svg.palette(w, h)))
self.palette_sprs[n][self.orientation].save_xy = (x, y)
if self.running_sugar and not self.hw in [XO1]:
self.palette_sprs[n][self.orientation].move_relative(
(self.activity.hadj_value, self.activity.vadj_value))
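            # Mark palettes that overflow the screen so that a shift button
            # is displayed for them (see _display_palette_shift_button).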
if self.orientation == 0 and w > self.width:
self.palette_sprs[n][self.orientation].type = \
'category-shift-horizontal'
elif self.orientation == 1 and h > self.height - ICON_SIZE:
self.palette_sprs[n][self.orientation].type = \
'category-shift-vertical'
else:
self.palette_sprs[n][self.orientation].type = 'category'
if n == palette_names.index('trash'):
svg = SVG()
self.palette_sprs[n][self.orientation].set_shape(
svg_str_to_pixbuf(svg.palette(w, h)))
def _buttonpress_cb(self, win, event):
''' Button press '''
self.window.grab_focus()
x, y = xy(event)
self.mouse_flag = 1
self.mouse_x = x
self.mouse_y = y
self.button_press(event.get_state() & gtk.gdk.CONTROL_MASK, x, y)
return True
def button_press(self, mask, x, y):
if self.running_sugar:
self._show_unfullscreen_button()
# Find out what was clicked
spr = self.sprite_list.find_sprite((x, y))
if self.running_blocks:
if spr is not None:
blk = self.block_list.spr_to_block(spr)
if blk is not None:
self.showlabel('status',
label=_('Please hit the Stop Button \
before making changes to your Turtle Blocks program'))
self._autohide_shape = True
return True
self.block_operation = 'click'
self._unselect_all_blocks()
self._hide_status_layer(spr)
self.dx = 0
self.dy = 0
self.dragging_canvas[1] = x
self.dragging_canvas[2] = y
if spr is None:
if not self.running_blocks and not self.hw in [XO1]:
self.dragging_canvas[0] = True
self.dragging_counter = 0
self.dragging_dx = 0
self.dragging_dy = 0
return True
self.dragging_canvas[0] = False
self.selected_spr = spr
if self._look_for_a_blk(spr, x, y):
return True
elif self._look_for_a_turtle(spr, x, y):
return True
elif self._check_for_anything_else(spr, x, y):
return True
def _unselect_all_blocks(self):
# Unselect things that may have been selected earlier
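        # If the user was editing the label of a 'hat' (action) or 'storein'
        # name block, commit the edit: create matching stack/box/storein
        # prototypes when the name was still the default, and relabel other
        # blocks that used the old name.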
if self.selected_blk is not None:
if self._action_name(self.selected_blk, hat=True):
if self.selected_blk.values[0] == _('action'):
self._new_stack_block(self.selected_blk.spr.labels[0])
self._update_action_names(self.selected_blk.spr.labels[0])
elif self._box_name(self.selected_blk, storein=True):
if self.selected_blk.values[0] == _('my box'):
self._new_storein_block(self.selected_blk.spr.labels[0])
self._new_box_block(self.selected_blk.spr.labels[0])
self._update_storein_names(self.selected_blk.spr.labels[0])
self._update_box_names(self.selected_blk.spr.labels[0])
# Un-highlight any blocks in the stack
grp = find_group(self.selected_blk)
for blk in grp:
if blk.status != 'collapsed':
blk.unhighlight()
self._unselect_block()
if self.running_sugar and self._sharing and \
hasattr(self.activity, 'share_button'):
self.activity.share_button.set_tooltip(
_('Select blocks to share'))
self.selected_turtle = None
def _hide_status_layer(self, spr):
# Almost always hide the status layer on a click
if self._autohide_shape and self.status_spr is not None:
self.status_spr.hide()
elif spr == self.status_spr:
self.status_spr.hide()
self._autohide_shape = True
def _look_for_a_blk(self, spr, x, y):
# From the sprite at x, y, look for a corresponding block
blk = self.block_list.spr_to_block(spr)
        # If we were copying and didn't click on a block...
if self.copying_blocks or self.sharing_blocks or self.saving_blocks:
if blk is None or blk.type != 'block':
self.parent.get_window().set_cursor(
gtk.gdk.Cursor(gtk.gdk.LEFT_PTR))
self.copying_blocks = False
self.sharing_blocks = False
self.saving_blocks = False
elif self.deleting_blocks:
if blk is None or blk.type != 'proto':
self.parent.get_window().set_cursor(
gtk.gdk.Cursor(gtk.gdk.LEFT_PTR))
self.deleting_blocks = False
if blk is not None:
if blk.type == 'block':
self.selected_blk = blk
self._block_pressed(x, y, blk)
elif blk.type == 'trash':
self._restore_from_trash(find_top_block(blk))
elif blk.type == 'proto':
if self.deleting_blocks:
if 'myblocks' in palette_names and \
self.selected_palette == \
palette_names.index('myblocks'):
self._delete_stack_alert(blk)
self.parent.get_window().set_cursor(
gtk.gdk.Cursor(gtk.gdk.LEFT_PTR))
self.deleting_blocks = False
elif blk.name == 'restoreall':
self._restore_all_from_trash()
elif blk.name == 'restore':
self.restore_latest_from_trash()
elif blk.name == 'empty':
self._empty_trash()
elif blk.name == 'trashall':
for b in self.just_blocks():
if b.type != 'trash':
if b.name == 'start': # Don't trash start block
b1 = b.connections[-1]
if b1 is not None:
b.connections[-1] = None
b1.connections[0] = None
self._put_in_trash(b1)
else:
self._put_in_trash(find_top_block(b))
self.show_toolbar_palette(palette_names.index('trash'),
regenerate=True)
elif blk.name in MACROS:
self.new_macro(blk.name, x + 20, y + 20)
else:
defaults = None
name = blk.name
# You can only have one instance of some blocks
if blk.name in ['start', 'hat1', 'hat2']:
if len(self.block_list.get_similar_blocks(
'block', blk.name)) > 0:
self.showlabel('dupstack')
return True
                        # We need to check whether there is already a stack
                        # with a similar default name
elif blk.name == 'hat':
similars = self.block_list.get_similar_blocks(
'block', blk.name)
# First look for a hat with _('action') as its label
found_the_action_block = False
bname = _('action')
if isinstance(bname, unicode):
bname = bname.encode('ascii', 'replace')
for sblk in similars:
cblk = sblk.connections[1]
if cblk is not None:
blabel = cblk.spr.labels[0]
if isinstance(blabel, unicode):
blabel = blabel.encode('ascii', 'replace')
if bname == blabel:
found_the_action_block = True
# If there is an action block in use, change the name
if len(similars) > 0 and found_the_action_block:
defaults = [_('action')]
if self._find_proto_name('stack', defaults[0]):
defaults[0] = increment_name(defaults[0])
while self._find_proto_name('stack_%s' %
(defaults[0]),
defaults[0]):
defaults[0] = increment_name(defaults[0])
self._new_stack_block(defaults[0])
# If we autogenerated a stack prototype, we need
# to change its name from 'stack_foo' to 'stack'
elif blk.name[0:6] == 'stack_':
defaults = [blk.name[6:]]
name = 'stack'
# If we autogenerated a box prototype, we need
# to change its name from 'box_foo' to 'box'
elif blk.name[0:4] == 'box_':
defaults = [blk.name[4:]]
name = 'box'
# If we autogenerated a storein prototype, we need
# to change its name from 'storein_foo' to 'foo'
# and label[1] from foo to box
elif blk.name[0:8] == 'storein_':
defaults = [blk.name[8:], 100]
name = 'storein'
# storein my_box gets incremented
elif blk.name == 'storein':
defaults = [_('my box'), 100]
defaults[0] = increment_name(defaults[0])
while self._find_proto_name('storein_%s' %
(defaults[0]),
defaults[0]):
defaults[0] = increment_name(defaults[0])
self._new_storein_block(defaults[0])
self._new_box_block(defaults[0])
name = 'storein'
# You cannot mix and match sensor blocks
elif blk.name in ['sound', 'volume', 'pitch']:
if len(self.block_list.get_similar_blocks(
'block', ['resistance', 'voltage',
'resistance2', 'voltage2'])) > 0:
self.showlabel('incompatible')
return True
elif blk.name in ['resistance', 'voltage',
'resistance2', 'voltage2']:
if len(self.block_list.get_similar_blocks(
'block', ['sound', 'volume', 'pitch'])) > 0:
self.showlabel('incompatible')
return True
if blk.name in ['resistance', 'resistance2']:
if len(self.block_list.get_similar_blocks(
'block', ['voltage', 'voltage2'])) > 0:
self.showlabel('incompatible')
return True
elif blk.name in ['voltage', 'voltage2']:
if len(self.block_list.get_similar_blocks(
'block', ['resistance',
'resistance2'])) > 0:
self.showlabel('incompatible')
return True
blk.highlight()
self._new_block(name, x, y, defaults=defaults)
blk.unhighlight()
return True
return False
def _save_stack_alert(self, name, data, macro_path):
if self.running_sugar:
from sugar.graphics.alert import Alert
from sugar.graphics.icon import Icon
alert = Alert()
alert.props.title = _('Save stack')
alert.props.msg = _('Really overwrite stack?')
cancel_icon = Icon(icon_name='dialog-cancel')
alert.add_button(gtk.RESPONSE_CANCEL, _('Cancel'),
cancel_icon)
stop_icon = Icon(icon_name='dialog-ok')
alert.add_button(gtk.RESPONSE_OK, '%s %s' %
(_('Overwrite stack'), name), stop_icon)
self.activity.add_alert(alert)
alert.connect('response',
self._overwrite_stack_dialog_response_cb, data,
macro_path)
else:
msg = _('Really overwrite stack?')
dialog = gtk.MessageDialog(self.parent, 0, gtk.MESSAGE_WARNING,
gtk.BUTTONS_OK_CANCEL, msg)
dialog.set_title('%s %s' % (_('Overwrite stack'), name))
answer = dialog.run()
dialog.destroy()
if answer == gtk.RESPONSE_OK:
self._save_stack(data, macro_path)
def _overwrite_stack_dialog_response_cb(self, alert, response_id,
data, macro_path):
self.activity.remove_alert(alert)
if response_id == gtk.RESPONSE_OK:
self._save_stack(data, macro_path)
def _save_stack(self, data, macro_path):
data_to_file(data, macro_path)
def _delete_stack_alert(self, blk):
if self.running_sugar:
from sugar.graphics.alert import Alert
from sugar.graphics.icon import Icon
alert = Alert()
alert.props.title = _('Delete stack')
alert.props.msg = _('Really delete stack?')
cancel_icon = Icon(icon_name='dialog-cancel')
alert.add_button(gtk.RESPONSE_CANCEL, _('Cancel'),
cancel_icon)
stop_icon = Icon(icon_name='dialog-ok')
alert.add_button(gtk.RESPONSE_OK, '%s %s' %
(_('Delete stack'), blk.spr.labels[0]), stop_icon)
self.activity.add_alert(alert)
alert.connect('response', self._delete_stack_dialog_response_cb,
blk)
else:
msg = _('Really delete stack?')
dialog = gtk.MessageDialog(self.parent, 0, gtk.MESSAGE_WARNING,
gtk.BUTTONS_OK_CANCEL, msg)
dialog.set_title('%s %s' % (_('Delete stack'), blk.spr.labels[0]))
answer = dialog.run()
dialog.destroy()
if answer == gtk.RESPONSE_OK:
self._delete_stack(blk)
def _delete_stack_dialog_response_cb(self, alert, response_id, blk):
self.activity.remove_alert(alert)
if response_id == gtk.RESPONSE_OK:
self._delete_stack(blk)
def _delete_stack(self, blk):
name = blk.spr.labels[0]
error_output('deleting proto: clicked on %s %s' % (blk.name, name),
self.running_sugar)
macro_path = os.path.join(self.macros_path, '%s.tb' % (name))
if os.path.exists(macro_path):
try:
os.remove(macro_path)
except Exception, e:
error_output('Could not remove macro %s: %s' %
(macro_path, e))
return
i = palette_names.index('myblocks')
palette_blocks[i].remove(blk.name)
for pblk in self.palettes[i]:
if pblk.name == blk.name:
pblk.spr.hide()
self.palettes[i].remove(pblk)
break
self.show_toolbar_palette(i, regenerate=True)
def _look_for_a_turtle(self, spr, x, y):
# Next, look for a turtle
t = self.turtles.spr_to_turtle(spr)
if t is not None:
# If turtle is shared, ignore click
if self.remote_turtle(t.get_name()):
return True
self.selected_turtle = t
self.canvas.set_turtle(self.turtles.get_turtle_key(t))
self._turtle_pressed(x, y)
self.update_counter = 0
return True
return False
def _check_for_anything_else(self, spr, x, y):
# Finally, check for anything else
if hasattr(spr, 'type'):
if spr.type == 'selector':
self._select_category(spr)
elif spr.type in ['category', 'category-shift-horizontal',
'category-shift-vertical']:
if hide_button_hit(spr, x, y):
self.hideshow_palette(False)
elif spr.type == 'palette':
if spr.name == _('next'):
i = self.selected_palette + 1
if i == len(palette_names):
i = 0
if not self.running_sugar or \
not self.activity.has_toolbarbox:
self._select_category(self.selectors[i])
else:
if self.selected_palette is not None and \
not self.activity.has_toolbarbox:
self.activity.palette_buttons[
self.selected_palette].set_icon(
palette_names[self.selected_palette] +
'off')
else:
# select radio button associated with this palette
self.activity.palette_buttons[i].set_active(True)
if not self.activity.has_toolbarbox:
self.activity.palette_buttons[i].set_icon(
palette_names[i] + 'on')
self.show_palette(i)
elif spr.name == _('shift'):
self._shift_toolbar_palette(self.selected_palette)
else:
self.orientation = 1 - self.orientation
self.palette_button[self.orientation].set_layer(TAB_LAYER)
self.palette_button[1 - self.orientation].hide()
self.palette_sprs[self.selected_palette][
1 - self.orientation].hide()
self._layout_palette(self.selected_palette)
self.show_palette(self.selected_palette)
elif spr.type == 'toolbar':
self._select_toolbar_button(spr)
return False
def _update_action_names(self, name):
''' change the label on action blocks of the same name '''
if isinstance(name, (float, int)):
return
if isinstance(name, unicode):
name = name.encode('ascii', 'replace')
for blk in self.just_blocks():
if self._action_name(blk, hat=False):
if blk.spr.labels[0] == self._saved_action_name:
blk.spr.labels[0] = name
blk.values[0] = name
if blk.status == 'collapsed':
blk.spr.hide()
else:
blk.spr.set_layer(BLOCK_LAYER)
self._update_proto_name(name, 'stack_%s' % (self._saved_action_name),
'stack_%s' % (name), 'basic-style-1arg')
def _update_box_names(self, name):
''' change the label on box blocks of the same name '''
if isinstance(name, (float, int)):
return
if isinstance(name, unicode):
name = name.encode('ascii', 'replace')
for blk in self.just_blocks():
if self._box_name(blk, storein=False):
if blk.spr.labels[0] == self._saved_box_name:
blk.spr.labels[0] = name
blk.values[0] = name
if blk.status == 'collapsed':
blk.spr.hide()
else:
blk.spr.set_layer(BLOCK_LAYER)
self._update_proto_name(name, 'box_%s' % (self._saved_box_name),
'box_%s' % (name), 'number-style-1strarg')
def _update_storein_names(self, name):
        ''' change the label on storein blocks of the same name '''
if isinstance(name, (float, int)):
return
if isinstance(name, unicode):
name = name.encode('ascii', 'replace')
for blk in self.just_blocks():
if self._box_name(blk, storein=True):
if blk.spr.labels[0] == self._saved_box_name:
blk.spr.labels[0] = name
blk.values[0] = name
if blk.status == 'collapsed':
blk.spr.hide()
else:
blk.spr.set_layer(BLOCK_LAYER)
self._update_proto_name(name, 'storein_%s' % (self._saved_box_name),
'storein_%s' % (name), 'basic-style-2arg',
label=1)
def _update_proto_name(self, name, old, new, style, palette='blocks',
label=0):
''' Change the name of a proto block '''
# The name change has to happen in multiple places:
# (1) The proto block itself
# (2) The list of block styles
# (3) The list of proto blocks on the palette
# (4) The list of block names
if isinstance(name, unicode):
name = name.encode('ascii', 'replace')
if isinstance(old, unicode):
old = old.encode('ascii', 'replace')
if isinstance(new, unicode):
new = new.encode('ascii', 'replace')
if old == new:
'''
debug_output('update_proto_name: %s == %s' % (old, new),
self.running_sugar)
'''
return
if old in block_styles[style]:
block_styles[style].remove(old)
if not new in block_styles[style]:
block_styles[style].append(new)
if old in block_names:
del block_names[old]
if not new in block_names:
block_names[new] = name
i = palette_name_to_index(palette)
for blk in self.palettes[i]:
if blk.name == old:
blk.name = new
blk.spr.labels[label] = name
blk.spr.set_layer(PROTO_LAYER)
blk.resize()
break # Should only be one proto block by this name
if old in palette_blocks[i]:
palette_blocks[i].remove(old)
if not new in palette_blocks[i]:
palette_blocks[i].append(new)
self.show_toolbar_palette(i, regenerate=True)
def _action_name(self, blk, hat=False):
''' is this a label for an action block? '''
if blk is None:
return False
if blk.name != 'string': # Ignoring int/float names
return False
if blk.connections is None:
return False
if blk.connections[0] is None:
return False
if hat and blk.connections[0].name == 'hat':
return True
if not hat and blk.connections[0].name == 'stack':
return True
return False
def _box_name(self, blk, storein=False):
        ''' is this a label for a box or storein block? '''
if blk is None:
return False
if blk.name != 'string': # Ignoring int names
return False
if blk.connections is None:
return False
if blk.connections[0] is None:
return False
if storein and blk.connections[0].name == 'storein':
if blk.connections[0].connections[1] == blk:
return True
else:
return False
if not storein and blk.connections[0].name == 'box':
return True
return False
def _select_category(self, spr):
''' Select a category from the toolbar '''
i = self.selectors.index(spr)
spr.set_shape(self.selector_shapes[i][1])
if self.selected_selector is not None:
j = self.selectors.index(self.selected_selector)
if i == j:
return
self.selected_selector.set_shape(self.selector_shapes[j][0])
self.previous_selector = self.selected_selector
self.selected_selector = spr
self.show_palette(i)
def _select_toolbar_button(self, spr):
''' Select a toolbar button (Used when not running Sugar). '''
if not hasattr(spr, 'name'):
return
if spr.name == 'run-fastoff':
self.lc.trace = 0
self.hideblocks()
self.display_coordinates(clear=True)
self.run_button(0)
elif spr.name == 'run-slowoff':
self.lc.trace = 1
self.showblocks()
self.run_button(3)
elif spr.name == 'stopiton':
self.stop_button()
self.display_coordinates()
self.showblocks()
self.toolbar_shapes['stopiton'].hide()
elif spr.name == 'eraseron':
self.eraser_button()
elif spr.name == 'hideshowoff':
self.hideshow_button()
def _put_in_trash(self, blk, x=0, y=0):
''' Put a group of blocks into the trash. '''
self.trash_stack.append(blk)
group = find_group(blk)
for gblk in group:
gblk.type = 'trash'
gblk.rescale(self.trash_scale)
blk.spr.move((x, y))
for gblk in group:
self._adjust_dock_positions(gblk)
# And resize any skins.
for gblk in group:
if gblk.name in BLOCKS_WITH_SKIN:
self._resize_skin(gblk)
if self.selected_palette != palette_names.index('trash'):
for gblk in group:
gblk.spr.hide()
# If there was a named hat or storein, remove it from the
# proto palette, the palette name list, the block name list,
# and the style list
for gblk in group:
if (gblk.name == 'hat' or gblk.name == 'storein') and \
gblk.connections is not None and \
gblk.connections[1] is not None and \
gblk.connections[1].name == 'string':
if gblk.name == 'hat':
self._remove_palette_blocks(
'stack_%s' % (gblk.connections[1].values[0]),
'basic-style-1arg')
else: # Only if it was the only one
remove = True
similars = self.block_list.get_similar_blocks(
'block', 'storein')
for blk in similars:
if blk.connections is not None and \
blk.connections[1] is not None and \
blk.connections[1].name == 'string':
if blk.connections[1].values[0] == \
gblk.connections[1].values[0]:
remove = False
similars = self.block_list.get_similar_blocks(
'block', 'box')
for blk in similars:
if blk.connections is not None and \
blk.connections[1] is not None and \
blk.connections[1].name == 'string':
if blk.connections[1].values[0] == \
gblk.connections[1].values[0]:
remove = False
if remove:
self._remove_palette_blocks(
'box_%s' % gblk.connections[1].values[0],
'number-style-1strarg')
self._remove_palette_blocks(
'storein_%s' % gblk.connections[1].values[0],
'basic-style-2arg')
def _remove_palette_blocks(self, name, style, palette='blocks'):
''' Remove blocks from palette and block, style lists '''
        i = palette_name_to_index(palette)
if name in palette_blocks[i]:
palette_blocks[i].remove(name)
for blk in self.palettes[i]:
if blk.name == name:
blk.spr.hide()
self.palettes[i].remove(blk)
self.show_toolbar_palette(i, regenerate=True)
if name in block_styles[style]:
block_styles[style].remove(name)
if name in block_names:
del block_names[name]
def _restore_all_from_trash(self):
''' Restore all the blocks in the trash can. '''
for blk in self.block_list.list:
if blk.type == 'trash':
self._restore_from_trash(blk)
def restore_latest_from_trash(self):
''' Restore most recent blocks from the trash can. '''
if len(self.trash_stack) == 0:
return
        self._restore_from_trash(self.trash_stack[-1])
def _restore_from_trash(self, blk):
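        ''' Restore a group of blocks from the trash can. '''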
group = find_group(blk)
for gblk in group:
if gblk.name == 'sandwichclampcollapsed':
restore_clamp(gblk)
self.resize_parent_clamps(gblk)
for gblk in group:
gblk.rescale(self.block_scale)
gblk.spr.set_layer(BLOCK_LAYER)
x, y = gblk.spr.get_xy()
if self.orientation == 0:
gblk.spr.move((x, y + PALETTE_HEIGHT + self.toolbar_offset))
else:
gblk.spr.move((x + PALETTE_WIDTH, y))
gblk.type = 'block'
for gblk in group:
self._adjust_dock_positions(gblk)
# And resize any skins.
for gblk in group:
if gblk.name in BLOCKS_WITH_SKIN:
self._resize_skin(gblk)
self.trash_stack.remove(blk)
def _empty_trash(self):
''' Permanently remove all blocks presently in the trash can. '''
for blk in self.block_list.list:
if blk.type == 'trash':
blk.type = 'deleted'
blk.spr.hide()
self.trash_stack = []
self.show_toolbar_palette(palette_names.index('trash'),
regenerate=True)
def _in_the_trash(self, x, y):
        ''' Is x, y over the selected palette (used when trashing blocks)? '''
if self.selected_palette is not None and \
self.palette_sprs[self.selected_palette][self.orientation]\
.hit((x, y)):
return True
return False
def _block_pressed(self, x, y, blk):
        ''' A block was pressed: highlight it, build the drag group, and
        handle any pending copy/save/share operation. '''
if blk is not None:
blk.highlight()
self._disconnect(blk)
self.drag_group = find_group(blk)
(sx, sy) = blk.spr.get_xy()
self.drag_pos = x - sx, y - sy
for blk in self.drag_group:
if blk.status != 'collapsed':
blk.spr.set_layer(TOP_LAYER)
if self.copying_blocks or self.sharing_blocks or \
self.saving_blocks:
for blk in self.drag_group:
if blk.status != 'collapsed':
blk.highlight()
self.block_operation = 'copying'
data = self.assemble_data_to_save(False, False)
                if data:
if self.saving_blocks:
debug_output('Serialize blocks and save.',
self.running_sugar)
i = find_hat(data)
if i is not None:
name = ''
try:
name = str(data[data[i][4][1]][1][1])
except:
pass
if name == '':
name = 'stack_%d' % (int(uniform(0, 10000)))
debug_output('saving macro %s' % (name),
self.running_sugar)
if not os.path.exists(self.macros_path):
try:
os.makedirs(self.macros_path)
except OSError, exc:
if exc.errno == errno.EEXIST:
pass
else:
raise
macro_path = os.path.join(
self.macros_path, '%s.tb' % (name))
# Make sure name is unique
if os.path.exists(macro_path):
self._save_stack_alert(name, data, macro_path)
else:
self._save_stack(data, macro_path)
self.drag_group = None
elif self.copying_blocks:
clipboard = gtk.Clipboard()
debug_output('Serialize blocks and copy to clipboard',
self.running_sugar)
text = data_to_string(data)
clipboard.set_text(text)
elif self.sharing():
debug_output('Serialize blocks and send as event',
self.running_sugar)
text = data_to_string(data)
event = 'B|%s' % (data_to_string([self.nick, text]))
self.send_event(event)
self.paste_offset = 20
self.parent.get_window().set_cursor(
gtk.gdk.Cursor(gtk.gdk.LEFT_PTR))
self.saving_blocks = False
if self.running_sugar and self._sharing and \
hasattr(self.activity, 'share_button'):
self.activity.share_button.set_tooltip(
_('Share selected blocks'))
if len(blk.spr.labels) > 0:
self._saved_string = blk.spr.labels[0]
self._saved_action_name = self._saved_string
self._saved_box_name = self._saved_string
else:
self._saved_string = ''
def _unselect_block(self):
''' Unselect block '''
# After unselecting a 'number' block, we need to check its value
if self.selected_blk is None:
return
if self.selected_blk.name == 'number':
if self._text_to_check:
self._test_number()
elif self.selected_blk.name == 'string':
if self._text_to_check:
self._test_string()
self._text_to_check = False
if self._action_name(self.selected_blk, hat=True):
if self._saved_action_name == _('action'):
self._new_stack_block(self.selected_blk.spr.labels[0])
self._update_action_names(self.selected_blk.spr.labels[0])
elif self._box_name(self.selected_blk, storein=True):
if self._saved_box_name == _('my box'):
self._new_storein_block(self.selected_blk.spr.labels[0])
self._new_box_block(self.selected_blk.spr.labels[0])
self._update_storein_names(self.selected_blk.spr.labels[0])
self._update_box_names(self.selected_blk.spr.labels[0])
self.selected_blk.unhighlight()
self.selected_blk = None
def _new_block(self, name, x, y, defaults=None):
''' Make a new block. '''
x_pos = x - 20
y_pos = y - 20
if name in content_blocks:
if defaults is None:
defaults = default_values[name]
newblk = Block(self.block_list, self.sprite_list, name, x_pos,
y_pos, 'block', defaults, self.block_scale)
else:
newblk = Block(self.block_list, self.sprite_list, name, x_pos,
y_pos, 'block', [], self.block_scale)
# Add a 'skin' to some blocks
if name in PYTHON_SKIN:
if self.nop == 'pythonloaded':
self._block_skin('pythonon', newblk)
else:
self._block_skin('pythonoff', newblk)
elif name in block_styles['box-style-media']:
if name in EXPAND_SKIN:
if newblk.ex == 0:
newblk.expand_in_x(EXPAND_SKIN[name][0])
if newblk.ey == 0:
newblk.expand_in_y(EXPAND_SKIN[name][1])
self._block_skin(name + 'off', newblk)
newspr = newblk.spr
newspr.set_layer(TOP_LAYER)
self.drag_pos = 20, 20
newblk.connections = [None] * len(newblk.docks)
if newblk.name in default_values:
if defaults is None:
defaults = default_values[newblk.name]
for i, argvalue in enumerate(defaults):
# skip the first dock position since it is always a connector
dock = newblk.docks[i + 1]
argname = dock[0]
if argname == 'unavailable':
continue
if argname == 'media':
argname = 'journal'
elif argname == 'number' and \
isinstance(argvalue, (str, unicode)):
argname = 'string'
elif argname == 'string' and \
name in block_styles['number-style-1strarg'] and \
isinstance(argvalue, (float, int)):
argname = 'number'
elif argname == 'bool':
argname = argvalue
elif argname == 'flow':
argname = argvalue
(sx, sy) = newspr.get_xy()
if argname is not None:
if argname in content_blocks:
argblk = Block(self.block_list, self.sprite_list,
argname, 0, 0, 'block', [argvalue],
self.block_scale)
else:
argblk = Block(self.block_list, self.sprite_list,
argname, 0, 0, 'block', [],
self.block_scale)
argdock = argblk.docks[0]
nx = sx + dock[2] - argdock[2]
ny = sy + dock[3] - argdock[3]
if argname == 'journal':
self._block_skin('journaloff', argblk)
argblk.spr.move((nx, ny))
argblk.spr.set_layer(TOP_LAYER)
argblk.connections = [newblk, None]
newblk.connections[i + 1] = argblk
self.drag_group = find_group(newblk)
self.block_operation = 'new'
if len(newblk.spr.labels) > 0 and newblk.spr.labels[0] is not None \
and newblk.name not in ['', 'number', 'string']:
if len(self.used_block_list) > 0:
self.used_block_list.append(', ')
if newblk.name in special_names:
self.used_block_list.append(special_names[newblk.name])
elif newblk.spr.labels[0] not in self.used_block_list:
self.used_block_list.append(newblk.spr.labels[0])
def new_macro(self, name, x, y):
''' Create a "macro" (predefined stack of blocks). '''
macro = MACROS[name]
macro[0][2] = x
macro[0][3] = y
top = self.process_data(macro)
self.block_operation = 'new'
self.drag_group = find_group(top)
def process_data(self, block_data, offset=0):
''' Process block_data (from a macro, a file, or the clipboard). '''
self._process_block_data = []
for blk in block_data:
if not self._found_a_turtle(blk):
self._process_block_data.append(
[blk[0], blk[1], blk[2], blk[3], blk[4]])
self._extra_block_data = []
# Create the blocks (or turtle).
blocks = []
for i, blk in enumerate(self._process_block_data):
if not self._found_a_turtle(blk):
newblk = self.load_block(blk, offset)
if newblk is not None:
blocks.append(newblk)
if newblk.spr is not None:
newblk.spr.set_layer(TOP_LAYER)
else:
blocks.append(None)
# Some extra blocks may have been added by load_block
for blk in self._extra_block_data:
self._process_block_data.append(blk)
newblk = self.load_block(blk, offset)
if newblk is not None:
blocks.append(newblk)
if newblk.spr is not None:
newblk.spr.set_layer(TOP_LAYER)
# Make the connections.
for i, blk in enumerate(blocks):
if blk is None:
continue
cons = []
# Normally, it is simply a matter of copying the connections.
if blk.connections is None:
if self._process_block_data[i][4] is not None:
for c in self._process_block_data[i][4]:
if c is None or c > (len(blocks) - 1):
cons.append(None)
else:
cons.append(blocks[c])
else:
debug_output("connection error %s" %
(str(self._process_block_data[i])),
self.running_sugar)
cons.append(None)
elif blk.connections == 'check':
# Convert old-style boolean and arithmetic blocks
cons.append(None) # Add an extra connection.
for c in self._process_block_data[i][4]:
if c is None:
cons.append(None)
else:
cons.append(blocks[c])
# If the boolean op was connected, readjust the plumbing.
if blk.name in block_styles['boolean-style']:
if self._process_block_data[i][4][0] is not None:
c = self._process_block_data[i][4][0]
cons[0] = blocks[self._process_block_data[c][4][0]]
c0 = self._process_block_data[c][4][0]
for j, cj \
in enumerate(self._process_block_data[c0][4]):
if cj == c:
blocks[c0].connections[j] = blk
if c < i:
blocks[c].connections[0] = blk
blocks[c].connections[3] = None
else:
# Connection was to a block we haven't seen yet.
debug_output("Warning: dock to the future",
self.running_sugar)
else:
if self._process_block_data[i][4][0] is not None:
c = self._process_block_data[i][4][0]
cons[0] = blocks[self._process_block_data[c][4][0]]
c0 = self._process_block_data[c][4][0]
for j, cj \
in enumerate(self._process_block_data[c0][4]):
if cj == c:
blocks[c0].connections[j] = blk
if c < i:
blocks[c].connections[0] = blk
blocks[c].connections[1] = None
else:
# Connection was to a block we haven't seen yet.
debug_output("Warning: dock to the future",
self.running_sugar)
else:
debug_output("Warning: unknown connection state %s" %
(str(blk.connections)), self.running_sugar)
blk.connections = cons[:]
# Block sizes and shapes may have changed.
for blk in blocks:
if blk is None:
continue
self._adjust_dock_positions(blk)
# Look for any stacks that need to be collapsed
for blk in blocks:
if blk is None:
continue
if blk.name == 'sandwichclampcollapsed':
collapse_clamp(blk, False)
# process in reverse order
for i in range(len(blocks)):
blk = blocks[-i - 1]
if blk is None:
continue
if blk.name in EXPANDABLE_FLOW:
if blk.name in block_styles['clamp-style-1arg'] or\
blk.name in block_styles['clamp-style-boolean']:
if blk.connections[2] is not None:
self._resize_clamp(blk, blk.connections[2])
elif blk.name in block_styles['clamp-style']:
if blk.connections[1] is not None:
self._resize_clamp(blk, blk.connections[1])
elif blk.name in block_styles['clamp-style-else']:
if blk.connections[2] is not None:
self._resize_clamp(blk, blk.connections[2], dockn=2)
if blk.connections[3] is not None:
self._resize_clamp(blk, blk.connections[3], dockn=3)
# Eliminate None blocks from the block list
blocks_copy = []
for blk in blocks:
if blk is not None:
blocks_copy.append(blk)
blocks = blocks_copy[:]
# Resize blocks to current scale
if self.interactive_mode:
self.resize_blocks(blocks)
if len(blocks) > 0:
return blocks[0]
else:
return None
def _adjust_dock_positions(self, blk):
''' Adjust the dock x, y positions '''
if not self.interactive_mode:
return
(sx, sy) = blk.spr.get_xy()
for i, c in enumerate(blk.connections):
if i > 0 and c is not None and i < len(blk.docks):
bdock = blk.docks[i]
for j in range(len(c.docks)):
if j < len(c.connections) and c.connections[j] == blk:
cdock = c.docks[j]
nx = sx + bdock[2] - cdock[2]
ny = sy + bdock[3] - cdock[3]
c.spr.move((nx, ny))
self._adjust_dock_positions(c)
def _turtle_pressed(self, x, y):
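        ''' A turtle was pressed: decide between rotating and dragging it. '''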
(tx, ty) = self.selected_turtle.get_xy()
w = self.selected_turtle.spr.rect.width / 2
h = self.selected_turtle.spr.rect.height / 2
dx = x - tx - w
dy = y - ty - h
# if x, y is near the edge, rotate
if not hasattr(self.lc, 'value_blocks'):
self.lc.find_value_blocks()
self.lc.update_values = True
if (dx * dx) + (dy * dy) > ((w * w) + (h * h)) / 6:
self.drag_turtle = \
('turn', self.canvas.heading - atan2(dy, dx) / DEGTOR, 0)
else:
self.drag_turtle = ('move', x - tx, y - ty)
def _move_cb(self, win, event):
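        ''' Mouse-move event callback '''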
x, y = xy(event)
self.mouse_x = x
self.mouse_y = y
self._mouse_move(x, y)
return True
def _share_mouse_move(self):
''' Share turtle movement and rotation after button up '''
if self.sharing():
nick = self.turtle_movement_to_share.get_name()
self.send_event("r|%s" %
(data_to_string([nick,
round_int(self.canvas.heading)])))
if self.canvas.pendown:
self.send_event('p|%s' % (data_to_string([nick, False])))
put_pen_back_down = True
else:
put_pen_back_down = False
self.send_event("x|%s" %
(data_to_string([nick,
[round_int(self.canvas.xcor),
round_int(self.canvas.ycor)]])))
if put_pen_back_down:
self.send_event('p|%s' % (data_to_string([nick, True])))
self.turtle_movement_to_share = None
def _mouse_move(self, x, y):
''' Process mouse movements '''
if self.running_sugar and self.dragging_canvas[0]:
# Don't adjust with each mouse move or GTK cannot keep pace.
if self.dragging_counter < 10:
self.dragging_dx += self.dragging_canvas[1] - x
self.dragging_dy += self.dragging_canvas[2] - y
self.dragging_canvas[1] = x
self.dragging_canvas[2] = y
self.dragging_counter += 1
else:
self.activity.adjust_sw(self.dragging_dx, self.dragging_dy)
self.dragging_counter = 0
self.dragging_dx = 0
self.dragging_dy = 0
return True
self.block_operation = 'move'
# First, check to see if we are dragging or rotating a turtle.
if self.selected_turtle is not None:
dtype, dragx, dragy = self.drag_turtle
(sx, sy) = self.selected_turtle.get_xy()
# self.canvas.set_turtle(self.selected_turtle.get_name())
self.update_counter += 1
if dtype == 'move':
dx = x - dragx - sx + self.selected_turtle.spr.rect.width / 2
dy = y - dragy - sy + self.selected_turtle.spr.rect.height / 2
self.selected_turtle.spr.set_layer(TOP_LAYER)
tx, ty = self.canvas.screen_to_turtle_coordinates(sx + dx,
sy + dy)
if self.canvas.pendown:
self.canvas.setpen(False)
self.canvas.setxy(tx, ty, share=False)
self.canvas.setpen(True)
else:
self.canvas.setxy(tx, ty, share=False)
if self.update_counter % 5:
self.lc.update_label_value(
'xcor', self.canvas.xcor / self.coord_scale)
self.lc.update_label_value(
'ycor', self.canvas.ycor / self.coord_scale)
else:
dx = x - sx - self.selected_turtle.spr.rect.width / 2
dy = y - sy - self.selected_turtle.spr.rect.height / 2
self.canvas.seth(int(dragx + atan2(dy, dx) / DEGTOR + 5) /
10 * 10, share=False)
if self.update_counter % 5:
self.lc.update_label_value('heading', self.canvas.heading)
if self.update_counter % 20:
self.display_coordinates()
self.turtle_movement_to_share = self.selected_turtle
        # If we are hovering, show popup help.
elif self.drag_group is None:
self._show_popup(x, y)
return
# If we have a stack of blocks selected, move them.
elif self.drag_group[0] is not None:
blk = self.drag_group[0]
self.selected_spr = blk.spr
dragx, dragy = self.drag_pos
(sx, sy) = blk.spr.get_xy()
dx = x - dragx - sx
dy = y - dragy - sy
# Take no action if there was a move of 0, 0.
if dx == 0 and dy == 0:
return
self.drag_group = find_group(blk)
# Prevent blocks from ending up with a negative x or y
for blk in self.drag_group:
(bx, by) = blk.spr.get_xy()
if bx + dx < 0:
dx = -bx
if by + dy < 0:
dy = -by
# Calculate a bounding box and only invalidate once.
minx = blk.spr.rect.x
miny = blk.spr.rect.y
maxx = blk.spr.rect.x + blk.spr.rect.width
maxy = blk.spr.rect.y + blk.spr.rect.height
for blk in self.drag_group:
if blk.spr.rect.x < minx:
minx = blk.spr.rect.x
if blk.spr.rect.x + blk.spr.rect.width > maxx:
maxx = blk.spr.rect.x + blk.spr.rect.width
if blk.spr.rect.y < miny:
miny = blk.spr.rect.y
if blk.spr.rect.y + blk.spr.rect.height > maxy:
maxy = blk.spr.rect.y + blk.spr.rect.height
blk.spr.rect.x += dx
blk.spr.rect.y += dy
if dx < 0:
minx += dx
else:
maxx += dx
if dy < 0:
miny += dy
else:
maxy += dy
self.rect.x = minx
self.rect.y = miny
self.rect.width = maxx - minx
self.rect.height = maxy - miny
self.window.queue_draw_area(self.rect.x,
self.rect.y,
self.rect.width,
self.rect.height)
self.dx += dx
self.dy += dy
def _show_popup(self, x, y):
''' Let's help our users by displaying a little help. '''
spr = self.sprite_list.find_sprite((x, y))
blk = self.block_list.spr_to_block(spr)
if spr and blk is not None:
if self._timeout_tag[0] == 0:
self._timeout_tag[0] = self._do_show_popup(blk.name)
self.selected_spr = spr
else:
if self._timeout_tag[0] > 0:
try:
gobject.source_remove(self._timeout_tag[0])
self._timeout_tag[0] = 0
except:
self._timeout_tag[0] = 0
elif spr and hasattr(spr, 'type') and \
(spr.type == 'selector' or
spr.type == 'palette' or
spr.type == 'toolbar'):
if self._timeout_tag[0] == 0 and hasattr(spr, 'name'):
self._timeout_tag[0] = self._do_show_popup(spr.name)
self.selected_spr = spr
else:
if self._timeout_tag[0] > 0:
try:
gobject.source_remove(self._timeout_tag[0])
self._timeout_tag[0] = 0
except:
self._timeout_tag[0] = 0
else:
if self._timeout_tag[0] > 0:
try:
gobject.source_remove(self._timeout_tag[0])
self._timeout_tag[0] = 0
except:
self._timeout_tag[0] = 0
def _do_show_popup(self, block_name):
''' Fetch the help text and display it. '''
if self.no_help:
return 0
if block_name in special_names:
special_block_name = special_names[block_name]
elif block_name in block_names:
special_block_name = str(block_names[block_name][0])
elif block_name in TOOLBAR_SHAPES:
special_block_name = ''
else:
special_block_name = _(block_name)
if block_name in help_strings:
label = help_strings[block_name]
else:
label = special_block_name
if self.last_label == label:
return 0
self.showlabel('help', label=label)
self.last_label = label
return 0
def _buttonrelease_cb(self, win, event):
''' Button release '''
x, y = xy(event)
self.mouse_flag = 0
self.mouse_x = x
self.mouse_y = y
self.button_release(x, y)
if self.turtle_movement_to_share is not None:
self._share_mouse_move()
return True
def button_release(self, x, y):
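        ''' Handle a button release: drop a dragged turtle or block group. '''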
if self.running_sugar and self.dragging_canvas[0]:
if self.dragging_counter > 0:
self.activity.adjust_sw(self.dragging_dx, self.dragging_dy)
self.dragging_counter = 0
self.dragging_dx = 0
self.dragging_dy = 0
self.dragging_canvas[0] = False
self.dragging_canvas[1] = x
self.dragging_canvas[2] = y
self.activity.adjust_palette()
return True
# We may have been moving the turtle
if self.selected_turtle is not None:
(tx, ty) = self.selected_turtle.get_xy()
k = self.turtles.get_turtle_key(self.selected_turtle)
# Remove turtles by dragging them onto the trash palette.
if self._in_the_trash(tx, ty):
# If it is the default turtle, just recenter it.
if k == self.default_turtle_name:
self._move_turtle(0, 0)
self.canvas.heading = 0
self.canvas.turn_turtle()
self.lc.update_label_value('heading', self.canvas.heading)
else:
self.selected_turtle.hide()
self.turtles.remove_from_dict(k)
self.active_turtle = None
else:
self._move_turtle(
tx - self.canvas.width / 2. +
self.active_turtle.spr.rect.width / 2.,
self.canvas.height / 2. - ty -
self.active_turtle.spr.rect.height / 2.)
self.selected_turtle = None
if self.active_turtle is None:
self.canvas.set_turtle(self.default_turtle_name)
self.display_coordinates()
return
# If we don't have a group of blocks, then there is nothing to do.
if self.drag_group is None:
return
blk = self.drag_group[0]
# Remove blocks by dragging them onto any palette.
if self.block_operation == 'move' and self._in_the_trash(x, y):
self._put_in_trash(blk, x, y)
self.drag_group = None
return
# Pull a stack of new blocks off of the category palette.
if self.block_operation == 'new':
for gblk in self.drag_group:
(bx, by) = gblk.spr.get_xy()
if self.orientation == 0:
gblk.spr.move((bx + 20,
by + PALETTE_HEIGHT + self.toolbar_offset))
else:
gblk.spr.move((bx + PALETTE_WIDTH, by + 20))
# Look to see if we can dock the current stack.
self._snap_to_dock()
for gblk in self.drag_group:
if gblk.status != 'collapsed':
gblk.spr.set_layer(BLOCK_LAYER)
self.drag_group = None
# Find the block we clicked on and process it.
# Consider a very small move a click (for touch interfaces)
        if self.block_operation == 'click' or \
                (self.hw in [XO175, XO30, XO4] and
                 self.block_operation == 'move' and
                 abs(self.dx) < _MOTION_THRESHOLD and
                 abs(self.dy) < _MOTION_THRESHOLD):
self._click_block(x, y)
elif self.block_operation == 'copying':
gobject.timeout_add(500, self._unhighlight_drag_group, blk)
def _unhighlight_drag_group(self, blk):
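        ''' Unhighlight the group of blocks associated with blk. '''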
self.drag_group = find_group(blk)
for gblk in self.drag_group:
gblk.unhighlight()
self.drag_group = None
def remote_turtle(self, name):
''' Is this a remote turtle? '''
if name == self.nick:
return False
if hasattr(self, 'remote_turtle_dictionary') and \
name in self.remote_turtle_dictionary:
return True
return False
def label_remote_turtle(self, name, colors=['#A0A0A0', '#C0C0C0']):
''' Add a label to remote turtles '''
turtle = self.turtles.get_turtle(name)
if turtle is not None:
turtle.label_block = Block(self.block_list,
self.sprite_list,
'turtle-label',
0,
0,
'label',
[],
2.0 / self.scale,
colors)
turtle.label_block.spr.set_label_attributes(12.0 / self.scale)
if len(name) > 6:
turtle.label_block.spr.set_label(name[0:4] + '…')
else:
turtle.label_block.spr.set_label(name)
turtle.show()
def _move_turtle(self, x, y):
''' Move the selected turtle to (x, y). '''
self.canvas.xcor = x
self.canvas.ycor = y
self.canvas.move_turtle()
if self.interactive_mode:
self.display_coordinates()
if self.running_sugar:
self.selected_turtle.spr.set_layer(TURTLE_LAYER)
self.lc.update_label_value('xcor',
self.canvas.xcor / self.coord_scale)
self.lc.update_label_value('ycor',
self.canvas.ycor / self.coord_scale)
def _click_block(self, x, y):
''' Click block: lots of special cases to handle... '''
blk = self.block_list.spr_to_block(self.selected_spr)
if blk is None:
return
self.selected_blk = blk
if blk.name in ['string', 'number']:
self._saved_string = blk.spr.labels[0]
if not hasattr(self, '_text_entry'):
self._text_entry = gtk.TextView()
self._text_entry.set_justification(gtk.JUSTIFY_CENTER)
self._text_buffer = self._text_entry.get_buffer()
font_desc = pango.FontDescription('Sans')
font_desc.set_size(
int(blk.font_size[0] * pango.SCALE * self.entry_scale))
self._text_entry.modify_font(font_desc)
self.activity.fixed.put(self._text_entry, 0, 0)
self._text_entry.show()
w = blk.spr.label_safe_width()
if blk.name == 'string':
count = self._saved_string.count(RETURN)
self._text_buffer.set_text(
self._saved_string.replace(RETURN, '\12'))
h = blk.spr.label_safe_height() * (count + 1)
else:
self._text_buffer.set_text(self._saved_string)
h = blk.spr.label_safe_height()
self._text_entry.set_size_request(w, h)
bx, by = blk.spr.get_xy()
if not self.running_sugar:
by += self.activity.menu_height + 4 # FIXME: padding
mx, my = blk.spr.label_left_top()
self._text_entry.set_pixels_above_lines(my)
bx -= int(self.activity.sw.get_hadjustment().get_value())
by -= int(self.activity.sw.get_vadjustment().get_value())
self.activity.fixed.move(self._text_entry, bx + mx, by + my * 2)
self.activity.fixed.show()
if blk.name == 'number':
self._insert_text_id = self._text_buffer.connect(
'insert-text', self._insert_text_cb)
self._focus_out_id = self._text_entry.connect(
'focus-out-event', self._text_focus_out_cb)
self._text_entry.grab_focus()
elif blk.name in block_styles['box-style-media'] and \
blk.name not in NO_IMPORT:
self._import_from_journal(self.selected_blk)
if blk.name == 'journal' and self.running_sugar:
self._load_description_block(blk)
elif blk.name == 'identity2' or blk.name == 'hspace':
group = find_group(blk)
if hide_button_hit(blk.spr, x, y):
dx = -20
blk.contract_in_x(-dx)
# dx = blk.reset_x()
elif show_button_hit(blk.spr, x, y):
dx = 20
blk.expand_in_x(dx)
else:
self._run_stack(blk)
return
for gblk in group:
if gblk != blk:
gblk.spr.move_relative((dx * blk.scale, 0))
elif blk.name == 'vspace':
group = find_group(blk)
if hide_button_hit(blk.spr, x, y):
dy = -20
blk.contract_in_y(-dy)
# dy = blk.reset_y()
elif show_button_hit(blk.spr, x, y):
dy = 20
blk.expand_in_y(dy)
else:
self._run_stack(blk)
return
for gblk in group:
if gblk != blk:
gblk.spr.move_relative((0, dy * blk.scale))
self._resize_parent_clamps(blk)
elif blk.name in expandable_blocks:
# Connection may be lost during expansion, so store it...
blk0 = blk.connections[0]
if blk0 is not None:
dock0 = blk0.connections.index(blk)
if hide_button_hit(blk.spr, x, y):
dy = -20
blk.contract_in_y(-dy)
# dy = blk.reset_y()
elif show_button_hit(blk.spr, x, y):
dy = 20
blk.expand_in_y(dy)
else:
self._run_stack(blk)
return
if blk.name in block_styles['boolean-style']:
self._expand_boolean(blk, blk.connections[2], dy)
else:
self._expand_expandable(blk, blk.connections[1], dy)
# and restore it...
if blk0 is not None:
blk.connections[0] = blk0
blk0.connections[dock0] = blk
self._cascade_expandable(blk)
self._resize_parent_clamps(blk)
elif blk.name in EXPANDABLE_ARGS or blk.name == 'nop':
if show_button_hit(blk.spr, x, y):
n = len(blk.connections)
group = find_group(blk.connections[n - 1])
if blk.name == 'myfunc1arg':
blk.spr.labels[1] = 'f(x, y)'
blk.spr.labels[2] = ' '
dy = blk.add_arg()
blk.primitive = 'myfunction2'
blk.name = 'myfunc2arg'
elif blk.name == 'myfunc2arg':
blk.spr.labels[1] = 'f(x, y, z)'
dy = blk.add_arg(False)
blk.primitive = 'myfunction3'
blk.name = 'myfunc3arg'
elif blk.name == 'userdefined':
dy = blk.add_arg()
blk.primitive = 'userdefined2'
blk.name = 'userdefined2args'
self._resize_skin(blk)
elif blk.name == 'userdefined2args':
dy = blk.add_arg(False)
blk.primitive = 'userdefined3'
blk.name = 'userdefined3args'
self._resize_skin(blk)
elif blk.name == 'loadblock':
dy = blk.add_arg()
blk.primitive = 'loadblock2'
blk.name = 'loadblock2arg'
self._resize_skin(blk)
elif blk.name == 'loadblock2arg':
dy = blk.add_arg(False)
blk.primitive = 'loadblock3'
blk.name = 'loadblock3arg'
self._resize_skin(blk)
else:
dy = blk.add_arg()
for gblk in group:
gblk.spr.move_relative((0, dy))
blk.connections.append(blk.connections[n - 1])
argname = blk.docks[n - 1][0]
                argvalue = default_values[blk.name][-1]
argblk = Block(self.block_list, self.sprite_list, argname,
0, 0, 'block', [argvalue], self.block_scale)
argdock = argblk.docks[0]
(bx, by) = blk.spr.get_xy()
nx = bx + blk.docks[n - 1][2] - argdock[2]
ny = by + blk.docks[n - 1][3] - argdock[3]
argblk.spr.move((nx, ny))
argblk.spr.set_layer(TOP_LAYER)
argblk.connections = [blk, None]
blk.connections[n - 1] = argblk
if blk.name in block_styles['number-style-var-arg']:
self._cascade_expandable(blk)
self._resize_parent_clamps(blk)
elif blk.name in PYTHON_SKIN:
self._import_py()
else:
self._run_stack(blk)
elif blk.name == 'sandwichclampcollapsed':
restore_clamp(blk)
if blk.connections[1] is not None:
self._resize_clamp(blk, blk.connections[1], 1)
self._resize_parent_clamps(blk)
elif blk.name == 'sandwichclamp':
if hide_button_hit(blk.spr, x, y):
collapse_clamp(blk, True)
self._resize_parent_clamps(blk)
else:
self._run_stack(blk)
else:
self._run_stack(blk)
def _resize_parent_clamps(self, blk):
''' If we changed size, we need to let any parent clamps know. '''
nblk, dockn = self._expandable_flow_above(blk)
while nblk is not None:
self._resize_clamp(nblk, nblk.connections[dockn], dockn=dockn)
nblk, dockn = self._expandable_flow_above(nblk)
def _expand_boolean(self, blk, blk2, dy):
''' Expand a boolean blk if blk2 is too big to fit. '''
group = find_group(blk2)
for gblk in find_group(blk):
if gblk not in group:
gblk.spr.move_relative((0, -dy * blk.scale))
def _expand_expandable(self, blk, blk2, dy):
''' Expand an expandable blk if blk2 is too big to fit. '''
if blk2 is None:
group = [blk]
else:
group = find_group(blk2)
group.append(blk)
for gblk in find_group(blk):
if gblk not in group:
gblk.spr.move_relative((0, dy * blk.scale))
if blk.name in block_styles['compare-style'] or \
blk.name in block_styles['compare-porch-style']:
for gblk in find_group(blk):
gblk.spr.move_relative((0, -dy * blk.scale))
def _number_style(self, name):
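        ''' Is this a number-style block? '''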
if name in block_styles['number-style']:
return True
if name in block_styles['number-style-porch']:
return True
if name in block_styles['number-style-block']:
return True
if name in block_styles['number-style-var-arg']:
return True
return False
def _cascade_expandable(self, blk):
''' If expanding/shrinking a block, cascade. '''
while self._number_style(blk.name):
if blk.connections[0] is None:
break
if blk.connections[0].name in expandable_blocks:
if blk.connections[0].connections.index(blk) != 1:
break
blk = blk.connections[0]
if blk.connections[1].name == 'myfunc2arg':
dy = 40 + blk.connections[1].ey - blk.ey
elif blk.connections[1].name == 'myfunc3arg':
dy = 60 + blk.connections[1].ey - blk.ey
else:
dy = 20 + blk.connections[1].ey - blk.ey
blk.expand_in_y(dy)
if dy != 0:
group = find_group(blk.connections[1])
group.append(blk)
for gblk in find_group(blk):
if gblk not in group:
gblk.spr.move_relative((0, dy * blk.scale))
if blk.name in block_styles['compare-style'] or \
blk.name in block_styles['compare-porch-style']:
for gblk in find_group(blk):
gblk.spr.move_relative((0, -dy * blk.scale))
else:
break
def _run_stack(self, blk):
''' Run a stack of blocks. '''
if not self.interactive_mode:
# Test for forever block
if len(self.block_list.get_similar_blocks('block', 'forever')) > 0:
                debug_output('WARNING: Projects with forever blocks '
                             'may not terminate.', False)
if self.status_spr is not None:
self.status_spr.hide()
self._autohide_shape = True
if blk is None:
return
self.lc.find_value_blocks() # Are there blocks to update?
# Is there a savesvg block?
if len(self.block_list.get_similar_blocks('block', 'savesvg')) > 0:
if self.canvas.cr_svg is None:
self.canvas.setup_svg_surface()
self.running_blocks = True
self._start_plugins() # Let the plugins know we are running.
top = find_top_block(blk)
code = self.lc.generate_code(top, self.just_blocks())
self.lc.run_blocks(code)
if self.interactive_mode:
gobject.idle_add(self.lc.doevalstep)
else:
while self.lc.doevalstep():
pass
def _snap_to_dock(self):
''' Snap a block (selected_block) to the dock of another block
(destination_block). '''
selected_block = self.drag_group[0]
best_destination = None
d = _SNAP_THRESHOLD
self.inserting_block_mid_stack = False
for selected_block_dockn in range(len(selected_block.docks)):
for destination_block in self.just_blocks():
# Don't link to a block that is hidden
if destination_block.status == 'collapsed':
continue
# Don't link to a block to which you're already connected
if destination_block in self.drag_group:
continue
# Check each dock of destination for a possible connection
for destination_dockn in range(len(destination_block.docks)):
this_xy = self.dock_dx_dy(
destination_block, destination_dockn,
selected_block, selected_block_dockn)
if magnitude(this_xy) > d:
continue
d = magnitude(this_xy)
best_xy = this_xy
best_destination = destination_block
best_destination_dockn = destination_dockn
best_selected_block_dockn = selected_block_dockn
if d < _SNAP_THRESHOLD:
# Some combinations of blocks are not valid
if not arithmetic_check(selected_block, best_destination,
best_selected_block_dockn,
best_destination_dockn):
return
if not journal_check(selected_block, best_destination,
best_selected_block_dockn,
best_destination_dockn):
return
# Move the selected blocks into the docked position
for blk in self.drag_group:
(sx, sy) = blk.spr.get_xy()
blk.spr.move((sx + best_xy[0], sy + best_xy[1]))
blk_in_dock = best_destination.connections[best_destination_dockn]
if self.inserting_block_mid_stack:
# If there was already a block docked there, move it
# to the bottom of the drag group.
if blk_in_dock is not None and blk_in_dock != selected_block:
bot = find_bot_block(self.drag_group[0])
if bot is not None:
blk_in_dock.connections[0] = None
drag_group = find_group(blk_in_dock)
blk_in_dock.connections[0] = bot
bot.connections[-1] = blk_in_dock
dx = bot.spr.get_xy()[0] - \
self.drag_group[0].spr.get_xy()[0] + \
bot.docks[-1][2] - blk_in_dock.docks[0][2]
dy = bot.spr.get_xy()[1] - \
self.drag_group[0].spr.get_xy()[1] + \
bot.docks[-1][3] - blk_in_dock.docks[0][3]
# Move each sprite in the group associated
# with the block we are moving.
for gblk in drag_group:
gblk.spr.move_relative((dx, dy))
else:
# If there was already a block docked there, move it
# to the trash.
if blk_in_dock is not None and blk_in_dock != selected_block:
blk_in_dock.connections[0] = None
self._put_in_trash(blk_in_dock)
# Note the connection in destination dock
best_destination.connections[best_destination_dockn] = \
selected_block
# And in the selected block dock
if selected_block.connections is not None:
if best_selected_block_dockn < len(selected_block.connections):
selected_block.connections[best_selected_block_dockn] = \
best_destination
# Are we renaming an action or variable?
if best_destination.name in ['hat', 'storein'] and \
selected_block.name == 'string' and \
best_destination_dockn == 1:
name = selected_block.values[0]
if best_destination.name == 'storein':
if not self._find_proto_name('storein_%s' % (name), name):
self._new_storein_block(name)
if not self._find_proto_name('box_%s' % (name), name):
self._new_box_block(name)
else: # 'hat'
# Check to see if it is unique...
unique = True
similars = self.block_list.get_similar_blocks(
'block', 'hat')
for blk in similars:
if blk == best_destination:
continue
if blk.connections is not None and \
blk.connections[1] is not None and \
blk.connections[1].name == 'string':
if blk.connections[1].values[0] == name:
unique = False
if not unique:
while self._find_proto_name('stack_%s' % (name), name):
name = increment_name(name)
blk.connections[1].values[0] = name
blk.connections[1].spr.labels[0] = name
blk.resize()
self._new_stack_block(name)
            # Some destination blocks expand to accommodate large blocks
if best_destination.name in block_styles['boolean-style']:
if best_destination_dockn == 2 and \
(selected_block.name in
block_styles['boolean-style'] or
selected_block.name in
block_styles['compare-style'] or
selected_block.name in
block_styles['compare-porch-style']
):
dy = selected_block.ey - best_destination.ey
if selected_block.name in block_styles['boolean-style']:
# Even without expanding, boolean blocks are
# too large to fit in the lower dock position
dy += 45
best_destination.expand_in_y(dy)
self._expand_boolean(best_destination, selected_block, dy)
elif best_destination.name in EXPANDABLE_FLOW:
if best_destination.name in \
block_styles['clamp-style-1arg'] or \
best_destination.name in \
block_styles['clamp-style-boolean']:
if best_destination_dockn == 2:
self._resize_clamp(best_destination,
self.drag_group[0])
elif best_destination.name in block_styles['clamp-style'] or \
best_destination.name in \
block_styles['clamp-style-collapsible']:
if best_destination_dockn == 1:
self._resize_clamp(best_destination,
self.drag_group[0])
elif best_destination.name in block_styles['clamp-style-else']:
if best_destination_dockn == 2:
self._resize_clamp(
best_destination, self.drag_group[0], dockn=2)
elif best_destination_dockn == 3:
self._resize_clamp(
best_destination, self.drag_group[0], dockn=3)
elif best_destination.name in expandable_blocks and \
best_destination_dockn == 1:
dy = 0
if (selected_block.name in expandable_blocks or
selected_block.name in block_styles[
'number-style-var-arg']):
if selected_block.name == 'myfunc2arg':
dy = 40 + selected_block.ey - best_destination.ey
elif selected_block.name == 'myfunc3arg':
dy = 60 + selected_block.ey - best_destination.ey
else:
dy = 20 + selected_block.ey - best_destination.ey
best_destination.expand_in_y(dy)
else:
if best_destination.ey > 0:
dy = best_destination.reset_y()
if dy != 0:
self._expand_expandable(
best_destination, selected_block, dy)
self._cascade_expandable(best_destination)
# If we are in an expandable flow, expand it...
if best_destination is not None:
self._resize_parent_clamps(best_destination)
# Check for while nesting
if best_destination is not None:
while_blk = self._while_in_drag_group(self.drag_group[0])
if while_blk is not None:
self._check_while_nesting(best_destination,
self.drag_group[0], while_blk)
def _while_in_drag_group(self, blk):
''' Is there a contained while or until block? '''
if blk.name in ['while', 'until']:
return blk
return find_blk_below(blk, ['while', 'until'])
def _check_while_nesting(self, blk, dock_blk, while_blk):
''' Is there a containing while or until block? If so, swap them '''
if blk.name in ['while', 'until']:
if blk.connections[2] == dock_blk:
self._swap_while_blocks(blk, while_blk)
while blk.connections[-1] is not None:
blk = blk.connections[-1]
if blk.name in ['while', 'until']:
if blk.connections[2] == dock_blk:
self._swap_while_blocks(blk, while_blk)
dock_blk = blk
def _swap_while_blocks(self, blk1, blk2):
        ''' Swap position in the block list of nested while blocks '''
# Check to see if blk1 comes before blk2 in the block list.
# If so, swap them.
i1 = self.just_blocks().index(blk1)
i2 = self.just_blocks().index(blk2)
if i1 < i2:
self.block_list.swap(blk1, blk2)
def _disconnect(self, blk):
''' Disconnect block from stack above it. '''
if blk is None:
return
if blk.connections is None:
return
if blk.connections[0] is None:
return
c = None
blk2 = blk.connections[0]
if blk in blk2.connections:
c = blk2.connections.index(blk)
blk2.connections[c] = None
blk3, dockn = self._expandable_flow_above(blk)
if blk2.name in block_styles['boolean-style']:
if c == 2 and blk2.ey > 0:
dy = -blk2.ey
blk2.expand_in_y(dy)
self._expand_boolean(blk2, blk, dy)
elif blk2.name in expandable_blocks and c == 1:
if blk2.ey > 0:
dy = blk2.reset_y()
if dy != 0:
self._expand_expandable(blk2, blk, dy)
self._cascade_expandable(blk2)
elif c is not None and blk2.name in EXPANDABLE_FLOW:
if blk2.name in block_styles['clamp-style-1arg'] or\
blk2.name in block_styles['clamp-style-boolean']:
if c == 2:
self._resize_clamp(blk2, None, c)
elif blk2.name in block_styles['clamp-style'] or \
blk2.name in block_styles['clamp-style-collapsible']:
if c == 1:
self._resize_clamp(blk2, None)
elif blk2.name in block_styles['clamp-style-else']:
if c == 2 or c == 3:
self._resize_clamp(blk2, None, dockn=c)
while blk3 is not None and blk3.connections[dockn] is not None:
self._resize_clamp(blk3, blk3.connections[dockn], dockn=dockn)
blk3, dockn = self._expandable_flow_above(blk3)
blk.connections[0] = None
def _resize_clamp(self, blk, gblk, dockn=-2):
''' If the content of a clamp changes, resize it '''
if not self.interactive_mode:
return
if dockn < 0:
dockn = len(blk.docks) + dockn
y1 = blk.docks[-1][3]
if blk.name in block_styles['clamp-style-else'] and dockn == 3:
blk.reset_y2()
else:
blk.reset_y()
dy = 0
# Calculate height of drag group
while gblk is not None:
delta = int((gblk.docks[-1][3] - gblk.docks[0][3]) / gblk.scale)
if delta == 0:
dy += 21 # Fixme: don't hardcode size of stop action block
else:
dy += delta
gblk = gblk.connections[-1]
# Clamp has room for one "standard" block by default
if dy > 0:
dy -= 21 # Fixme: don't hardcode
if blk.name in block_styles['clamp-style-else'] and dockn == 3:
blk.expand_in_y2(dy)
else:
blk.expand_in_y(dy)
y2 = blk.docks[-1][3]
gblk = blk.connections[-1]
# Move group below clamp up or down
if blk.connections[-1] is not None:
drag_group = find_group(blk.connections[-1])
for gblk in drag_group:
gblk.spr.move_relative((0, y2-y1))
# We may have to move the else clamp group down too.
if blk.name in block_styles['clamp-style-else'] and dockn == 2:
if blk.connections[3] is not None:
drag_group = find_group(blk.connections[3])
for gblk in drag_group:
gblk.spr.move_relative((0, y2 - y1))
def _expandable_flow_above(self, blk):
''' Is there an expandable flow block above this one? '''
while blk.connections[0] is not None:
if blk.connections[0].name in EXPANDABLE_FLOW:
if blk.connections[0].name == 'ifelse':
if blk.connections[0].connections[2] == blk:
return blk.connections[0], 2
elif blk.connections[0].connections[3] == blk:
return blk.connections[0], 3
else:
if blk.connections[0].connections[-2] == blk:
return blk.connections[0], -2
blk = blk.connections[0]
return None, None
def _import_from_journal(self, blk):
''' Import a file from the Sugar Journal '''
# TODO: check blk name to set filter
if self.running_sugar:
chooser_dialog(self.parent, '', self._update_media_blk)
else:
fname, self.load_save_folder = get_load_name('.*',
self.load_save_folder)
if fname is None:
return
self._update_media_icon(blk, fname)
def _load_description_block(self, blk):
''' Look for a corresponding description block '''
if blk is None or blk.name != 'journal' or len(blk.values) == 0 or \
blk.connections[0] is None:
return
_blk = blk.connections[0]
dblk = find_blk_below(_blk, 'description')
# Autoupdate the block if it is empty
if dblk is not None and \
(len(dblk.values) == 0 or dblk.values[0] is None):
self._update_media_icon(dblk, None, blk.values[0])
def _update_media_blk(self, dsobject):
''' Called from the chooser to load a media block '''
if dsobject is not None:
self._update_media_icon(self.selected_blk, dsobject,
dsobject.object_id)
dsobject.destroy()
def _update_media_icon(self, blk, name, value=''):
''' Update the icon on a 'loaded' media block. '''
if blk.name == 'journal':
self._load_image_thumb(name, blk)
elif blk.name == 'audio':
self._block_skin('audioon', blk)
elif blk.name == 'video':
self._block_skin('videoon', blk)
else:
self._block_skin('descriptionon', blk)
if value == '':
value = name
if len(blk.values) > 0:
blk.values[0] = value
else:
blk.values.append(value)
blk.spr.set_label(' ')
def _load_image_thumb(self, picture, blk):
''' Replace icon with a preview image. '''
pixbuf = None
self._block_skin('descriptionon', blk)
if self.running_sugar:
w, h = calc_image_size(blk.spr)
pixbuf = get_pixbuf_from_journal(picture, w, h)
else:
if movie_media_type(picture):
self._block_skin('videoon', blk)
blk.name = 'video'
elif audio_media_type(picture):
self._block_skin('audioon', blk)
blk.name = 'audio'
elif image_media_type(picture):
w, h = calc_image_size(blk.spr)
pixbuf = gtk.gdk.pixbuf_new_from_file_at_size(picture, w, h)
else:
blk.name = 'description'
if pixbuf is not None:
x, y = self._calc_image_offset('', blk.spr)
blk.set_image(pixbuf, x, y)
self._resize_skin(blk)
def _keypress_cb(self, area, event):
        ''' Keyboard-press event callback '''
keyname = gtk.gdk.keyval_name(event.keyval)
keyunicode = gtk.gdk.keyval_to_unicode(event.keyval)
if event.get_state() & gtk.gdk.MOD1_MASK:
alt_mask = True
else:
alt_mask = False
self._key_press(alt_mask, keyname, keyunicode)
return keyname
def _key_press(self, alt_mask, keyname, keyunicode):
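        ''' Process a key press: Alt shortcuts, block moves, or turtle jogs. '''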
if keyname is None:
return False
self.keypress = keyname
if alt_mask:
if keyname == "p":
self.hideshow_button()
elif keyname == 'q':
self.quit_plugins()
if self.gst_available:
stop_media(self.lc)
exit()
elif keyname == 'g':
self._align_to_grid()
elif self.selected_blk is not None and \
self.selected_blk.name != 'proto':
self._process_keyboard_commands(keyname, block_flag=True)
elif self.turtles.spr_to_turtle(self.selected_spr) is not None:
self._process_keyboard_commands(keyname, block_flag=False)
return True
def _process_keyboard_commands(self, keyname, block_flag=True):
''' Use the keyboard to move blocks and turtle '''
mov_dict = {'KP_Up': [0, 20], 'j': [0, 20], 'Up': [0, 20],
'KP_Down': [0, -20], 'k': [0, -20], 'Down': [0, -20],
'KP_Left': [-20, 0], 'h': [-20, 0], 'Left': [-20, 0],
'KP_Right': [20, 0], 'l': [20, 0], 'Right': [20, 0],
'KP_Page_Down': [-1, -1], 'Page_Down': [-1, -1],
'KP_Page_Up': [-1, -1], 'Page_Up': [-1, -1],
'KP_End': [0, 0], 'End': [0, 0],
'KP_Home': [0, 0], 'Home': [0, 0], 'space': [0, 0],
'Return': [-1, -1], 'Esc': [-1, -1]}
if keyname not in mov_dict:
return True
if keyname in ['KP_End', 'End']:
self.run_button(self.step_time)
elif self.selected_spr is not None:
if not self.lc.running and block_flag:
blk = self.block_list.spr_to_block(self.selected_spr)
if keyname in ['Return', 'KP_Page_Up', 'Page_Up', 'Esc']:
(x, y) = blk.spr.get_xy()
self._click_block(x, y)
elif keyname in ['KP_Page_Down', 'Page_Down']:
if self.drag_group is None:
self.drag_group = find_group(blk)
self._put_in_trash(blk)
self.drag_group = None
elif keyname in ['KP_Home', 'Home', 'space']:
block = self.block_list.spr_to_block(self.selected_spr)
if block is None:
return True
block.unhighlight()
block = self.block_list.get_next_block_of_same_type(
block)
if block is not None:
self.selected_spr = block.spr
block.highlight()
else:
self._jog_block(blk, mov_dict[keyname][0],
mov_dict[keyname][1])
elif not block_flag:
self._jog_turtle(mov_dict[keyname][0], mov_dict[keyname][1])
# Always exit fullscreen mode if applicable
if self.running_sugar and self.activity.is_fullscreen:
self.activity.unfullscreen()
return True
def _jog_turtle(self, dx, dy):
''' Jog turtle '''
if dx == -1 and dy == -1:
self.canvas.xcor = 0
self.canvas.ycor = 0
else:
self.canvas.xcor += dx
self.canvas.ycor += dy
self.active_turtle = self.turtles.spr_to_turtle(self.selected_spr)
self.canvas.move_turtle()
self.display_coordinates()
self.selected_turtle = None
def _align_to_grid(self, grid=20):
''' Align blocks at the top of stacks to a grid '''
for blk in self.block_list.list:
if blk.type == 'block':
top = find_top_block(blk)
if top == blk:
x = top.spr.get_xy()[0]
y = top.spr.get_xy()[1]
if x < 0:
dx = -x % grid
else:
dx = -(x % grid)
if y < 0:
dy = -y % grid
else:
dy = -(y % grid)
self._jog_block(top, dx, -dy)
def _jog_block(self, blk, dx, dy):
''' Jog block '''
if blk.type == 'proto':
return
if dx == 0 and dy == 0:
return
self._disconnect(blk)
self.drag_group = find_group(blk)
for blk in self.drag_group:
(sx, sy) = blk.spr.get_xy()
if sx + dx < 0:
dx += -(sx + dx)
if sy + dy < 0:
dy += -(sy + dy)
for blk in self.drag_group:
(sx, sy) = blk.spr.get_xy()
blk.spr.move((sx + dx, sy - dy))
self._snap_to_dock()
self.drag_group = None
def _test_number(self):
''' Make sure a 'number' block contains a number. '''
if hasattr(self, '_text_entry'):
bounds = self._text_buffer.get_bounds()
text = self._text_buffer.get_text(bounds[0], bounds[1])
if self._focus_out_id is not None:
self._text_entry.disconnect(self._focus_out_id)
self._focus_out_id = None
if self._insert_text_id is not None:
self._text_buffer.disconnect(self._insert_text_id)
self._insert_text_id = None
self._text_entry.hide()
else:
text = self.selected_blk.spr.labels[0]
self._number_check(text)
def _number_check(self, text):
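        ''' Convert the entered text to a number, falling back to 0. '''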
text = text.strip() # Ignore any whitespace
if text == '':
text = '0'
if text in ['-', '.', '-.', ',', '-,']:
num = 0
elif text is not None:
try:
num = float(text.replace(self.decimal_point, '.'))
if num > 1000000:
num = 1
self.showlabel("#overflowerror")
elif num < -1000000:
num = -1
self.showlabel("#overflowerror")
if int(num) == num:
num = int(num)
except ValueError:
num = 0
self.showlabel("#notanumber")
else:
num = 0
self.selected_blk.spr.set_label(str(num))
try:
self.selected_blk.values[0] = \
float(str(num).replace(self.decimal_point, '.'))
except ValueError:
self.selected_blk.values[0] = float(str(num))
        except IndexError:
            self.selected_blk.values.append(float(str(num)))
def _text_focus_out_cb(self, widget=None, event=None):
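        ''' Text-entry focus-out callback: finish editing the block label. '''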
self._text_to_check = True
self._unselect_block()
def _insert_text_cb(self, textbuffer, textiter, text, length):
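        ''' Text-buffer insert callback: a newline ends editing. '''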
self._text_to_check = True
        if '\12' in text:  # '\12' (octal) is the newline character
self._unselect_block()
def _test_string(self):
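        ''' Copy edited text back into the label and value of a 'string' block. '''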
if hasattr(self, '_text_entry'):
if self._focus_out_id is not None:
self._text_entry.disconnect(self._focus_out_id)
self._focus_out_id = None
bounds = self._text_buffer.get_bounds()
text = self._text_buffer.get_text(bounds[0], bounds[1])
self._text_entry.hide()
else:
text = self.selected_blk.spr.labels[0]
self.selected_blk.spr.set_label(text.replace('\12', RETURN))
self.selected_blk.resize()
self.selected_blk.values[0] = text.replace(RETURN, '\12')
self._saved_string = self.selected_blk.values[0]
def load_python_code_from_file(self, fname=None, add_new_block=True):
''' Load Python code from a file '''
id = None
self.python_code = None
if fname is None:
fname, self.py_load_save_folder = get_load_name(
'.py',
self.py_load_save_folder)
if fname is None:
return id
try:
f = open(fname, 'r')
self.python_code = f.read()
f.close()
id = fname
except IOError:
error_output("Unable to read Python code from %s" % (fname),
self.running_sugar)
return id
# if we are running Sugar, copy the file into the Journal
if self.running_sugar:
if fname in self._py_cache:
id = self._py_cache[fname]
else:
from sugar.datastore import datastore
from sugar import profile
dsobject = datastore.create()
dsobject.metadata['title'] = os.path.basename(fname)
dsobject.metadata['icon-color'] = \
profile.get_color().to_string()
dsobject.metadata['mime_type'] = 'text/x-python'
dsobject.metadata['activity'] = 'org.laptop.Pippy'
dsobject.set_file_path(fname)
try:
datastore.write(dsobject)
id = dsobject.object_id
debug_output("Copied %s to the datastore" % (fname),
self.running_sugar)
# Don't copy the same file more than once
self._py_cache[fname] = id
except IOError:
error_output("Error copying %s to the datastore" % (fname),
self.running_sugar)
id = None
dsobject.destroy()
if add_new_block:
# add a new block for this code at turtle position
(tx, ty) = self.active_turtle.get_xy()
self._new_block('userdefined', tx, ty)
self.myblock[self.block_list.list.index(self.drag_group[0])] =\
self.python_code
self.set_userdefined(self.drag_group[0])
self.drag_group[0].values.append(id)
self.drag_group = None
# Save object ID in block value
if self.selected_blk is not None:
if len(self.selected_blk.values) == 0:
self.selected_blk.values.append(id)
else:
self.selected_blk.values[0] = id
else:
if len(self.selected_blk.values) == 0:
self.selected_blk.values.append(fname)
else:
self.selected_blk.values[0] = fname
return id
def load_python_code_from_journal(self, dsobject, blk=None):
''' Read the Python code from the Journal object '''
self.python_code = None
if dsobject is None:
return
try:
file_handle = open(dsobject.file_path, "r")
self.python_code = file_handle.read()
file_handle.close()
except IOError:
debug_output("couldn't open %s" % dsobject.file_path,
self.running_sugar)
# Save the object id as the block value
if blk is None:
blk = self.selected_blk
if blk is not None:
if len(blk.values) == 0:
blk.values.append(dsobject.object_id)
else:
blk.values[0] = dsobject.object_id
def _import_py(self):
''' Import Python code into a block '''
        if self.running_sugar:
            chooser_dialog(self.parent, 'org.laptop.Pippy',
                           self.load_python_code_from_journal)
else:
self.load_python_code_from_file(fname=None, add_new_block=False)
if self.selected_blk is not None:
self.myblock[self.block_list.list.index(self.selected_blk)] = \
self.python_code
self.set_userdefined(self.selected_blk)
def new_project(self):
''' Start a new project '''
self.lc.stop_logo()
self._loaded_project = ""
# Put current project in the trash.
while len(self.just_blocks()) > 0:
blk = self.just_blocks()[0]
top = find_top_block(blk)
self._put_in_trash(top)
self.canvas.clearscreen()
self.save_file_name = None
def is_new_project(self):
        ''' Is this a new project or was an old project loaded from a file? '''
return self._loaded_project == ""
def project_has_changed(self):
        ''' Has the project changed since it was loaded?
        WARNING: order of JSON serialized data may have changed. '''
try:
f = open(self._loaded_project, 'r')
saved_project_data = f.read()
f.close()
except:
debug_output("problem loading saved project data from %s" %
(self._loaded_project), self.running_sugar)
saved_project_data = ""
current_project_data = data_to_string(self.assemble_data_to_save())
return saved_project_data != current_project_data
def load_files(self, ta_file, create_new_project=True):
''' Load a project from a file '''
if create_new_project:
self.new_project()
self.process_data(data_from_file(ta_file))
self._loaded_project = ta_file
# Always start on the Turtle palette
self.show_toolbar_palette(palette_name_to_index('turtle'))
def load_file_from_chooser(self, create_new_project=True):
''' Load a project from file chooser '''
file_name, self.load_save_folder = get_load_name(
'.t[a-b]',
self.load_save_folder)
if file_name is None:
return
if not file_name.endswith(SUFFIX):
file_name = file_name + SUFFIX[1]
self.load_files(file_name, create_new_project)
if create_new_project:
self.save_file_name = os.path.basename(file_name)
if self.running_sugar:
self.activity.metadata['title'] = os.path.split(file_name)[1]
def _found_a_turtle(self, blk):
''' Either [-1, 'turtle', ...] or [-1, ['turtle', key], ...] '''
if blk[1] == 'turtle':
self.load_turtle(blk)
return True
elif isinstance(blk[1], (list, tuple)) and blk[1][0] == 'turtle':
if blk[1][1] == DEFAULT_TURTLE:
                if self.nick is not None and self.nick != '':
self.load_turtle(blk, self.nick)
else:
self.load_turtle(blk, blk[1][1])
return True
return False
def load_turtle(self, blk, key=1):
''' Restore a turtle from its saved state '''
tid, name, xcor, ycor, heading, color, shade, pensize = blk
self.canvas.set_turtle(key)
self.canvas.setxy(xcor, ycor, pendown=False)
self.canvas.seth(heading)
self.canvas.setcolor(color)
self.canvas.setshade(shade)
self.canvas.setpensize(pensize)
def load_block(self, b, offset=0):
''' Restore individual blocks from saved state '''
if self.running_sugar:
from sugar.datastore import datastore
if b[1] == 0:
return None
# A block is saved as: (i, (btype, value), x, y, (c0,... cn))
# The x, y position is saved/loaded for backward compatibility
btype, value = b[1], None
if isinstance(btype, tuple):
btype, value = btype
elif isinstance(btype, list):
btype, value = btype[0], btype[1]
# Replace deprecated sandwich blocks
if btype == 'sandwichtop_no_label':
btype = 'sandwichclamp'
docks = []
for d in b[4]:
docks.append(d)
docks.append(None)
b[4] = docks
elif btype == 'sandwichtop_no_arm_no_label':
btype = 'sandwichclampcollapsed'
docks = []
for d in b[4]:
docks.append(d)
docks.append(None)
b[4] = docks
# FIXME: blocks after sandwich bottom must be attached to
# sandwich top dock[2], currently set to None
elif btype in ['sandwichbottom', 'sandwichcollapsed']:
btype = 'vspace'
# FIXME: blocks after sandwichtop should be in a sandwich clamp
elif btype in ['sandwichtop', 'sandwichtop_no_arm']:
btype = 'comment'
# Some blocks can only appear once...
if btype in ['start', 'hat1', 'hat2']:
if self._check_for_duplicate(btype):
name = block_names[btype][0]
while self._find_proto_name('stack_%s' % (name), name):
name = increment_name(name)
i = len(self._process_block_data) + len(self._extra_block_data)
self._extra_block_data.append(
[i, ['string', name], 0, 0, [b[0], None]])
# To do: check for a duplicate name
self._new_stack_block(name)
btype = 'hat'
self._process_block_data[b[0]] = [
b[0], b[1], b[2], b[3], [b[4][0], i, b[4][1]]]
elif btype == 'hat':
if b[4][1] < len(self._process_block_data):
i = b[4][1]
name = self._process_block_data[i][1][1]
else:
i = b[4][1] - len(self._process_block_data)
name = self._extra_block_data[i][1][1]
while self._find_proto_name('stack_%s' % (name), name):
name = increment_name(name)
if b[4][1] < len(self._process_block_data):
dblk = self._process_block_data[i]
self._process_block_data[i] = [dblk[0], (dblk[1][0], name),
dblk[2], dblk[3], dblk[4]]
else:
dblk = self._extra_block_data[i]
self._extra_block_data[i] = [dblk[0], (dblk[1][0], name),
dblk[2], dblk[3], dblk[4]]
self._new_stack_block(name)
elif btype == 'storein':
if b[4][1] < len(self._process_block_data):
i = b[4][1]
name = self._process_block_data[i][1][1]
else:
i = b[4][1] - len(self._process_block_data)
name = self._extra_block_data[i][1][1]
if not self._find_proto_name('storein_%s' % (name), name):
self._new_storein_block(name)
if not self._find_proto_name('box_%s' % (name), name):
self._new_box_block(name)
if btype in content_blocks:
if btype == 'number':
try:
values = [round_int(value)]
except ValueError:
values = [0]
else:
values = [value]
else:
values = []
if btype in OLD_DOCK:
check_dock = True
else:
check_dock = False
if btype in OLD_NAMES:
btype = OLD_NAMES[btype]
blk = Block(self.block_list, self.sprite_list, btype,
b[2] + self.canvas.cx + offset,
b[3] + self.canvas.cy + offset,
'block', values, self.block_scale)
# If it was an unknown block type, we need to match the number
# of dock items. TODO: Try to infer the dock type from connections
if blk.unknown and len(b[4]) > len(blk.docks):
debug_output('%s: dock mismatch %d > %d' %
(btype, len(b[4]), len(blk.docks)),
self.running_sugar)
for i in range(len(b[4]) - len(blk.docks)):
blk.docks.append(['unavailable', True, 0, 0])
# Some blocks get transformed.
if btype in block_styles['basic-style-var-arg'] and value is not None:
# Is there code stored in this userdefined block?
if value > 0: # catch deprecated format (#2501)
self.python_code = None
if self.running_sugar:
# For security reasons, only open files found in
# Python samples directory
if os.path.exists(os.path.join(self.path, value)) and \
value[0:9] == 'pysamples':
self.selected_blk = blk
self.load_python_code_from_file(
fname=os.path.join(self.path, value),
add_new_block=False)
self.selected_blk = None
else: # or files from the Journal
try:
dsobject = datastore.get(value)
except: # Should be IOError, but dbus error is raised
dsobject = None
debug_output("couldn't get dsobject %s" % (value),
self.running_sugar)
if dsobject is not None:
self.load_python_code_from_journal(dsobject, blk)
else:
self.selected_blk = blk
self.load_python_code_from_file(fname=value,
add_new_block=False)
self.selected_blk = None
if self.python_code is not None:
self.myblock[self.block_list.list.index(blk)] = \
self.python_code
self.set_userdefined(blk)
if btype == 'string' and blk.spr is not None:
value = blk.values[0]
if isinstance(value, unicode):
value = value.encode('ascii', 'replace')
blk.spr.set_label(value.replace('\n', RETURN))
elif btype == 'start': # block size is saved in start block
if value is not None:
self.block_scale = value
elif btype in block_styles['box-style-media'] and blk.spr is not None:
if btype in EXPAND_SKIN:
if blk.ex == 0:
blk.expand_in_x(EXPAND_SKIN[btype][0])
if blk.ey == 0:
blk.expand_in_y(EXPAND_SKIN[btype][1])
if len(blk.values) == 0 or blk.values[0] == 'None' or \
blk.values[0] is None or btype in NO_IMPORT:
self._block_skin(btype + 'off', blk)
elif btype in ['video', 'audio', 'description']:
self._block_skin(btype + 'on', blk)
elif self.running_sugar:
try:
dsobject = datastore.get(blk.values[0])
if not movie_media_type(dsobject.file_path[-4:]):
w, h, = calc_image_size(blk.spr)
pixbuf = get_pixbuf_from_journal(dsobject, w, h)
if pixbuf is not None:
x, y = self._calc_image_offset('', blk.spr)
blk.set_image(pixbuf, x, y)
else:
self._block_skin('journalon', blk)
dsobject.destroy()
except:
try:
w, h, = calc_image_size(blk.spr)
pixbuf = gtk.gdk.pixbuf_new_from_file_at_size(
blk.values[0], w, h)
x, y = self._calc_image_offset('', blk.spr)
blk.set_image(pixbuf, x, y)
except:
debug_output("Couldn't open dsobject (%s)" %
(blk.values[0]), self.running_sugar)
self._block_skin('journaloff', blk)
else:
if not movie_media_type(blk.values[0][-4:]):
try:
w, h, = calc_image_size(blk.spr)
pixbuf = gtk.gdk.pixbuf_new_from_file_at_size(
blk.values[0], w, h)
x, y = self._calc_image_offset('', blk.spr)
blk.set_image(pixbuf, x, y)
except:
self._block_skin('journaloff', blk)
else:
self._block_skin('journalon', blk)
blk.spr.set_label(' ')
blk.resize()
elif btype in EXPANDABLE or \
btype in expandable_blocks or \
btype in EXPANDABLE_FLOW or \
btype in EXPANDABLE_ARGS or \
btype == 'nop':
if btype == 'vspace' or btype in expandable_blocks:
if value is not None:
blk.expand_in_y(value)
elif btype == 'hspace' or btype == 'identity2':
if value is not None:
blk.expand_in_x(value)
elif btype in EXPANDABLE_FLOW:
if value is not None:
if isinstance(value, int):
blk.expand_in_y(value)
else: # thenelse blocks
blk.expand_in_y(value[0])
blk.expand_in_y2(value[1])
elif btype == 'templatelist' or btype == 'list':
for i in range(len(b[4]) - 4):
blk.add_arg()
elif btype == 'myfunc2arg' or \
btype == 'myfunc3arg' or \
btype == 'userdefined2args' or \
btype == 'userdefined3args' or\
btype == 'loadblock2arg' or \
btype == 'loadblock3arg':
blk.add_arg()
if btype == 'myfunc3arg' or \
btype == 'userdefined3args' or \
btype == 'loadblock3arg':
blk.add_arg(False)
if btype in PYTHON_SKIN:
if self.nop == 'pythonloaded':
self._block_skin('pythonon', blk)
else:
self._block_skin('pythonoff', blk)
if self.interactive_mode:
blk.spr.set_layer(BLOCK_LAYER)
if check_dock:
blk.connections = 'check'
if self.running_sugar and len(blk.spr.labels) > 0 and \
blk.name not in ['', ' ', 'number', 'string']:
if len(self.used_block_list) > 0:
self.used_block_list.append(', ')
if blk.name in special_names:
self.used_block_list.append(special_names[blk.name])
elif blk.spr.labels[0] not in self.used_block_list:
self.used_block_list.append(blk.spr.labels[0])
return blk
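# Editor's illustration (not in the original source): the per-block records
# consumed above have the shape [id, name-or-(name, value), x, y, connections],
# where connections lists the ids of blocks docked to each dock. A minimal
# saved project might look like (block types and coordinates made up):
#
#     [[0, ['start', 2.0], 40, 40, [None, 1]],
#      [1, 'clean', 40, 80, [0, None]]]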
def _check_for_duplicate(self, name):
''' Is there already a block of this name? '''
for blk in self.just_blocks():
if blk.name == name:
return True
return False
def load_start(self, ta_file=None):
''' Start a new project with a 'start' brick '''
if ta_file is None:
self.process_data([[0, "start", PALETTE_WIDTH + 20,
self.toolbar_offset + PALETTE_HEIGHT + 20,
[None, None]]])
else:
self.process_data(data_from_file(ta_file))
def save_file(self, file_name=None):
''' Save the project to a file '''
if self.save_folder is not None:
self.load_save_folder = self.save_folder
if file_name is None:
file_name, self.load_save_folder = get_save_name(
'.t[a-b]', self.load_save_folder, self.save_file_name)
if file_name is None:
return
if not file_name.endswith(SUFFIX):
file_name = file_name + SUFFIX[1]
data_to_file(self.assemble_data_to_save(), file_name)
self.save_file_name = os.path.basename(file_name)
if not self.running_sugar:
self.save_folder = self.load_save_folder
def assemble_data_to_save(self, save_turtle=True, save_project=True):
''' Pack the project (or stack) into a datastream to be serialized '''
data = []
blks = []
if save_project:
blks = self.just_blocks()
else:
if self.selected_blk is None:
return []
blks = find_group(find_top_block(self.selected_blk))
for i, blk in enumerate(blks):
blk.id = i
for blk in blks:
if blk.name in content_blocks:
if len(blk.values) > 0:
name = (blk.name, blk.values[0])
else:
name = (blk.name)
elif blk.name in block_styles['basic-style-var-arg'] and \
len(blk.values) > 0:
name = (blk.name, blk.values[0])
elif blk.name in EXPANDABLE or blk.name in expandable_blocks or \
blk.name in EXPANDABLE_ARGS or blk.name in EXPANDABLE_FLOW:
ex, ey, ey2 = blk.get_expand_x_y()
if blk.name in block_styles['clamp-style-else']:
name = (blk.name, (ey, ey2))
elif ex > 0:
name = (blk.name, ex)
elif ey > 0:
name = (blk.name, ey)
else:
name = (blk.name, 0)
elif blk.name == 'start': # save block_size in start block
name = (blk.name, self.block_scale)
else:
name = (blk.name)
if hasattr(blk, 'connections') and blk.connections is not None:
connections = [get_id(cblk) for cblk in blk.connections]
else:
connections = None
(sx, sy) = blk.spr.get_xy()
# Add a slight offset for copy/paste
if not save_project:
sx += 20
sy += 20
data.append((blk.id, name, sx - self.canvas.cx,
sy - self.canvas.cy, connections))
if save_turtle:
for turtle in iter(self.turtles.dict):
# Don't save remote turtles
if not self.remote_turtle(turtle):
# Save default turtle as 'Yertle'
if turtle == self.nick:
turtle = DEFAULT_TURTLE
data.append(
(-1,
['turtle', turtle],
self.canvas.xcor, self.canvas.ycor,
self.canvas.heading, self.canvas.color,
self.canvas.shade, self.canvas.pensize))
return data
def display_coordinates(self, clear=False):
''' Display the coordinates of the current turtle on the toolbar '''
if clear:
if self.running_sugar:
self.activity.coordinates_label.set_text('')
self.activity.coordinates_label.show()
elif self.interactive_mode:
self.parent.set_title('')
else:
x = round_int(float(self.canvas.xcor) / self.coord_scale)
y = round_int(float(self.canvas.ycor) / self.coord_scale)
h = round_int(self.canvas.heading)
if self.running_sugar:
if int(x) == x and int(y) == y and int(h) == h:
formatting = '(%d, %d) %d'
else:
formatting = '(%0.2f, %0.2f) %0.2f'
self.activity.coordinates_label.set_text(
formatting % (x, y, h))
self.activity.coordinates_label.show()
elif self.interactive_mode:
if int(x) == x and int(y) == y and int(h) == h:
formatting = '%s — %s: %d %s: %d %s: %d'
else:
formatting = '%s — %s: %0.2f %s: %0.2f %s: %0.2f'
self.parent.set_title(
formatting % (self.activity.name, _('xcor'), x,
_('ycor'), y, _('heading'), h))
self.update_counter = 0
def showlabel(self, shp, label=''):
''' Display a message on a status block '''
if not self.interactive_mode:
debug_output(label, self.running_sugar)
return
# Don't overwrite an error message
if not self._autohide_shape:
return
if shp in ['print', 'info', 'help']:
self._autohide_shape = True
else:
self._autohide_shape = False
if shp == 'syntaxerror' and str(label) != '':
if str(label)[1:] in self.status_shapes:
shp = str(label)[1:]
label = ''
else:
shp = 'status'
elif shp[0] == '#':
shp = shp[1:]
label = ''
self.status_spr.set_shape(self.status_shapes[shp])
self.status_spr.set_label_attributes(12.0, rescale=False)
if shp == 'status':
self.status_spr.set_label('"%s"' % (str(label)))
else:
self.status_spr.set_label(str(label))
self.status_spr.set_layer(STATUS_LAYER)
if shp == 'info':
self.status_spr.move((PALETTE_WIDTH, self.height - 400))
else:
# Adjust vertical position based on scrolled window adjustment
if self.running_sugar:
self.status_spr.move(
(0,
self.height - 200 +
self.activity.sw.get_vadjustment().get_value()))
elif self.interactive_mode:
self.status_spr.move((0, self.height - 100))
def calc_position(self, template):
''' Relative placement of portfolio objects (deprecated) '''
w, h, x, y, dx, dy = TEMPLATES[template]
x *= self.canvas.width
y *= self.canvas.height
w *= (self.canvas.width - x)
h *= (self.canvas.height - y)
dx *= w
dy *= h
return(w, h, x, y, dx, dy)
def save_for_upload(self, file_name):
''' Grab the current canvas and save it for upload '''
if not file_name.endswith(SUFFIX):
ta_file = file_name + SUFFIX[1]
image_file = file_name + '.png'
else:
ta_file = file_name
image_file = file_name[0:-3] + '.png'
data_to_file(self.assemble_data_to_save(), ta_file)
save_picture(self.canvas, image_file)
return ta_file, image_file
def save_as_image(self, name="", svg=False):
''' Grab the current canvas and save it. '''
if svg:
suffix = '.svg'
else:
suffix = '.png'
if not self.interactive_mode:
save_picture(self.canvas, name[:-3] + suffix)
return
if self.running_sugar:
if len(name) == 0:
filename = 'turtleblocks' + suffix
else:
filename = name + suffix
datapath = get_path(self.activity, 'instance')
elif len(name) == 0:
name = 'turtleblocks' + suffix
if self.save_folder is not None:
self.load_save_folder = self.save_folder
filename, self.load_save_folder = get_save_name(
suffix, self.load_save_folder, name)
datapath = self.load_save_folder
else:
datapath = os.getcwd()
filename = name + suffix
if filename is None:
return
file_path = os.path.join(datapath, filename)
if svg:
if self.canvas.cr_svg is None:
return
self.canvas.svg_reset()
else:
save_picture(self.canvas, file_path)
if self.running_sugar:
from sugar.datastore import datastore
from sugar import profile
dsobject = datastore.create()
if len(name) == 0:
dsobject.metadata['title'] = "%s %s" % \
(self.activity.metadata['title'], _("image"))
else:
dsobject.metadata['title'] = name
dsobject.metadata['icon-color'] = profile.get_color().to_string()
if svg:
dsobject.metadata['mime_type'] = 'image/svg+xml'
dsobject.set_file_path(os.path.join(datapath, 'output.svg'))
else:
dsobject.metadata['mime_type'] = 'image/png'
dsobject.set_file_path(file_path)
datastore.write(dsobject)
dsobject.destroy()
self.saved_pictures.append((dsobject.object_id, svg))
if svg:
os.remove(os.path.join(datapath, 'output.svg'))
else:
os.remove(file_path)
else:
if svg:
subprocess.check_output(
['mv', os.path.join(datapath, 'output.svg'),
os.path.join(datapath, filename)])
self.saved_pictures.append((file_path, svg))
def just_blocks(self):
''' Filter out 'proto', 'trash', and 'deleted' blocks '''
just_blocks_list = []
for blk in self.block_list.list:
if blk.type == 'block':
just_blocks_list.append(blk)
return just_blocks_list
def just_protos(self):
''' Filter out 'block', 'trash', and 'deleted' blocks '''
just_protos_list = []
for blk in self.block_list.list:
if blk.type == 'proto':
just_protos_list.append(blk)
return just_protos_list
def _width_and_height(self, blk):
''' What are the width and height of a stack? '''
minx = 10000
miny = 10000
maxx = -10000
maxy = -10000
for gblk in find_group(blk):
(x, y) = gblk.spr.get_xy()
w, h = gblk.spr.get_dimensions()
if x < minx:
minx = x
if y < miny:
miny = y
if x + w > maxx:
maxx = x + w
if y + h > maxy:
maxy = y + h
return(maxx - minx, maxy - miny)
# Utilities related to putting a image 'skin' on a block
def _calc_image_offset(self, name, spr, iw=0, ih=0):
''' Calculate the position for placing an image onto a sprite. '''
_l, _t = spr.label_left_top()
if name == '':
return _l, _t
_w = spr.label_safe_width()
_h = spr.label_safe_height()
if iw == 0:
iw = self.media_shapes[name].get_width()
ih = self.media_shapes[name].get_height()
return int(_l + (_w - iw) / 2), int(_t + (_h - ih) / 2)
def _calc_w_h(self, name, spr):
''' Calculate new image size '''
target_w = spr.label_safe_width()
target_h = spr.label_safe_height()
if name == '':
return target_w, target_h
image_w = self.media_shapes[name].get_width()
image_h = self.media_shapes[name].get_height()
scale_factor = float(target_w) / image_w
new_w = target_w
new_h = image_h * scale_factor
if new_h > target_h:
scale_factor = float(target_h) / new_h
new_h = target_h
new_w = target_w * scale_factor
return int(new_w), int(new_h)
def _proto_skin(self, name, n, i):
''' Utility for calculating proto skin images '''
x, y = self._calc_image_offset(name, self.palettes[n][i].spr)
self.palettes[n][i].spr.set_image(self.media_shapes[name], 1, x, y)
def _block_skin(self, name, blk):
''' Some blocks get a skin '''
x, y = self._calc_image_offset(name, blk.spr)
blk.set_image(self.media_shapes[name], x, y)
self._resize_skin(blk)
def _resize_skin(self, blk):
''' Resize the 'skin' when block scale changes. '''
if blk.name in PYTHON_SKIN:
w, h = self._calc_w_h('pythonoff', blk.spr)
x, y = self._calc_image_offset('pythonoff', blk.spr, w, h)
elif blk.name == 'journal':
if len(blk.values) == 1 and blk.values[0] is not None:
w, h = self._calc_w_h('', blk.spr)
x, y = self._calc_image_offset('journaloff', blk.spr, w, h)
else:
w, h = self._calc_w_h('journaloff', blk.spr)
x, y = self._calc_image_offset('journaloff', blk.spr, w, h)
else:
# w, h = self._calc_w_h('descriptionoff', blk.spr)
w, h = self._calc_w_h('', blk.spr)
# x, y = self._calc_image_offset('descriptionoff', blk.spr, w, h)
x, y = self._calc_image_offset('', blk.spr, w, h)
blk.scale_image(x, y, w, h)
def _find_proto_name(self, name, label, palette='blocks'):
''' Look for a protoblock with this name '''
if isinstance(name, unicode):
name = name.encode('ascii', 'replace')
if isinstance(label, unicode):
label = label.encode('ascii', 'replace')
i = palette_name_to_index(palette)
for blk in self.palettes[i]:
blk_label = blk.spr.labels[0]
if isinstance(blk.name, unicode):
blk.name = blk.name.encode('ascii', 'replace')
if isinstance(blk_label, unicode):
blk_label = blk_label.encode('ascii', 'replace')
if blk.name == name and blk_label == label:
return True
# Check labels[1] too (e.g., store in block)
if len(blk.spr.labels) > 1:
blk_label = blk.spr.labels[1]
if blk.name == name and blk_label == label:
return True
return False
def _new_stack_block(self, name):
''' Add a stack block to the 'blocks' palette '''
if isinstance(name, (float, int)):
return
if isinstance(name, unicode):
name = name.encode('ascii', 'replace')
if name == _('action'):
return
# Choose a palette for the new block.
palette = make_palette('blocks')
# Create a new block prototype.
primitive_dictionary['stack'] = self._prim_stack
palette.add_block('stack_%s' % (name),
style='basic-style-1arg',
label=name,
string_or_number=True,
prim_name='stack',
logo_command='action',
default=name,
help_string=_('invokes named action stack'))
self.lc.def_prim('stack', 1, primitive_dictionary['stack'], True)
# Regenerate the palette, which will now include the new block.
self.show_toolbar_palette(palette_name_to_index('blocks'),
regenerate=True)
def _new_box_block(self, name):
''' Add a box block to the 'blocks' palette '''
if isinstance(name, (float, int)):
return
if isinstance(name, unicode):
name = name.encode('ascii', 'replace')
if name == _('my box'):
return
# Choose a palette for the new block.
palette = make_palette('blocks')
# Create a new block prototype.
primitive_dictionary['box'] = self._prim_box
palette.add_block('box_%s' % (name),
style='number-style-1strarg',
label=name,
string_or_number=True,
prim_name='box',
default=name,
logo_command='box',
help_string=_('named variable (numeric value)'))
self.lc.def_prim('box', 1,
lambda self, x: primitive_dictionary['box'](x))
# Regenerate the palette, which will now include the new block.
self.show_toolbar_palette(palette_name_to_index('blocks'),
regenerate=True)
def _new_storein_block(self, name):
''' Add a storein block to the 'blocks' palette '''
if isinstance(name, (float, int)):
return
if isinstance(name, unicode):
name = name.encode('ascii', 'replace')
if name == _('my box'):
return
# Choose a palette for the new block.
palette = make_palette('blocks')
# Create a new block prototype.
primitive_dictionary['setbox'] = self._prim_setbox
palette.add_block('storein_%s' % (name),
style='basic-style-2arg',
label=[_('store in'), name, _('value')],
string_or_number=True,
prim_name='storeinbox',
logo_command='storeinbox',
default=[name, 100],
help_string=_('stores numeric value in named \
variable'))
self.lc.def_prim(
'storeinbox',
2,
lambda self, x, y: primitive_dictionary['setbox']('box3', x, y))
# Regenerate the palette, which will now include the new block.
self.show_toolbar_palette(palette_name_to_index('blocks'),
regenerate=True)
def _prim_stack(self, x):
''' Process a named stack '''
if isinstance(convert(x, float, False), float):
if int(float(x)) == x:
x = int(x)
if 'stack3' + str(x) not in self.lc.stacks or \
self.lc.stacks['stack3' + str(x)] is None:
raise logoerror("#nostack")
self.lc.icall(self.lc.evline,
self.lc.stacks['stack3' + str(x)][:])
yield True
self.lc.procstop = False
self.lc.ireturn()
yield True
def _prim_box(self, x):
''' Retrieve value from named box '''
if isinstance(convert(x, float, False), float):
if int(float(x)) == x:
x = int(x)
try:
return self.lc.boxes['box3' + str(x)]
except KeyError:
raise logoerror("#emptybox")
def _prim_setbox(self, name, x, val):
""" Define value of named box """
if x is not None:
if isinstance(convert(x, float, False), float):
if int(float(x)) == x:
x = int(x)
self.lc.boxes[name + str(x)] = val
self.lc.update_label_value('box', val, label=x)
else:
self.lc.boxes[name] = val
self.lc.update_label_value(name, val)
def dock_dx_dy(self, block1, dock1n, block2, dock2n):
''' Find the distance between the dock points of two blocks. '''
# Cannot dock a block to itself
if block1 == block2:
return _NO_DOCK
dock1 = block1.docks[dock1n]
dock2 = block2.docks[dock2n]
# Dock types include flow, number, string, unavailable
# Dock directions: Flow: True -> in; False -> out
# Dock directions: Number: True -> out; False -> in
# Each dock point has an associated relative x, y position on its block
d1type, d1dir, d1x, d1y = dock1[0:4]
d2type, d2dir, d2x, d2y = dock2[0:4]
# Cannot connect an innie to an innie or an outie to an outie
if d1dir == d2dir:
return _NO_DOCK
# Flow blocks can be inserted into the middle of a stack
if d2type == 'flow' and dock2n == 0:
if block1.connections is not None and \
dock1n == len(block1.connections) - 1 and \
block1.connections[dock1n] is not None:
self.inserting_block_mid_stack = True
elif block1.connections is not None and \
block1.name in EXPANDABLE_FLOW and \
block1.connections[dock1n] is not None:
self.inserting_block_mid_stack = True
# Only number blocks can be docked when the dock is not empty
elif d2type != 'number' or dock2n != 0:
if block1.connections is not None and \
dock1n < len(block1.connections) and \
block1.connections[dock1n] is not None:
return _NO_DOCK
if block2.connections is not None and \
dock2n < len(block2.connections) and \
block2.connections[dock2n] is not None:
return _NO_DOCK
# Only some dock types are interchangeable
if d1type != d2type:
# Some blocks will take strings or numbers
if block1.name in string_or_number_args:
if d2type == 'number' or d2type == 'string':
pass
# Some blocks will take content blocks
elif block1.name in CONTENT_ARGS:
if d2type in content_blocks:
pass
else:
return _NO_DOCK
(b1x, b1y) = block1.spr.get_xy()
(b2x, b2y) = block2.spr.get_xy()
return ((b1x + d1x) - (b2x + d2x), (b1y + d1y) - (b2y + d2y))
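# Illustrative sketch (not in the original source): a caller could use the
# (dx, dy) offset returned above to decide whether two docks are close enough
# to snap together; `blk1`, `blk2` and the distance threshold are made up.
#
#     dx, dy = self.dock_dx_dy(blk1, len(blk1.docks) - 1, blk2, 0)
#     if (dx * dx) + (dy * dy) < 200:
#         blk2.spr.move_relative((dx, dy))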
|
{
"content_hash": "8443d86ce88a4cfb504e516314313382",
"timestamp": "",
"source": "github",
"line_count": 4596,
"max_line_length": 79,
"avg_line_length": 42.29852045256745,
"alnum_prop": 0.494495998024732,
"repo_name": "walterbender/turtleartmini",
"id": "5b0c3e076d1a8f66d2c9ea2e8bf6ef65ba102828",
"size": "195673",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "TurtleArt/tawindow.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1148643"
}
],
"symlink_target": ""
}
|
"""Config flow for NEW_NAME integration."""
from __future__ import annotations
import logging
from typing import Any
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
# TODO adjust the data schema to the data that you need
STEP_USER_DATA_SCHEMA = vol.Schema({"host": str, "username": str, "password": str})
class PlaceholderHub:
"""Placeholder class to make tests pass.
TODO Remove this placeholder class and replace with things from your PyPI package.
"""
def __init__(self, host: str) -> None:
"""Initialize."""
self.host = host
async def authenticate(self, username: str, password: str) -> bool:
"""Test if we can authenticate with the host."""
return True
async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, Any]:
"""Validate the user input allows us to connect.
Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user.
"""
# TODO validate the data can be used to set up a connection.
# If your PyPI package is not built with async, pass your methods
# to the executor:
# await hass.async_add_executor_job(
# your_validate_func, data["username"], data["password"]
# )
hub = PlaceholderHub(data["host"])
if not await hub.authenticate(data["username"], data["password"]):
raise InvalidAuth
# If you cannot connect:
# throw CannotConnect
# If the authentication is wrong:
# InvalidAuth
# Return info that you want to store in the config entry.
return {"title": "Name of the device"}
class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a config flow for NEW_NAME."""
VERSION = 1
# TODO pick one of the available connection classes in homeassistant/config_entries.py
CONNECTION_CLASS = config_entries.CONN_CLASS_UNKNOWN
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> dict[str, Any]:
"""Handle the initial step."""
if user_input is None:
return self.async_show_form(
step_id="user", data_schema=STEP_USER_DATA_SCHEMA
)
errors = {}
try:
info = await validate_input(self.hass, user_input)
except CannotConnect:
errors["base"] = "cannot_connect"
except InvalidAuth:
errors["base"] = "invalid_auth"
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
return self.async_create_entry(title=info["title"], data=user_input)
return self.async_show_form(
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
)
class CannotConnect(HomeAssistantError):
"""Error to indicate we cannot connect."""
class InvalidAuth(HomeAssistantError):
"""Error to indicate there is invalid auth."""
|
{
"content_hash": "1e4647d6a1a12e35e0a3753d20cc70e3",
"timestamp": "",
"source": "github",
"line_count": 103,
"max_line_length": 90,
"avg_line_length": 30.49514563106796,
"alnum_prop": 0.6545686087233366,
"repo_name": "adrienbrault/home-assistant",
"id": "eea7d73b54c1752541cbeef294517ce3ad24fb88",
"size": "3141",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "script/scaffold/templates/config_flow/integration/config_flow.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1795"
},
{
"name": "Python",
"bytes": "32021043"
},
{
"name": "Shell",
"bytes": "4900"
}
],
"symlink_target": ""
}
|
import os
import ssl
import time
import datetime
import ipaddress
import sys
from pyasn1.type import univ, constraint, char, namedtype, tag
from pyasn1.codec.der.decoder import decode
from pyasn1.error import PyAsn1Error
import OpenSSL
from mitmproxy.types import serializable
# Default expiry must not be too long: https://github.com/mitmproxy/mitmproxy/issues/815
DEFAULT_EXP = 94608000 # = 24 * 60 * 60 * 365 * 3
# Generated with "openssl dhparam". It's too slow to generate this on startup.
DEFAULT_DHPARAM = b"""
-----BEGIN DH PARAMETERS-----
MIICCAKCAgEAyT6LzpwVFS3gryIo29J5icvgxCnCebcdSe/NHMkD8dKJf8suFCg3
O2+dguLakSVif/t6dhImxInJk230HmfC8q93hdcg/j8rLGJYDKu3ik6H//BAHKIv
j5O9yjU3rXCfmVJQic2Nne39sg3CreAepEts2TvYHhVv3TEAzEqCtOuTjgDv0ntJ
Gwpj+BJBRQGG9NvprX1YGJ7WOFBP/hWU7d6tgvE6Xa7T/u9QIKpYHMIkcN/l3ZFB
chZEqVlyrcngtSXCROTPcDOQ6Q8QzhaBJS+Z6rcsd7X+haiQqvoFcmaJ08Ks6LQC
ZIL2EtYJw8V8z7C0igVEBIADZBI6OTbuuhDwRw//zU1uq52Oc48CIZlGxTYG/Evq
o9EWAXUYVzWkDSTeBH1r4z/qLPE2cnhtMxbFxuvK53jGB0emy2y1Ei6IhKshJ5qX
IB/aE7SSHyQ3MDHHkCmQJCsOd4Mo26YX61NZ+n501XjqpCBQ2+DfZCBh8Va2wDyv
A2Ryg9SUz8j0AXViRNMJgJrr446yro/FuJZwnQcO3WQnXeqSBnURqKjmqkeFP+d8
6mk2tqJaY507lRNqtGlLnj7f5RNoBFJDCLBNurVgfvq9TCVWKDIFD4vZRjCrnl6I
rD693XKIHUCWOjMh1if6omGXKHH40QuME2gNa50+YPn1iYDl88uDbbMCAQI=
-----END DH PARAMETERS-----
"""
def create_ca(o, cn, exp):
key = OpenSSL.crypto.PKey()
key.generate_key(OpenSSL.crypto.TYPE_RSA, 2048)
cert = OpenSSL.crypto.X509()
cert.set_serial_number(int(time.time() * 10000))
cert.set_version(2)
cert.get_subject().CN = cn
cert.get_subject().O = o
cert.gmtime_adj_notBefore(-3600 * 48)
cert.gmtime_adj_notAfter(exp)
cert.set_issuer(cert.get_subject())
cert.set_pubkey(key)
cert.add_extensions([
OpenSSL.crypto.X509Extension(
b"basicConstraints",
True,
b"CA:TRUE"
),
OpenSSL.crypto.X509Extension(
b"nsCertType",
False,
b"sslCA"
),
OpenSSL.crypto.X509Extension(
b"extendedKeyUsage",
False,
b"serverAuth,clientAuth,emailProtection,timeStamping,msCodeInd,msCodeCom,msCTLSign,msSGC,msEFS,nsSGC"
),
OpenSSL.crypto.X509Extension(
b"keyUsage",
True,
b"keyCertSign, cRLSign"
),
OpenSSL.crypto.X509Extension(
b"subjectKeyIdentifier",
False,
b"hash",
subject=cert
),
])
cert.sign(key, "sha256")
return key, cert
def dummy_cert(privkey, cacert, commonname, sans):
"""
Generates a dummy certificate.
privkey: CA private key
cacert: CA certificate
commonname: Common name for the generated certificate.
sans: A list of Subject Alternate Names.
Returns cert if operation succeeded, None if not.
"""
ss = []
for i in sans:
try:
ipaddress.ip_address(i.decode("ascii"))
except ValueError:
ss.append(b"DNS: %s" % i)
else:
ss.append(b"IP: %s" % i)
ss = b", ".join(ss)
cert = OpenSSL.crypto.X509()
cert.gmtime_adj_notBefore(-3600 * 48)
cert.gmtime_adj_notAfter(DEFAULT_EXP)
cert.set_issuer(cacert.get_subject())
if commonname is not None and len(commonname) < 64:
cert.get_subject().CN = commonname
cert.set_serial_number(int(time.time() * 10000))
if ss:
cert.set_version(2)
cert.add_extensions(
[OpenSSL.crypto.X509Extension(b"subjectAltName", False, ss)])
cert.set_pubkey(cacert.get_pubkey())
cert.sign(privkey, "sha256")
return SSLCert(cert)
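# Illustrative sketch (not in the original source): minting a throwaway CA and
# a leaf certificate for a host plus SANs using the two helpers above; the
# organization, common name and SAN list are made up.
#
#     cakey, cacert = create_ca(o="example org", cn="example ca", exp=DEFAULT_EXP)
#     leaf = dummy_cert(cakey, cacert, b"example.com",
#                       [b"example.com", b"www.example.com"])
#     # leaf.cn == b"example.com"; leaf.altnames lists the DNS SANs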
# DNTree did not pass TestCertStore.test_sans_change and is temporarily replaced by a simple dict.
#
# class _Node(UserDict.UserDict):
# def __init__(self):
# UserDict.UserDict.__init__(self)
# self.value = None
#
#
# class DNTree:
# """
# Domain store that knows about wildcards. DNS wildcards are very
# restricted - the only valid variety is an asterisk on the left-most
# domain component, i.e.:
#
# *.foo.com
# """
# def __init__(self):
# self.d = _Node()
#
# def add(self, dn, cert):
# parts = dn.split(".")
# parts.reverse()
# current = self.d
# for i in parts:
# current = current.setdefault(i, _Node())
# current.value = cert
#
# def get(self, dn):
# parts = dn.split(".")
# current = self.d
# for i in reversed(parts):
# if i in current:
# current = current[i]
# elif "*" in current:
# return current["*"].value
# else:
# return None
# return current.value
class CertStoreEntry:
def __init__(self, cert, privatekey, chain_file):
self.cert = cert
self.privatekey = privatekey
self.chain_file = chain_file
class CertStore:
"""
Implements an in-memory certificate store.
"""
STORE_CAP = 100
def __init__(
self,
default_privatekey,
default_ca,
default_chain_file,
dhparams):
self.default_privatekey = default_privatekey
self.default_ca = default_ca
self.default_chain_file = default_chain_file
self.dhparams = dhparams
self.certs = dict()
self.expire_queue = []
def expire(self, entry):
self.expire_queue.append(entry)
if len(self.expire_queue) > self.STORE_CAP:
d = self.expire_queue.pop(0)
for k, v in list(self.certs.items()):
if v == d:
del self.certs[k]
@staticmethod
def load_dhparam(path):
# mitmproxy<=0.10 doesn't generate a dhparam file.
# Create it now if necessary.
if not os.path.exists(path):
with open(path, "wb") as f:
f.write(DEFAULT_DHPARAM)
bio = OpenSSL.SSL._lib.BIO_new_file(path.encode(sys.getfilesystemencoding()), b"r")
if bio != OpenSSL.SSL._ffi.NULL:
bio = OpenSSL.SSL._ffi.gc(bio, OpenSSL.SSL._lib.BIO_free)
dh = OpenSSL.SSL._lib.PEM_read_bio_DHparams(
bio,
OpenSSL.SSL._ffi.NULL,
OpenSSL.SSL._ffi.NULL,
OpenSSL.SSL._ffi.NULL)
dh = OpenSSL.SSL._ffi.gc(dh, OpenSSL.SSL._lib.DH_free)
return dh
@classmethod
def from_store(cls, path, basename):
ca_path = os.path.join(path, basename + "-ca.pem")
if not os.path.exists(ca_path):
key, ca = cls.create_store(path, basename)
else:
with open(ca_path, "rb") as f:
raw = f.read()
ca = OpenSSL.crypto.load_certificate(
OpenSSL.crypto.FILETYPE_PEM,
raw)
key = OpenSSL.crypto.load_privatekey(
OpenSSL.crypto.FILETYPE_PEM,
raw)
dh_path = os.path.join(path, basename + "-dhparam.pem")
dh = cls.load_dhparam(dh_path)
return cls(key, ca, ca_path, dh)
@staticmethod
def create_store(path, basename, o=None, cn=None, expiry=DEFAULT_EXP):
if not os.path.exists(path):
os.makedirs(path)
o = o or basename
cn = cn or basename
key, ca = create_ca(o=o, cn=cn, exp=expiry)
# Dump the CA plus private key
with open(os.path.join(path, basename + "-ca.pem"), "wb") as f:
f.write(
OpenSSL.crypto.dump_privatekey(
OpenSSL.crypto.FILETYPE_PEM,
key))
f.write(
OpenSSL.crypto.dump_certificate(
OpenSSL.crypto.FILETYPE_PEM,
ca))
# Dump the certificate in PEM format
with open(os.path.join(path, basename + "-ca-cert.pem"), "wb") as f:
f.write(
OpenSSL.crypto.dump_certificate(
OpenSSL.crypto.FILETYPE_PEM,
ca))
# Create a .cer file with the same contents for Android
with open(os.path.join(path, basename + "-ca-cert.cer"), "wb") as f:
f.write(
OpenSSL.crypto.dump_certificate(
OpenSSL.crypto.FILETYPE_PEM,
ca))
# Dump the certificate in PKCS12 format for Windows devices
with open(os.path.join(path, basename + "-ca-cert.p12"), "wb") as f:
p12 = OpenSSL.crypto.PKCS12()
p12.set_certificate(ca)
p12.set_privatekey(key)
f.write(p12.export())
with open(os.path.join(path, basename + "-dhparam.pem"), "wb") as f:
f.write(DEFAULT_DHPARAM)
return key, ca
def add_cert_file(self, spec, path):
with open(path, "rb") as f:
raw = f.read()
cert = SSLCert(
OpenSSL.crypto.load_certificate(
OpenSSL.crypto.FILETYPE_PEM,
raw))
try:
privatekey = OpenSSL.crypto.load_privatekey(
OpenSSL.crypto.FILETYPE_PEM,
raw)
except Exception:
privatekey = self.default_privatekey
self.add_cert(
CertStoreEntry(cert, privatekey, path),
spec
)
def add_cert(self, entry, *names):
"""
Adds a cert to the certstore. We register the CN in the cert plus
any SANs, and also the list of names provided as an argument.
"""
if entry.cert.cn:
self.certs[entry.cert.cn] = entry
for i in entry.cert.altnames:
self.certs[i] = entry
for i in names:
self.certs[i] = entry
@staticmethod
def asterisk_forms(dn):
if dn is None:
return []
parts = dn.split(b".")
parts.reverse()
curr_dn = b""
dn_forms = [b"*"]
for part in parts[:-1]:
curr_dn = b"." + part + curr_dn # .example.com
dn_forms.append(b"*" + curr_dn) # *.example.com
if parts[-1] != b"*":
dn_forms.append(parts[-1] + curr_dn)
return dn_forms
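# Worked example (editor's illustration): for dn = b"www.example.com" the
# method above yields the lookup keys tried by get_cert(), most generic first:
#
#     [b"*", b"*.com", b"*.example.com", b"www.example.com"]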
def get_cert(self, commonname, sans):
"""
Returns an (cert, privkey, cert_chain) tuple.
commonname: Common name for the generated certificate. Must be a
valid, plain-ASCII, IDNA-encoded domain name.
sans: A list of Subject Alternate Names.
"""
potential_keys = self.asterisk_forms(commonname)
for s in sans:
potential_keys.extend(self.asterisk_forms(s))
potential_keys.append((commonname, tuple(sans)))
name = next(
filter(lambda key: key in self.certs, potential_keys),
None
)
if name:
entry = self.certs[name]
else:
entry = CertStoreEntry(
cert=dummy_cert(
self.default_privatekey,
self.default_ca,
commonname,
sans),
privatekey=self.default_privatekey,
chain_file=self.default_chain_file)
self.certs[(commonname, tuple(sans))] = entry
self.expire(entry)
return entry.cert, entry.privatekey, entry.chain_file
class _GeneralName(univ.Choice):
# We are only interested in dNSNames. We use a default handler to ignore
# other types.
# TODO: We should also handle iPAddresses.
componentType = namedtype.NamedTypes(
namedtype.NamedType('dNSName', char.IA5String().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2)
)
),
)
class _GeneralNames(univ.SequenceOf):
componentType = _GeneralName()
sizeSpec = univ.SequenceOf.sizeSpec + \
constraint.ValueSizeConstraint(1, 1024)
class SSLCert(serializable.Serializable):
def __init__(self, cert):
"""
Wraps a pyOpenSSL X509 certificate object.
"""
self.x509 = cert
def __eq__(self, other):
return self.digest("sha256") == other.digest("sha256")
def __ne__(self, other):
return not self.__eq__(other)
def get_state(self):
return self.to_pem()
def set_state(self, state):
self.x509 = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, state)
@classmethod
def from_state(cls, state):
return cls.from_pem(state)
@classmethod
def from_pem(cls, txt):
x509 = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, txt)
return cls(x509)
@classmethod
def from_der(cls, der):
pem = ssl.DER_cert_to_PEM_cert(der)
return cls.from_pem(pem)
def to_pem(self):
return OpenSSL.crypto.dump_certificate(
OpenSSL.crypto.FILETYPE_PEM,
self.x509)
def digest(self, name):
return self.x509.digest(name)
@property
def issuer(self):
return self.x509.get_issuer().get_components()
@property
def notbefore(self):
t = self.x509.get_notBefore()
return datetime.datetime.strptime(t.decode("ascii"), "%Y%m%d%H%M%SZ")
@property
def notafter(self):
t = self.x509.get_notAfter()
return datetime.datetime.strptime(t.decode("ascii"), "%Y%m%d%H%M%SZ")
@property
def has_expired(self):
return self.x509.has_expired()
@property
def subject(self):
return self.x509.get_subject().get_components()
@property
def serial(self):
return self.x509.get_serial_number()
@property
def keyinfo(self):
pk = self.x509.get_pubkey()
types = {
OpenSSL.crypto.TYPE_RSA: "RSA",
OpenSSL.crypto.TYPE_DSA: "DSA",
}
return (
types.get(pk.type(), "UNKNOWN"),
pk.bits()
)
@property
def cn(self):
c = None
for i in self.subject:
if i[0] == b"CN":
c = i[1]
return c
@property
def altnames(self):
"""
Returns:
All DNS altnames.
"""
# tcp.TCPClient.convert_to_ssl assumes that this property only contains DNS altnames for hostname verification.
altnames = []
for i in range(self.x509.get_extension_count()):
ext = self.x509.get_extension(i)
if ext.get_short_name() == b"subjectAltName":
try:
dec = decode(ext.get_data(), asn1Spec=_GeneralNames())
except PyAsn1Error:
continue
for i in dec[0]:
altnames.append(i[0].asOctets())
return altnames
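# Illustrative sketch (not in the original source): inspecting a certificate
# loaded from a PEM file; the file path is made up.
#
#     with open("cert.pem", "rb") as f:
#         c = SSLCert.from_pem(f.read())
#     print(c.cn, c.altnames, c.notbefore, c.has_expired)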
|
{
"content_hash": "6e55c01862ff3808598e5adec340ce34",
"timestamp": "",
"source": "github",
"line_count": 481,
"max_line_length": 119,
"avg_line_length": 30.83783783783784,
"alnum_prop": 0.5714959886739028,
"repo_name": "dwfreed/mitmproxy",
"id": "4b939c80593c4adce0b45a487760e6dc5edbea6e",
"size": "14833",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "mitmproxy/certs.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "208058"
},
{
"name": "HTML",
"bytes": "4270"
},
{
"name": "JavaScript",
"bytes": "2149949"
},
{
"name": "PowerShell",
"bytes": "494"
},
{
"name": "Python",
"bytes": "1378470"
},
{
"name": "Shell",
"bytes": "3660"
}
],
"symlink_target": ""
}
|
import socket
import sys
__all__ = ['Nest']
class Nest(object):
def __init__(self, **settings):
self._clients = {}
# Bind settings
host = settings.get('host', '0.0.0.0')
port = settings.get('port', 8000)
self.addr = (host, port)
self.sock = socket.socket()
self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.sock.bind((host, port))
# App to host
self.app = settings.get('app')
# Use asynio by default for Python 3.4 and above
self.async = settings.get('async', (sys.version_info[:2] >= (3, 4)))
# Do not use this in production!
self.auto_reload = settings.get('auto_reload', False)
self.reloader = None
def run(self):
if self.auto_reload:
from .reloader import Reloader
self.reloader = Reloader.activate(app=self)
if self.async:
from .handlers.async import AsyncHandler
self.handler = AsyncHandler(
nest=self, app=self.app, socket=self.sock)
else:
from .handlers.sync import SyncHandler
self.handler = SyncHandler(
nest=self, app=self.app, socket=self.sock)
self.handler.run()
def shutdown(self):
if self.reloader:
self.reloader.shutdown()
self.handler.shutdown()
self.handler.wait()
self.sock.close()
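# Illustrative sketch (not in the original source): serving an application
# object; `my_app` is a placeholder for whatever the configured handler
# expects, and the host/port values are made up.
#
#     nest = Nest(app=my_app, host='127.0.0.1', port=8080)
#     try:
#         nest.run()
#     except KeyboardInterrupt:
#         nest.shutdown()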
|
{
"content_hash": "516f1eecbe8bba0912c4c849d80740ef",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 76,
"avg_line_length": 27.28301886792453,
"alnum_prop": 0.5643153526970954,
"repo_name": "drongo-framework/drongo-nest",
"id": "139e8774bfeb05e837ef4f8e1198540cfb07b3ac",
"size": "1446",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nest/nest.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "39651"
}
],
"symlink_target": ""
}
|
from os import walk, utime, makedirs
from os.path import (
getmtime, getctime, expanduser, join, abspath, splitext, isdir, exists
)
from re import compile as re_compile
from shutil import copyfile
DEFAULT_IPATH = '.' #: default input directory path.
DEFAULT_OPATH = 'organized' #: default output directory path.
DEFAULT_PREFIX = 'organized' #: default organized file prefix.
__all__ = ['mdateorganize']
def mdateorganize(
ipath=DEFAULT_IPATH, opath=DEFAULT_OPATH, prefix=DEFAULT_PREFIX,
regex=None, extensions=None, followlinks=False, keepname=False,
overwrite=False
):
"""Organize files from a directory and sub-directories by modification date
.
:param str ipath: input path. Default is DEFAULT_IPATH.
:param str opath: output path. Default is DEFAULT_OPATH.
:param str prefix: prefix of organized file names. Default is
DEFAULT_PREFIX.
:param str regex: file name regex to organize.
:param list extensions: file name extensions to organize.
:param bool followlinks: follow links while pathing sub-directories.
:param bool keepname: keep source file name in organized file.
:param bool overwrite: if True (False by default) overwrite existing files.
"""
path = abspath(expanduser(ipath)) # get input path
opath = abspath(expanduser(opath)) # get output path
try:
makedirs(opath)
except OSError:
if not isdir(opath):
raise
# compile regex
compiled_regex = None if regex is None else re_compile(regex)
for dirname, _, files in walk(path, followlinks=followlinks):
if dirname == opath:
continue
for name in files:
filepath = join(dirname, name) # get absolute file path
_, extension = splitext(filepath) # get file extension
if extension: # get extension
extension = extension[1:]
# check extension and regex
if extensions is None or extension in extensions:
if regex is None or compiled_regex.match(name):
ctime = getctime(filepath)
mtime = getmtime(filepath) # get modified time
# new filename is prefix
ofilename = '{0}-{1}'.format(prefix, mtime).replace(
'.', '_'
)
if keepname: # add filename
ofilename = '{0}-{1}'.format(ofilename, name)
elif extension: # add extension
ofilename = '{0}.{1}'.format(ofilename, extension)
newfilepath = join(opath, ofilename) # get output filepath
if overwrite or not exists(newfilepath): # copy file
copyfile(filepath, newfilepath)
times = ctime, mtime # get output times
utime(newfilepath, times) # set times
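# Illustrative sketch (not in the original source): organizing a photo folder
# by modification date while keeping the original file names; paths and the
# extension list are made up.
#
#     mdateorganize(ipath='~/Pictures', opath='~/Pictures/by-date',
#                   extensions=['jpg', 'png'], keepname=True)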
|
{
"content_hash": "65185cbb0e577e0cbbce7a7e65e0cccb",
"timestamp": "",
"source": "github",
"line_count": 86,
"max_line_length": 79,
"avg_line_length": 34.51162790697674,
"alnum_prop": 0.6027628032345014,
"repo_name": "b3j0f/stap",
"id": "a217ee10ef880b841e3911b49a5f710fda7fc225",
"size": "4288",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "b3j0f/stap/organize.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "16118"
}
],
"symlink_target": ""
}
|
from __future__ import print_function
import os
import sys
import time
import boto
from boto.s3.key import Key
from pymongo import MongoClient
from pymongo.cursor import Cursor
from pymongo.errors import AutoReconnect
# Monkey-patch PyMongo to avoid throwing AutoReconnect
# errors. We try to reconnect a couple times before giving up.
def reconnect(f):
# https://gist.github.com/aliang/1393029
def f_retry(*args, **kwargs):
N_RECONNECT_TRIALS = 3
last_error = None
for i in range(N_RECONNECT_TRIALS):
try:
return f(*args, **kwargs)
except AutoReconnect as e:
last_error = e
print('Fail to execute %s [%s] (attempt %d/%d)' % (
f.__name__, e, i, N_RECONNECT_TRIALS),
file=sys.stderr)
time.sleep(1)
raise RuntimeError('AutoReconnect failed. Fail to '
'execute %s [%s]' % (f.__name__, last_error))
return f_retry
Cursor._Cursor__send_message = reconnect(Cursor._Cursor__send_message)
MongoClient._send_message = reconnect(MongoClient._send_message)
MongoClient._send_message_with_response = reconnect(MongoClient._send_message_with_response)
MongoClient._MongoClient__try_node = reconnect(MongoClient._MongoClient__try_node)
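# Editor's illustration (not in the original source): the same wrapper can
# guard any callable that may raise a transient AutoReconnect, e.g. an
# explicit query on a hypothetical `collection`:
#
#     find_with_retry = reconnect(collection.find)
#     pending = list(find_with_retry({'status': 'pending'}))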
def connect_mongo():
uri = 'mongodb://%s:%s@%s' % (os.environ['MONGO_USER'],
os.environ['MONGO_PASSWORD'],
os.environ['MONGO_URL'])
client = MongoClient(uri, safe=True)
cursor = getattr(client, os.environ['MONGO_DATABASE'])
collection = getattr(cursor, os.environ['MONGO_COLLECTION'])
return collection
def upload_s3(prefix, filenames):
conn = boto.connect_s3(os.environ['AWS_ACCESS_KEY_ID'],
os.environ['AWS_SECRET_ACCESS_KEY'])
bucket = conn.get_bucket(os.environ['AWS_S3_BUCKET_NAME'])
for filename in filenames:
if os.path.exists(filename):
k = Key(bucket)
k.key = os.path.join(prefix, filename)
k.set_contents_from_filename(filename)
else:
print('%s does not exist!' % filename)
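# Illustrative sketch (not in the original source): typical use of the two
# helpers above, assuming the MONGO_* and AWS_* environment variables are set;
# the query and file names are made up.
#
#     collection = connect_mongo()
#     record = collection.find_one({'status': 'done'})
#     upload_s3(prefix=str(record['_id']), filenames=['results.csv', 'log.txt'])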
|
{
"content_hash": "463f7bc0d307f4c0f7182c01a9731f15",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 92,
"avg_line_length": 36.20338983050848,
"alnum_prop": 0.6165730337078652,
"repo_name": "rmcgibbo/mongo-task",
"id": "dbe7499d25e5826f2387d8fe33e3fb89d0fca6f6",
"size": "2136",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mongo_task/services.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "11851"
}
],
"symlink_target": ""
}
|
import codecs
import os
from setuptools import setup
grlc_base = 'src'
grlc_base_dir = os.path.join(grlc_base, '')
grlc_data = []
for root,dirs,files in os.walk(grlc_base):
if root != grlc_base:
root_dir = root.replace(grlc_base_dir, '')
data_files = os.path.join(root_dir, '*')
grlc_data.append(data_files)
# To update the package version number, edit CITATION.cff
with open('CITATION.cff', 'r') as cff:
for line in cff:
if 'version:' in line:
version = line.replace('version:', '').strip().strip('"')
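# Editor's illustration: the loop above expects a CITATION.cff line such as
#     version: "1.8.2"
# (version number made up), from which `version` becomes the string '1.8.2'.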
with codecs.open('requirements.txt', mode='r') as f:
install_requires = f.read().splitlines()
with codecs.open('requirements-test.txt', mode='r') as f:
tests_require = f.read().splitlines()
with codecs.open('README.md', mode='r', encoding='utf-8') as f:
long_description = f.read()
setup(
name="grlc",
description='grlc, the git repository linked data API constructor',
long_description=long_description,
long_description_content_type='text/markdown',
license="Copyright 2017 Albert Meroño",
author='Albert Meroño',
author_email='albert.merono@vu.nl',
url='https://github.com/CLARIAH/grlc',
version=version,
py_modules=['grlc'],
packages=['grlc'],
package_dir = {'grlc': grlc_base},
scripts=['bin/grlc-server'],
install_requires=install_requires,
setup_requires=[
# dependency for `python setup.py test`
'pytest-runner',
# dependencies for `python setup.py build_sphinx`
'sphinx',
'recommonmark'
],
tests_require=tests_require,
package_data = { 'grlc': grlc_data },
include_package_data=True,
data_files=[('citation/grlc', ['CITATION.cff'])],
python_requires='>=3.7, <=3.8',
)
|
{
"content_hash": "01f5651bcbd821b9b19e8fee4a60e990",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 71,
"avg_line_length": 31.678571428571427,
"alnum_prop": 0.636978579481398,
"repo_name": "c-martinez/grlc",
"id": "21f524a2bd94172efcfe94c5c7a32b07124e7ff5",
"size": "1829",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3127"
},
{
"name": "Dockerfile",
"bytes": "1542"
},
{
"name": "HTML",
"bytes": "9212"
},
{
"name": "JavaScript",
"bytes": "4107"
},
{
"name": "Python",
"bytes": "112289"
},
{
"name": "Shell",
"bytes": "7404"
},
{
"name": "TeX",
"bytes": "8107"
}
],
"symlink_target": ""
}
|
"""Demo for hosting a Concurrence application within a Syncless process."""
__author__ = 'pts@fazekas.hu (Peter Szabo)'
# It would work even with and without these imports, regardless of the import
# order.
#from syncless.best_stackless import stackless
#from syncless import coio
import sys
import socket
from concurrence import dispatch, Tasklet
from concurrence.io import BufferedStream, Socket
class Lprng(object):
__slots__ = ['seed']
def __init__(self, seed=0):
self.seed = int(seed) & 0xffffffff
def next(self):
"""Generate a 32-bit unsigned random number."""
# http://en.wikipedia.org/wiki/Linear_congruential_generator
self.seed = (
((1664525 * self.seed) & 0xffffffff) + 1013904223) & 0xffffffff
return self.seed
def __iter__(self):
return self
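# Worked example (editor's illustration): with the default seed of 0 the first
# value is (1664525 * 0 + 1013904223) & 0xffffffff == 1013904223, so the
# handler below links "/0" to "/1013904223".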
def handler(client_socket):
print >>sys.stderr, 'info: connection from %r' % (
client_socket.socket.getpeername(),)
stream = BufferedStream(client_socket)
reader = stream.reader # Strips \r\n and \n from the end.
writer = stream.writer
# Read HTTP request.
line1 = None
try:
while True:
line = reader.read_line()
if not line: # Empty line, end of HTTP request.
break
if line1 is None:
line1 = line
except EOFError:
pass
# Parse HTTP request.
# Please note that an assertion here doesn't abort the server.
items = line1.split(' ')
assert 3 == len(items)
assert items[2] in ('HTTP/1.0', 'HTTP/1.1')
assert items[0] == 'GET'
assert items[1].startswith('/')
try:
num = int(items[1][1:])
except ValueError:
num = None
# Write HTTP response.
if num is None:
writer.write_bytes('HTTP/1.0 200 OK\r\nContent-Type: text/html\r\n\r\n')
writer.write_bytes('<a href="/0">start at 0</a><p>Hello, World!\n')
else:
next_num = Lprng(num).next()
writer.write_bytes('HTTP/1.0 200 OK\r\nContent-Type: text/html\r\n\r\n')
writer.write_bytes('<a href="/%d">continue with %d</a>\n' %
(next_num, next_num))
writer.flush()
stream.close()
def server():
server_socket = Socket.new()
server_socket.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
server_address = ('127.0.0.1', 8080)
print >>sys.stderr, 'info: starting to listen on: %r' % (server_address,)
server_socket.bind(server_address)
server_socket.listen(128)
print >>sys.stderr, 'info: listening on: %r' % (
server_socket.socket.getsockname(),)
while True:
client_socket = server_socket.accept()
Tasklet.new(handler)(client_socket)
def ProgressReporter(delta_sec):
from syncless import coio
while True:
sys.stderr.write('.')
coio.sleep(delta_sec)
if __name__ == '__main__':
#from concurrence import _event
#assert 0, _event.method()
from syncless import coio
from syncless import patch
patch.patch_concurrence()
coio.stackless.tasklet(ProgressReporter)(0.2)
# !! Disable the Syncless main loop here if Concurrence is unpatched.
# Both call if/while stackless.getruncount() > 1: stackless.schedule()
dispatch(server)
|
{
"content_hash": "3d30d741ffde2e8010518080aa73fbfe",
"timestamp": "",
"source": "github",
"line_count": 100,
"max_line_length": 77,
"avg_line_length": 30.75,
"alnum_prop": 0.6660162601626016,
"repo_name": "tectronics/syncless",
"id": "8bc609dd32299ca894cbb0f926162d883867876f",
"size": "3160",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "examples/demo_concurrence.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "1099541"
},
{
"name": "C++",
"bytes": "21035"
},
{
"name": "JavaScript",
"bytes": "1098"
},
{
"name": "Makefile",
"bytes": "1531"
},
{
"name": "Python",
"bytes": "528007"
}
],
"symlink_target": ""
}
|
import pytest
from datetime import time, timedelta
import numpy as np
import pandas as pd
import pandas.util.testing as tm
from pandas.util.testing import assert_series_equal
from pandas import (Series, Timedelta, to_timedelta, isnull,
TimedeltaIndex)
from pandas._libs.tslib import iNaT
class TestTimedeltas(object):
_multiprocess_can_split_ = True
def test_to_timedelta(self):
def conv(v):
return v.astype('m8[ns]')
d1 = np.timedelta64(1, 'D')
assert (to_timedelta('1 days 06:05:01.00003', box=False) ==
conv(d1 + np.timedelta64(6 * 3600 + 5 * 60 + 1, 's') +
np.timedelta64(30, 'us')))
assert (to_timedelta('15.5us', box=False) ==
conv(np.timedelta64(15500, 'ns')))
# empty string
result = to_timedelta('', box=False)
assert result.astype('int64') == iNaT
result = to_timedelta(['', ''])
assert isnull(result).all()
# pass thru
result = to_timedelta(np.array([np.timedelta64(1, 's')]))
expected = pd.Index(np.array([np.timedelta64(1, 's')]))
tm.assert_index_equal(result, expected)
# ints
result = np.timedelta64(0, 'ns')
expected = to_timedelta(0, box=False)
assert result == expected
# Series
expected = Series([timedelta(days=1), timedelta(days=1, seconds=1)])
result = to_timedelta(Series(['1d', '1days 00:00:01']))
tm.assert_series_equal(result, expected)
# with units
result = TimedeltaIndex([np.timedelta64(0, 'ns'), np.timedelta64(
10, 's').astype('m8[ns]')])
expected = to_timedelta([0, 10], unit='s')
tm.assert_index_equal(result, expected)
# single element conversion
v = timedelta(seconds=1)
result = to_timedelta(v, box=False)
expected = np.timedelta64(timedelta(seconds=1))
assert result == expected
v = np.timedelta64(timedelta(seconds=1))
result = to_timedelta(v, box=False)
expected = np.timedelta64(timedelta(seconds=1))
assert result == expected
# arrays of various dtypes
arr = np.array([1] * 5, dtype='int64')
result = to_timedelta(arr, unit='s')
expected = TimedeltaIndex([np.timedelta64(1, 's')] * 5)
tm.assert_index_equal(result, expected)
arr = np.array([1] * 5, dtype='int64')
result = to_timedelta(arr, unit='m')
expected = TimedeltaIndex([np.timedelta64(1, 'm')] * 5)
tm.assert_index_equal(result, expected)
arr = np.array([1] * 5, dtype='int64')
result = to_timedelta(arr, unit='h')
expected = TimedeltaIndex([np.timedelta64(1, 'h')] * 5)
tm.assert_index_equal(result, expected)
arr = np.array([1] * 5, dtype='timedelta64[s]')
result = to_timedelta(arr)
expected = TimedeltaIndex([np.timedelta64(1, 's')] * 5)
tm.assert_index_equal(result, expected)
arr = np.array([1] * 5, dtype='timedelta64[D]')
result = to_timedelta(arr)
expected = TimedeltaIndex([np.timedelta64(1, 'D')] * 5)
tm.assert_index_equal(result, expected)
# Test with lists as input when box=False
expected = np.array(np.arange(3) * 1000000000, dtype='timedelta64[ns]')
result = to_timedelta(range(3), unit='s', box=False)
tm.assert_numpy_array_equal(expected, result)
result = to_timedelta(np.arange(3), unit='s', box=False)
tm.assert_numpy_array_equal(expected, result)
result = to_timedelta([0, 1, 2], unit='s', box=False)
tm.assert_numpy_array_equal(expected, result)
# Tests with fractional seconds as input:
expected = np.array(
[0, 500000000, 800000000, 1200000000], dtype='timedelta64[ns]')
result = to_timedelta([0., 0.5, 0.8, 1.2], unit='s', box=False)
tm.assert_numpy_array_equal(expected, result)
def test_to_timedelta_invalid(self):
# bad value for errors parameter
msg = "errors must be one of"
tm.assert_raises_regex(ValueError, msg, to_timedelta,
['foo'], errors='never')
# these will error
pytest.raises(ValueError, lambda: to_timedelta([1, 2], unit='foo'))
pytest.raises(ValueError, lambda: to_timedelta(1, unit='foo'))
# time not supported ATM
pytest.raises(ValueError, lambda: to_timedelta(time(second=1)))
assert to_timedelta(time(second=1), errors='coerce') is pd.NaT
pytest.raises(ValueError, lambda: to_timedelta(['foo', 'bar']))
tm.assert_index_equal(TimedeltaIndex([pd.NaT, pd.NaT]),
to_timedelta(['foo', 'bar'], errors='coerce'))
tm.assert_index_equal(TimedeltaIndex(['1 day', pd.NaT, '1 min']),
to_timedelta(['1 day', 'bar', '1 min'],
errors='coerce'))
# gh-13613: these should not error because errors='ignore'
invalid_data = 'apple'
assert invalid_data == to_timedelta(invalid_data, errors='ignore')
invalid_data = ['apple', '1 days']
tm.assert_numpy_array_equal(
np.array(invalid_data, dtype=object),
to_timedelta(invalid_data, errors='ignore'))
invalid_data = pd.Index(['apple', '1 days'])
tm.assert_index_equal(invalid_data, to_timedelta(
invalid_data, errors='ignore'))
invalid_data = Series(['apple', '1 days'])
tm.assert_series_equal(invalid_data, to_timedelta(
invalid_data, errors='ignore'))
def test_to_timedelta_via_apply(self):
# GH 5458
expected = Series([np.timedelta64(1, 's')])
result = Series(['00:00:01']).apply(to_timedelta)
tm.assert_series_equal(result, expected)
result = Series([to_timedelta('00:00:01')])
tm.assert_series_equal(result, expected)
def test_to_timedelta_on_missing_values(self):
# GH5438
timedelta_NaT = np.timedelta64('NaT')
actual = pd.to_timedelta(Series(['00:00:01', np.nan]))
expected = Series([np.timedelta64(1000000000, 'ns'),
timedelta_NaT], dtype='<m8[ns]')
assert_series_equal(actual, expected)
actual = pd.to_timedelta(Series(['00:00:01', pd.NaT]))
assert_series_equal(actual, expected)
actual = pd.to_timedelta(np.nan)
assert actual.value == timedelta_NaT.astype('int64')
actual = pd.to_timedelta(pd.NaT)
assert actual.value == timedelta_NaT.astype('int64')
def test_to_timedelta_on_nanoseconds(self):
# GH 9273
result = Timedelta(nanoseconds=100)
expected = Timedelta('100ns')
assert result == expected
result = Timedelta(days=1, hours=1, minutes=1, weeks=1, seconds=1,
milliseconds=1, microseconds=1, nanoseconds=1)
expected = Timedelta(694861001001001)
assert result == expected
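# Editor's note: 1 week + 1 day = 8 days = 691200 s; adding 1 h, 1 min and
# 1 s gives 694861 s = 694861000000000 ns, and the remaining
# 1 ms + 1 us + 1 ns contribute the trailing 001001001.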
result = Timedelta(microseconds=1) + Timedelta(nanoseconds=1)
expected = Timedelta('1us1ns')
assert result == expected
result = Timedelta(microseconds=1) - Timedelta(nanoseconds=1)
expected = Timedelta('999ns')
assert result == expected
result = Timedelta(microseconds=1) + 5 * Timedelta(nanoseconds=-2)
expected = Timedelta('990ns')
assert result == expected
pytest.raises(TypeError, lambda: Timedelta(nanoseconds='abc'))
|
{
"content_hash": "276e0cd738572325d932ad7bcab801ca",
"timestamp": "",
"source": "github",
"line_count": 201,
"max_line_length": 79,
"avg_line_length": 37.67164179104478,
"alnum_prop": 0.590729001584786,
"repo_name": "mbayon/TFG-MachineLearning",
"id": "a991b7bbe140a72ad9dd6f550a848c000511fb5d",
"size": "7572",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "venv/lib/python3.6/site-packages/pandas/tests/indexes/timedeltas/test_tools.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "24787"
},
{
"name": "Julia",
"bytes": "11103"
},
{
"name": "Matlab",
"bytes": "98571"
},
{
"name": "Perl",
"bytes": "716"
},
{
"name": "Python",
"bytes": "115284"
},
{
"name": "Shell",
"bytes": "643"
}
],
"symlink_target": ""
}
|