# -*- coding: utf-8 -*-
#
# Tuxemon
# Copyright (C) 2014, William Edwards <shadowapex@gmail.com>,
# Benjamin Bean <superman2k5@gmail.com>
#
# This file is part of Tuxemon.
#
# Tuxemon is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Tuxemon is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Tuxemon. If not, see <http://www.gnu.org/licenses/>.
#
# Contributor(s):
#
# William Edwards <shadowapex@gmail.com>
# Leif Theden <leif.theden@gmail.com>
#
# core.states.world Handles the world map and player movement.
#
#
from __future__ import division
import itertools
import logging
from os.path import join
import pygame
from six.moves import map as imap
from core import prepare, state
from core.components import map, networking
from core.components.game_event import GAME_EVENT, INPUT_EVENT
# Create a logger for optional handling of debug messages.
logger = logging.getLogger(__name__)
class WorldState(state.State):
""" The state responsible for the world game play
"""
preloaded_maps = {}
def startup(self):
# Provide access to the screen surface
self.screen = self.game.screen
self.screen_rect = prepare.SCREEN_RECT
# Set the native tile size so we know how much to scale
self.tile_size = prepare.TILE_SIZE
# Set the status icon size so we know how much to scale
self.icon_size = prepare.ICON_SIZE
# Get the screen's resolution
self.resolution = prepare.SCREEN_SIZE
# Native resolution is similar to the old gameboy resolution. This is
# used for scaling.
self.native_resolution = prepare.NATIVE_RESOLUTION
######################################################################
# Map #
######################################################################
# Set the tiles and map size variables
self.map_size = []
# load the starting map
map_name = join(prepare.BASEDIR, 'resources', 'maps', prepare.CONFIG.starting_map)
self.change_map(map_name)
# Keep a map of preloaded maps for fast map switching.
self.preloaded_maps = {}
######################################################################
# Player Details #
######################################################################
self.player1 = prepare.player1
self.npcs = {}
self.npcs_off_map = {}
self.wants_duel = False
# Set the global coordinates used to pan the screen.
self.start_position = prepare.CONFIG.starting_position
self.global_x = self.player1.position[0] - \
(self.start_position[0] * self.tile_size[0])
self.global_y = self.player1.position[1] - \
(self.start_position[1] * self.tile_size[1]) + self.tile_size[0]
######################################################################
# Transitions #
######################################################################
# default variables for transition
self.transition_alpha = 0
self.transition_surface = None
self.in_transition = False
# The delayed teleport variable is used to perform a teleport in the
# middle of a transition. For example, fading to black, then
# teleporting the player, and fading back in again.
self.delayed_teleport = False
# The delayed facing variable used to change the player's facing in
# the middle of a transition.
self.delayed_facing = None
######################################################################
# Collision Map #
######################################################################
# If we want to display the collision map for debug purposes
if prepare.CONFIG.collision_map == "1":
# For drawing the collision map
self.collision_tile = pygame.Surface(
(self.tile_size[0], self.tile_size[1]))
self.collision_tile.set_alpha(128)
self.collision_tile.fill((255, 0, 0))
######################################################################
# Fullscreen Animations #
######################################################################
# The cinema bars are used for cinematic moments.
# The cinema state can be: "off", "on", "turning on" or "turning off"
self.cinema_state = "off"
self.cinema_speed = 15 * prepare.SCALE # Pixels per second speed of the animation.
self.cinema_top = {}
self.cinema_bottom = {}
# Create a surface that we'll use as black bars for a cinematic
# experience
self.cinema_top['surface'] = pygame.Surface(
(self.resolution[0], self.resolution[1] / 6))
self.cinema_bottom['surface'] = pygame.Surface(
(self.resolution[0], self.resolution[1] / 6))
# Fill our empty surface with black
self.cinema_top['surface'].fill((0, 0, 0))
self.cinema_bottom['surface'].fill((0, 0, 0))
# When cinema mode is off, this will be the position we'll draw the
# black bar.
self.cinema_top['off_position'] = [
0, -self.cinema_top['surface'].get_height()]
self.cinema_bottom['off_position'] = [0, self.resolution[1]]
self.cinema_top['position'] = list(self.cinema_top['off_position'])
self.cinema_bottom['position'] = list(
self.cinema_bottom['off_position'])
# When cinema mode is ON, this will be the position we'll draw the
# black bar.
self.cinema_top['on_position'] = [0, 0]
self.cinema_bottom['on_position'] = [
0, self.resolution[1] - self.cinema_bottom['surface'].get_height()]
self.map_animations = dict()
def fade_and_teleport(self, duration=2):
""" Fade out, teleport, fade in
:return:
"""
def cleanup():
self.in_transition = False
def fade_in():
self.trigger_fade_in(duration)
self.task(cleanup, duration)
# stop player movement
self.player1.moving = False
        # cancel any fades that may be going on
self.remove_animations_of(self)
self.remove_animations_of(cleanup)
self.in_transition = True
self.trigger_fade_out(duration)
task = self.task(self.handle_delayed_teleport, duration)
task.chain(fade_in, duration + .5)
def trigger_fade_in(self, duration=2):
""" World state has own fade code b/c moving maps doesn't change state
:returns: None
"""
self.set_transition_surface()
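        # Assumption: self.animate tweens the named attribute, so this drives
        # transition_alpha from `initial` (255) down to 0 over `duration`
        # seconds; fullscreen_animations() blits transition_surface with that
        # alpha each frame.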
self.animate(self, transition_alpha=0, initial=255, duration=duration, round_values=True)
def trigger_fade_out(self, duration=2):
""" World state has own fade code b/c moving maps doesn't change state
* will cause player to teleport if set somewhere else
:returns: None
"""
self.set_transition_surface()
self.animate(self, transition_alpha=255, initial=0, duration=duration, round_values=True)
def handle_delayed_teleport(self):
""" Call to teleport player if delayed_teleport is set
* load a map
* move player
* send data to network about teleport
:return: None
"""
if self.delayed_teleport:
self.global_x = self.delayed_x
self.global_y = self.delayed_y
if self.delayed_facing:
self.player1.facing = self.delayed_facing
self.delayed_facing = None
# check if map has changed, and if so, change it
            map_name = join(prepare.BASEDIR, "resources", "maps", self.delayed_mapname)
if map_name != self.current_map.filename:
self.change_map(map_name)
self.delayed_teleport = False
def set_transition_surface(self, color=(0, 0, 0)):
self.transition_surface = pygame.Surface(self.game.screen.get_size())
self.transition_surface.fill(color)
def broadcast_player_teleport_change(self):
""" Tell clients/host that player has moved or changed map after teleport
:return:
"""
# Set the transition variable in event_data to false when we're done
self.game.event_data["transition"] = False
# Update the server/clients of our new map and populate any other players.
if self.game.isclient or self.game.ishost:
self.game.add_clients_to_map(self.game.client.client.registry)
self.game.client.update_player(self.player1.facing)
# Update the location of the npcs. Doesn't send network data.
for npc in self.npcs.values():
char_dict = {"tile_pos": npc.tile_pos}
networking.update_client(npc, char_dict, self.game)
for npc in self.npcs_off_map.values():
char_dict = {"tile_pos": npc.tile_pos}
networking.update_client(npc, char_dict, self.game)
def update(self, time_delta):
"""The primary game loop that executes the world's game functions every frame.
:param time_delta: Amount of time passed since last frame.
:type time_delta: Float
:rtype: None
:returns: None
"""
super(WorldState, self).update(time_delta)
logger.debug("*** Game Loop Started ***")
logger.debug("Player Variables:" + str(self.player1.game_variables))
# Get the player's tile position based on the global_x/y variables. Since the player's sprite is 1 x 2
# tiles in size, we add 1 to the 'y' position so the player's actual position will be on the bottom
# portion of the sprite.
self.player1.tile_pos = (float((self.player1.position[0] - self.global_x)) / float(
self.tile_size[0]), (float((self.player1.position[1] - self.global_y)) / float(self.tile_size[1])) + 1)
def draw(self, surface):
""" Draw the game world to the screen
:param surface:
:return:
"""
self.screen = surface
self.map_drawing(surface)
self.player_movement()
self.move_npcs()
self.fullscreen_animations(surface)
def process_event(self, event):
"""Handles player input events. This function is only called when the
player provides input
such as pressing a key or clicking the mouse.
:param event: A pygame key event from pygame.event.get()
:type event: PyGame Event
:rtype: None
:returns: None
"""
# Handle Key DOWN events
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_ESCAPE:
logger.info("Opening main menu!")
self.game.push_state("WorldMenuState")
# If we receive an arrow key press, set the facing and
# moving direction to that direction
if event.key == pygame.K_UP:
self.player1.direction["up"] = True
self.player1.facing = "up"
if event.key == pygame.K_DOWN:
self.player1.direction["down"] = True
self.player1.facing = "down"
if event.key == pygame.K_LEFT:
self.player1.direction["left"] = True
self.player1.facing = "left"
if event.key == pygame.K_RIGHT:
self.player1.direction["right"] = True
self.player1.facing = "right"
if event.key == pygame.K_SPACE or event.key == pygame.K_RETURN:
# TODO: Check to see if we have network players to interact with.
multiplayer = False
if multiplayer:
self.check_interactable_space()
# Handle Key UP events
if event.type == pygame.KEYUP:
# If the player lets go of the key, set the moving
# direction to false
if event.key == pygame.K_UP:
self.player1.direction["up"] = False
if event.key == pygame.K_DOWN:
self.player1.direction["down"] = False
if event.key == pygame.K_LEFT:
self.player1.direction["left"] = False
if event.key == pygame.K_RIGHT:
self.player1.direction["right"] = False
# Handle text input events
if event.type == GAME_EVENT and event.event_type == INPUT_EVENT:
self.player1.name = event.text
return None
self.game.client.set_key_condition(event)
# by default, just pass every event down, since we assume
# that the world state will be the last running state, before
# the event engine.
return event
def get_all_players(self):
"""Retrieves a list of all npcs and the player.
:rtype: Dictionary
:returns: Dictionary of all Player objects keyed by their slug.
"""
players = dict(world.npcs)
players[game.player1.slug] = game.player1
return players
####################################################
# Map Drawing #
####################################################
def map_drawing(self, surface):
"""Draws the map tiles in a layered order.
        :param surface: The surface to draw the map onto.
:rtype: None
:returns: None
"""
# center the camera on the player sprite
sx, sy = prepare.SCREEN_SIZE
self.current_map.renderer.center((-self.global_x + sx / 2,
-self.global_y + sy / 2))
# interlace player sprites with tiles surfaces.
# eventually, maybe use pygame sprites or something similar
surfaces = self.player1.get_sprites()
# get npc surfaces/sprites
for npc in self.npcs:
surfaces.extend(self.npcs[npc].get_sprites())
# get map_animation
ox, oy = self.current_map.renderer.get_center_offset()
for anim_data in self.map_animations.values():
anim = anim_data['animation']
if not anim.isFinished() and anim.visibility:
x, y = anim_data["position"]
x += ox
y += oy
frame = (anim.getCurrentFrame(), (x, y), anim_data['layer'])
surfaces.append(frame)
# draw the map and sprites
self.current_map.renderer.draw(surface, surface.get_rect(), surfaces)
# If we want to draw the collision map for debug purposes
if prepare.CONFIG.collision_map == "1":
self.debug_drawing(surface)
####################################################
# Player Movement #
####################################################
def player_movement(self):
"""Handles player's movement, collision, and drawing. Also draws map
tiles that are on a layer above the player.
:param: None
:rtype: None
:returns: None
"""
# We need to keep track of the global_x/y that we used to draw the bottom tiles so we use
# the same values for the higher layer tiles. We have to do this because when we draw the
# player's movement, we modify the global_x/y values to start moving the map.
self.orig_global_x = self.global_x
self.orig_global_y = self.global_y
# Get all the keys pressed for modifiers only!
pressed = list(pygame.key.get_pressed())
self.ctrl_held = pressed[pygame.K_LCTRL] or pressed[pygame.K_RCTRL]
self.alt_held = pressed[pygame.K_LALT] or pressed[pygame.K_RALT]
self.shift_held = pressed[pygame.K_LSHIFT] or pressed[pygame.K_RSHIFT]
# TODO: phase out in favor of a global game clock
self.time_passed_seconds = self.game.time_passed_seconds
# Handle tile based movement for the player
if self.shift_held:
self.player1.moverate = self.player1.runrate
else:
self.player1.moverate = self.player1.walkrate
# Set the global_x/y when the player moves around
self.global_x, self.global_y = self.player1.move(
self.screen, self.tile_size, self.time_passed_seconds, (self.global_x, self.global_y), self)
# Find out how many pixels we've moved since we started moving
self.global_x_diff = self.orig_global_x - self.global_x
self.global_y_diff = self.orig_global_y - self.global_y
def move_npcs(self):
""" Move NPCs and Players around according to their state
This function may be moved to a server
:return:
"""
# Draw any game NPC's
for npc in self.npcs.values():
if npc.running:
npc.moverate = npc.runrate
else:
npc.moverate = npc.walkrate
# Get the NPC's tile position based on his pixel position. Since the NPC's sprite is 1 x 2
# tiles in size, we add 1 to the 'y' position so the NPC's actual position will be on the bottom
# portion of the sprite.
npc.tile_pos = (float((npc.position[0] - self.global_x)) / float(
self.tile_size[0]), (float((npc.position[1] - self.global_y)) / float(self.tile_size[1])) + 1)
# Move the NPC with the map as it moves
npc.position[0] -= self.global_x_diff
npc.position[1] -= self.global_y_diff
# if the npc has a path, move it along its path
if npc.path:
npc.move_by_path()
npc.move(self.tile_size, self.time_passed_seconds, self)
# Reset our directions after moving.
if not npc.isplayer:
npc.direction["up"] = False
npc.direction["down"] = False
npc.direction["left"] = False
npc.direction["right"] = False
if npc.update_location:
char_dict = {"tile_pos": npc.final_move_dest}
networking.update_client(npc, char_dict, self.game)
npc.update_location = False
# Move any multiplayer characters that are off map so we know where they should be when we change maps.
for npc in self.npcs_off_map.values():
if npc.running:
npc.moverate = npc.runrate
else:
npc.moverate = npc.walkrate
# Get the NPC's tile position based on his pixel position. Since the NPC's sprite is 1 x 2
# tiles in size, we add 1 to the 'y' position so the NPC's actual position will be on the bottom
# portion of the sprite.
npc.tile_pos = (float((npc.position[0] - self.global_x)) / float(
self.tile_size[0]), (float((npc.position[1] - self.global_y)) / float(self.tile_size[1])) + 1)
# Move the NPC with the map as it moves
npc.position[0] -= self.global_x_diff
npc.position[1] -= self.global_y_diff
# if the npc has a path, move it along its path
if npc.path:
npc.move_by_path()
npc.move(self.tile_size, self.time_passed_seconds, self)
def _collision_box_to_pgrect(self, box):
"""Returns a pygame.Rect (in screen-coords) version of a collision box (in world-coords).
"""
# For readability
x = box[0]
y = box[1]
tw = self.tile_size[0]
th = self.tile_size[1]
return pygame.Rect(x * tw + self.global_x,
y * th + self.global_y,
tw,
th)
def _npc_to_pgrect(self, npc):
"""Returns a pygame.Rect (in screen-coords) version of an NPC's bounding box.
"""
return pygame.Rect(npc.position, self.tile_size)
def debug_drawing(self, surface):
# We need to iterate over all collidable objects. So, let's start
# with the walls/collision boxes.
box_iter = imap(self._collision_box_to_pgrect, self.collision_map)
# Next, deal with solid NPCs.
npc_iter = imap(self._npc_to_pgrect, self.npcs.values())
        # draw npc and wall collision tiles
for item in itertools.chain(box_iter, npc_iter):
surface.blit(self.collision_tile, (item[0], item[1]))
# draw events
for event in self.game.events:
rect = self._collision_box_to_pgrect((event.x, event.y))
surface.fill((0, 255, 255, 128), rect)
# draw collision check boxes
if self.player1.direction["up"]:
surface.blit(self.collision_tile, (
self.player1.position[0], self.player1.position[1] - self.tile_size[1]))
elif self.player1.direction["down"]:
surface.blit(self.collision_tile, (
self.player1.position[0], self.player1.position[1] + self.tile_size[1]))
elif self.player1.direction["left"]:
surface.blit(self.collision_tile, (
self.player1.position[0] - self.tile_size[0], self.player1.position[1]))
elif self.player1.direction["right"]:
surface.blit(self.collision_tile, (
self.player1.position[0] + self.tile_size[0], self.player1.position[1]))
def midscreen_animations(self, surface):
"""Handles midscreen animations that will be drawn UNDER menus and dialog.
:param surface: surface to draw on
:rtype: None
:returns: None
"""
if self.cinema_state == "turning on":
self.cinema_top['position'][
1] += self.cinema_speed * self.time_passed_seconds
self.cinema_bottom['position'][
1] -= self.cinema_speed * self.time_passed_seconds
# If we've reached our target position, stop the animation.
            if self.cinema_top['position'][1] >= self.cinema_top['on_position'][1]:
self.cinema_top['position'] = list(
self.cinema_top['on_position'])
self.cinema_bottom['position'] = list(
self.cinema_bottom['on_position'])
self.cinema_state = "on"
# Draw the cinema bars
surface.blit(
self.cinema_top['surface'], self.cinema_top['position'])
surface.blit(
self.cinema_bottom['surface'], self.cinema_bottom['position'])
elif self.cinema_state == "on":
# Draw the cinema bars
surface.blit(
self.cinema_top['surface'], self.cinema_top['position'])
surface.blit(
self.cinema_bottom['surface'], self.cinema_bottom['position'])
elif self.cinema_state == "turning off":
self.cinema_top['position'][1] -= (
self.cinema_speed * self.time_passed_seconds)
self.cinema_bottom['position'][
1] += self.cinema_speed * self.time_passed_seconds
# If we've reached our target position, stop the animation.
if self.cinema_top['position'][1] <= self.cinema_top['off_position'][1]:
self.cinema_top['position'] = list(
self.cinema_top['off_position'])
self.cinema_bottom['position'] = list(
self.cinema_bottom['off_position'])
self.cinema_state = "off"
# Draw the cinema bars
surface.blit(
self.cinema_top['surface'], self.cinema_top['position'])
surface.blit(
self.cinema_bottom['surface'], self.cinema_bottom['position'])
####################################################
# Full Screen Animations Functions #
####################################################
def fullscreen_animations(self, surface):
"""Handles fullscreen animations such as transitions, cutscenes, etc.
:param surface: Surface to draw onto
:rtype: None
:returns: None
"""
if self.in_transition:
self.transition_surface.set_alpha(self.transition_alpha)
surface.blit(self.transition_surface, (0, 0))
####################################################
# Map Change/Load Functions #
####################################################
def change_map(self, map_name):
# Set the currently loaded map. This is needed because the event
# engine loads event conditions and event actions from the currently
# loaded map. If we change maps, we need to update this.
        if map_name not in self.preloaded_maps:
logger.debug("Map was not preloaded. Loading from disk.")
map_data = self.load_map(map_name)
else:
logger.debug("%s was found in preloaded maps." % map_name)
map_data = self.preloaded_maps[map_name]
self.clear_preloaded_maps()
# reset controls and stop moving to prevent player from
# moving after the teleport and being out of control
self.game.reset_controls()
try:
self.player1.direction['up'] = False
self.player1.direction['down'] = False
self.player1.direction['left'] = False
self.player1.direction['right'] = False
self.player1.moving = False
except AttributeError: # will be raised if this is first map change
pass
self.current_map = map_data["data"]
self.collision_map = map_data["collision_map"]
self.collision_lines_map = map_data["collision_lines_map"]
self.map_size = map_data["map_size"]
# TODO: remove this monkey [patching!] business for the main control/game
self.game.events = map_data["events"]
self.game.inits = map_data["inits"]
self.game.interacts = map_data["interacts"]
self.game.event_engine.current_map = map_data
# Clear out any existing NPCs
self.npcs = {}
self.npcs_off_map = {}
def load_map(self, map_name):
"""Returns map data as a dictionary to be used for map changing and preloading
"""
map_data = {}
map_data["data"] = map.Map(map_name)
map_data["events"] = map_data["data"].events
map_data["inits"] = map_data["data"].inits
map_data["interacts"] = map_data["data"].interacts
map_data["collision_map"], map_data["collision_lines_map"], map_data["map_size"] = \
map_data["data"].loadfile(self.tile_size)
return map_data
def preload_map(self, map_name):
""" Preload a map for quicker access
:param map_name:
:return: None
"""
self.preloaded_maps[map_name] = self.load_map(map_name)
def clear_preloaded_maps(self):
""" Clear the proloaded maps cache
:return: None
"""
self.preloaded_maps = {}
def get_pos_from_tilepos(self, tile_position):
"""Returns the screen coordinate based on tile position.
:param tile_position: An [x, y] tile position.
:type tile_position: List
:rtype: List
:returns: The pixel coordinates to draw at the given tile position.
"""
x = (self.tile_size[0] * tile_position[0]) + self.global_x
y = (self.tile_size[1] * tile_position[1]) + self.global_y
return x, y
def check_interactable_space(self):
"""Checks to see if any Npc objects around the player are interactable. It then populates a menu
of possible actions.
:param: None
:rtype: Bool
:returns: True if there is an Npc to interact with.
"""
collision_dict = self.player1.get_collision_dict(self)
player_tile_pos = (int(round(self.player1.tile_pos[0])), int(round(self.player1.tile_pos[1])))
collisions = self.player1.collision_check(player_tile_pos, collision_dict, self.collision_lines_map)
        if not collisions:
            return False
else:
for direction in collisions:
if self.player1.facing == direction:
if direction == "up":
tile = (player_tile_pos[0], player_tile_pos[1] - 1)
elif direction == "down":
tile = (player_tile_pos[0], player_tile_pos[1] + 1)
elif direction == "left":
tile = (player_tile_pos[0] - 1, player_tile_pos[1])
elif direction == "right":
tile = (player_tile_pos[0] + 1, player_tile_pos[1])
for npc in self.npcs.values():
tile_pos = (int(round(npc.tile_pos[0])), int(round(npc.tile_pos[1])))
if tile_pos == tile:
logger.info("Opening interaction menu!")
self.game.push_state("InteractionMenu")
return True
else:
continue
def handle_interaction(self, event_data, registry):
"""Presents options window when another player has interacted with this player.
:param event_data: Information on the type of interaction and who sent it.
:param registry:
:type event_data: Dictionary
:type registry: Dictionary
:rtype: None
:returns: None
"""
target = registry[event_data["target"]]["sprite"]
target_name = str(target.name)
networking.update_client(target, event_data["char_dict"], self.game)
if event_data["interaction"] == "DUEL":
if not event_data["response"]:
self.interaction_menu.visible = True
self.interaction_menu.interactable = True
self.interaction_menu.player = target
self.interaction_menu.interaction = "DUEL"
self.interaction_menu.menu_items = [target_name + " would like to Duel!", "Accept", "Decline"]
else:
if self.wants_duel:
if event_data["response"] == "Accept":
world = self.game.current_state
pd = world.player1.__dict__
event_data = {"type": "CLIENT_INTERACTION",
"interaction": "START_DUEL",
"target": [event_data["target"]],
"response": None,
"char_dict": {"monsters": pd["monsters"],
"inventory": pd["inventory"]
}
}
self.game.server.notify_client_interaction(cuuid, event_data)
# === source: treetrnk/Tuxemon :: tuxemon/core/states/world/worldstate.py | Python | gpl-3.0 | 31,699 bytes ===
from math import *
# import psyco
# psyco.full()
class memoize:
def __init__(self, function):
self.function = function
self.memoized = {}
def __call__(self, *args):
try:
return self.memoized[args]
except KeyError:
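            # Cap the cache so memory stays bounded: once full, compute the
            # result without storing it.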
            if len(self.memoized) > 100000:
return self.function(*args)
self.memoized[args] = self.function(*args)
return self.memoized[args]
@memoize
def factor(target):
a = []
for i in range(2, int(sqrt(target))+1 ):
if target % i == 0:
return [i, target / i] + factor(i) + factor(target/i)
return []
old = -1
acc = 0
for i in xrange(10000000,0,-1):
if i % 10000 == 0:
print i / 10. ** 7, acc
new = len(factor(i))
if old == new:
acc += 1
old = new
print acc
# === source: jdavidberger/project-euler :: prob179.py | Python | lgpl-3.0 | 791 bytes ===
from django import forms
from django.forms.widgets import Textarea, DateInput, NumberInput
from core import forms as cf
from core import models as cm
from widgets import forms as wf
class ParticipateForm(forms.Form):
def __init__(self, item, *args, **kwargs):
super(ParticipateForm, self).__init__(*args, **kwargs)
povs = item.participation_project.ballotdeciderproject.points_of_view.all()
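        # One slider field per point of view; the POV id is baked into the
        # field name so the submitting view can map weights back to POV
        # objects (an assumption about how the view consumes this form).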
for pov in povs:
self.fields["pov_weight_"+str(pov.id)] = forms.IntegerField(widget = NumberInput(attrs={'type': 'range', 'step': '1'}))
class CreateProjectForm(forms.Form):
measure_name = forms.CharField()
ballot_text = forms.CharField(widget = Textarea)
election_date = forms.DateField(widget = wf.DatePickerJQueryWidget)
election_website = forms.URLField()
basics_notes = forms.CharField(widget = Textarea, required=False)
basics1 = forms.IntegerField(min_value = 0, required=False)
basics2 = forms.IntegerField(min_value = 0, required=False)
basics3 = forms.IntegerField(min_value = 0, required=False)
effects_notes = forms.CharField(widget = Textarea, required=False)
effects1 = forms.IntegerField(min_value = 0, required=False)
effects2 = forms.IntegerField(min_value = 0, required=False)
effects3 = forms.IntegerField(min_value = 0, required=False)
pov_quote_1 = forms.CharField(widget=Textarea, required=False)
pov_is_favorable_1 = forms.BooleanField(required = False)
pov_quote_2 = forms.CharField(widget=Textarea, required=False)
pov_is_favorable_2 = forms.BooleanField(required = False)
pov_quote_3 = forms.CharField(widget=Textarea, required=False)
pov_is_favorable_3 = forms.BooleanField(required = False)
tag1 = cf.tag_aac.get_new_form_field(required=False)
tag2 = cf.tag_aac.get_new_form_field(required=False)
tag3 = cf.tag_aac.get_new_form_field(required=False)
def clean(self):
cleaned_data = super(CreateProjectForm, self).clean()
# ensure pov's are completely filled in or not at all
for i in range(1,4):
quote = cleaned_data.get("pov_quote_"+str(i))
is_favorable_defined = not (cleaned_data.get("pov_is_favorable_"+str(i)) is None)
if any([quote]) and not all([quote, is_favorable_defined]):
raise forms.ValidationError("Each POV must either be left blank or filled in completely: " + str(i))
        return cleaned_data

    # Ensure referenced participation items are valid
    def _clean_item_reference(self, field_name):
        """Shared validator: the field must be blank or reference an active item."""
        data = self.cleaned_data[field_name]
        if data is None or data == "":
            return None
        try:
            cm.ParticipationItem.objects.get(id=data, is_active=True)
        except cm.ParticipationItem.DoesNotExist:
            raise forms.ValidationError("There is no active participation item with that ID")
        return data

    def clean_basics1(self):
        return self._clean_item_reference('basics1')

    def clean_basics2(self):
        return self._clean_item_reference('basics2')

    def clean_basics3(self):
        return self._clean_item_reference('basics3')

    def clean_effects1(self):
        return self._clean_item_reference('effects1')

    def clean_effects2(self):
        return self._clean_item_reference('effects2')

    def clean_effects3(self):
        return self._clean_item_reference('effects3')
class EditProjectForm(CreateProjectForm):
def __init__(self, project, *args, **kwargs):
super(EditProjectForm, self).__init__(*args, **kwargs)
povs = project.points_of_view.all()
for pov in povs:
self.fields["delete_pov_"+str(pov.id)] = forms.BooleanField(help_text=str(pov.quote), required=False)
# === source: better-dem/portal :: ballot_decider/forms.py | Python | agpl-3.0 | 4,917 bytes ===
# striplog documentation build configuration file.
#
import sys
import os
sys.path.insert(0, os.path.abspath('..'))
# -- Setup function ----------------------------------------------------------
# Defines custom steps in the process.
def autodoc_skip_member(app, what, name, obj, skip, options):
"""Exclude all private attributes, methods, and dunder methods from Sphinx."""
import re
exclude = re.findall(r'\._.*', str(obj))
    return skip or bool(exclude)
def remove_module_docstring(app, what, name, obj, options, lines):
"""Remove everything after 'Author: '."""
if what == "module":
keep = [i for i, line in enumerate(lines) if line.startswith("Author: ")]
if keep:
del lines[keep[0]:]
return
def setup(app):
app.connect('autodoc-skip-member', autodoc_skip_member)
app.connect("autodoc-process-docstring", remove_module_docstring)
return
# -- General configuration ------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinxcontrib.apidoc',
'sphinx.ext.githubpages',
'sphinx.ext.coverage',
'sphinx.ext.napoleon',
'myst_nb'
]
myst_enable_extensions = ["dollarmath", "amsmath"]
# Apidoc automation
# https://pypi.org/project/sphinxcontrib-apidoc/
# The apidoc extension and this code automatically update apidoc.
apidoc_module_dir = '../striplog'
apidoc_output_dir = './api'
apidoc_excluded_paths = []
apidoc_toc_file = False
apidoc_separate_modules = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# General information about the project.
project = 'striplog'
copyright = '2022, The Striplog Authors'
author = 'The Striplog Authors'
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = 'en'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build',]
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'furo'
html_theme_options = {
"sidebar_hide_name": True,
}
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
html_logo = '_static/striplog_logo.png'
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
html_favicon = '_static/favicon.ico'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
html_css_files = [
'custom.css',
]
# === source: agile-geoscience/striplog :: docs/conf.py | Python | apache-2.0 | 3,309 bytes ===
#!/usr/bin/env python3
from linklist import *
class Solution:
def removeElements(self, head, val):
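        # Recursive approach: if this node matches val, replace it with the
        # cleaned tail; otherwise keep it and point it at the cleaned tail.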
        if head is None:
return head
if head.val == val:
head = self.removeElements(head.next, val)
else:
head.next = self.removeElements(head.next, val)
return head
nodeString = '[1,2,6,3,4,5,6]'
nodeString = '[6]'
nodeString = '[]'
head = linkListBuilder(nodeString)
val = 6
traverse(head)
sol = Solution()
traverse(sol.removeElements(head, val))
# === source: eroicaleo/LearningPython :: interview/leet/203_Remove_Linked_List_Elements.py | Python | mit | 513 bytes ===
import plugin_super_class
import threading
import time
from PyQt5 import QtCore, QtWidgets
from subprocess import check_output
import json
class InvokeEvent(QtCore.QEvent):
EVENT_TYPE = QtCore.QEvent.Type(QtCore.QEvent.registerEventType())
def __init__(self, fn, *args, **kwargs):
QtCore.QEvent.__init__(self, InvokeEvent.EVENT_TYPE)
self.fn = fn
self.args = args
self.kwargs = kwargs
class Invoker(QtCore.QObject):
def event(self, event):
event.fn(*event.args, **event.kwargs)
return True
_invoker = Invoker()
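# Qt delivers posted events on the receiving object's thread, so posting an
# InvokeEvent to _invoker (created at import time, normally on the GUI
# thread) marshals the wrapped call onto the main thread safely.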
def invoke_in_main_thread(fn, *args, **kwargs):
QtCore.QCoreApplication.postEvent(_invoker, InvokeEvent(fn, *args, **kwargs))
class AutoAwayStatusLinux(plugin_super_class.PluginSuperClass):
def __init__(self, *args):
super().__init__('AutoAwayStatusLinux', 'awayl', *args)
self._thread = None
self._exec = None
self._active = False
self._time = json.loads(self.load_settings())['time']
self._prev_status = 0
def close(self):
self.stop()
def stop(self):
self._exec = False
if self._active:
self._thread.join()
def start(self):
self._exec = True
self._thread = threading.Thread(target=self.loop)
self._thread.start()
def save(self):
self.save_settings('{"time": ' + str(self._time) + '}')
def change_status(self, status=1):
if self._profile.status in (0, 2):
self._prev_status = self._profile.status
if status is not None:
invoke_in_main_thread(self._profile.set_status, status)
def get_window(self):
inst = self
class Window(QtWidgets.QWidget):
def __init__(self):
super(Window, self).__init__()
self.setGeometry(QtCore.QRect(450, 300, 350, 100))
self.label = QtWidgets.QLabel(self)
self.label.setGeometry(QtCore.QRect(20, 0, 310, 35))
self.label.setText(QtWidgets.QApplication.translate("AutoAwayStatusLinux", "Auto away time in minutes\n(0 - to disable)"))
self.time = QtWidgets.QLineEdit(self)
self.time.setGeometry(QtCore.QRect(20, 40, 310, 25))
self.time.setText(str(inst._time))
self.setWindowTitle("AutoAwayStatusLinux")
self.ok = QtWidgets.QPushButton(self)
self.ok.setGeometry(QtCore.QRect(20, 70, 310, 25))
self.ok.setText(
QtWidgets.QApplication.translate("AutoAwayStatusLinux", "Save"))
self.ok.clicked.connect(self.update)
def update(self):
try:
t = int(self.time.text())
                except ValueError:
                    t = 0
inst._time = t
inst.save()
self.close()
return Window()
def loop(self):
self._active = True
while self._exec:
time.sleep(5)
d = check_output(['xprintidle'])
d = int(d) // 1000
if self._time:
if d > 60 * self._time:
self.change_status()
elif self._profile.status == 1:
self.change_status(self._prev_status)
# === source: ingvar1995/toxygen_plugins :: AutoAwayStatusLinux/awayl.py | Python | gpl-3.0 | 3,320 bytes ===
from django.views.generic import CreateView, DetailView
from .models import TestModel
class TestCreateView(CreateView):
template_name = 'test_tinymce/create.html'
fields = ('content',)
model = TestModel
class TestDisplayView(DetailView):
template_name = 'test_tinymce/display.html'
context_object_name = 'test_model'
model = TestModel
# === source: romanvm/django-tinymce4-lite :: test_tinymce/views.py | Python | mit | 363 bytes ===
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
try:
from django.contrib.auth import get_user_model
except ImportError: # django < 1.5
from django.contrib.auth.models import User
else:
User = get_user_model()
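    # Labels used below: user_orm_label is the ORM reference
    # ('app_label.ObjectName'), while user_model_label is the lowercased form
    # ('app_label.modelname') that South uses as a key in the models dict.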
user_orm_label = '%s.%s' % (User._meta.app_label, User._meta.object_name)
user_model_label = '%s.%s' % (User._meta.app_label, User._meta.module_name)
user_ptr_name = '%s_ptr' % User._meta.object_name.lower()
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'PlayerDB.db_cmdset_storage'
db.add_column('players_playerdb', 'db_cmdset_storage', self.gf('django.db.models.fields.TextField')(null=True), keep_default=False)
def backwards(self, orm):
# Deleting field 'PlayerDB.db_cmdset_storage'
db.delete_column('players_playerdb', 'db_cmdset_storage')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
user_model_label: {
'Meta': {'object_name': User.__name__, 'db_table': "'%s'" % User._meta.db_table},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'objects.objectdb': {
'Meta': {'object_name': 'ObjectDB'},
'db_cmdset_storage': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'db_date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'db_destination': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'destinations_set'", 'null': 'True', 'to': "orm['objects.ObjectDB']"}),
'db_home': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'homes_set'", 'null': 'True', 'to': "orm['objects.ObjectDB']"}),
'db_key': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'db_location': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'locations_set'", 'null': 'True', 'to': "orm['objects.ObjectDB']"}),
'db_lock_storage': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'db_permissions': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
'db_player': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['players.PlayerDB']", 'null': 'True', 'blank': 'True'}),
'db_typeclass_path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'players.playerattribute': {
'Meta': {'object_name': 'PlayerAttribute'},
'db_date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'db_key': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'db_lock_storage': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'db_obj': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['players.PlayerDB']"}),
'db_value': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'players.playerdb': {
'Meta': {'object_name': 'PlayerDB'},
'db_cmdset_storage': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'db_date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'db_key': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'db_lock_storage': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'db_obj': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['objects.ObjectDB']", 'null': 'True'}),
'db_permissions': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
'db_typeclass_path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['%s']" % user_orm_label, 'unique': 'True'})
}
}
complete_apps = ['players']
# === source: google-code-export/evennia :: src/players/migrations/0003_auto__add_field_playerdb_db_cmdset_storage.py | Python | bsd-3-clause | 7,359 bytes ===
import parsley, hexchat
from random import randint
__module_name__ = 'dice'
__module_version__ = '1.0.0'
__module_description__ = 'Allows one to do emote dice rolls'
__module_author__ = 'Vlek'
def say(msg):
"""Says msg in chat within current context"""
context = hexchat.find_context()
context.command('say {}'.format(msg))
return
def calculate(start, pairs):
result = start
for op, value in pairs:
if op == '+':
result += value
elif op == '-':
result -= value
elif op == '*':
result *= value
elif op == '/':
result /= value
return result
def roll(num_dice, dice_sides):
num_dice = int(num_dice) if num_dice != '' else 1
return sum([randint(1, int(dice_sides)) for i in range(num_dice)])
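# e.g. roll('3', '6') sums three rolls of a six-sided die; roll('', '20')
# rolls a single d20.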
x = parsley.makeGrammar("""
number = <digit+>:ds -> int(ds)
parens = '(' ws expr:e ws ')' -> e
die = <digit*>:dice_num 'd' <digit+>:dice_sides -> roll(dice_num, dice_sides)
value = die | number | parens
ws = ' '*
add = '+' ws expr2:n -> ('+', n)
sub = '-' ws expr2:n -> ('-', n)
mul = '*' ws value:n -> ('*', n)
div = '/' ws value:n -> ('/', n)
addsub = ws (add | sub)
muldiv = ws (mul | div)
expr = expr2:left addsub*:right -> calculate(left, right)
expr2 = value:left muldiv*:right -> calculate(left, right)
""", {"calculate": calculate, "roll": roll})
def dice(word, word_to_eol, userdata):
    if len(word) == 1:
        say('//help dice')
        return hexchat.EAT_ALL  # nothing to roll; stop after pointing at help
roll_expr = ''.join(word[1:])
context = hexchat.find_context()
context.command('me {}'.format("rolls {}: {}".format(roll_expr, x(roll_expr).expr())))
return hexchat.EAT_ALL
for command in ['dice', 'roll']:
hexchat.hook_command(command, dice, help="/roll 1d20 + 3")
# === source: Vlek/plugins :: HexChat/dice.py | Python | mit | 1,742 bytes ===
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-10-28 10:57
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('courses', '0014_course_views'),
]
operations = [
migrations.AlterField(
model_name='course',
name='title',
field=models.CharField(max_length=160),
),
]
# === source: sonic182/portfolio3 :: courses/migrations/0015_auto_20161028_1057.py | Python | mit | 445 bytes ===
"""
=======================================================
Reconstruction with Constrained Spherical Deconvolution
=======================================================
This example shows how to use Constrained Spherical Deconvolution (CSD)
introduced by Tournier et al. [Tournier2007]_.
This method is mainly useful with datasets with gradient directions acquired on
a spherical grid.
The basic idea with this method is that if we could estimate the response function of a
single fiber then we could deconvolve the measured signal and obtain the underlying
fiber distribution.
Let's first load the data. We will use a dataset with 10 b0s and 150 non-b0s with b-value 2000.
"""
import numpy as np
from dipy.data import fetch_stanford_hardi, read_stanford_hardi
fetch_stanford_hardi()
img, gtab = read_stanford_hardi()
"""
You can verify the b-values of the datasets by looking at the attribute `gtab.bvals`.
In CSD there is an important pre-processing step: the estimation of the fiber response function. In order to
do this we look for voxels with very anisotropic configurations. For example, here we use a 20x20x20 ROI at the center
of the volume and keep the signal values for the voxels with FA values higher than 0.7. Of course, if we haven't
precalculated FA we need to fit a Tensor model to the dataset first, which is what we do here.
"""
from dipy.reconst.dti import TensorModel
data = img.get_data()
print('data.shape (%d, %d, %d, %d)' % data.shape)
affine = img.get_affine()
zooms = img.get_header().get_zooms()[:3]
mask = data[..., 0] > 50
tenmodel = TensorModel(gtab)
ci, cj, ck = np.array(data.shape[:3]) // 2
w = 10
roi = data[ci - w: ci + w,
cj - w: cj + w,
ck - w: ck + w]
tenfit = tenmodel.fit(roi)
from dipy.reconst.dti import fractional_anisotropy
FA = fractional_anisotropy(tenfit.evals)
FA[np.isnan(FA)] = 0
indices = np.where(FA > 0.7)
lambdas = tenfit.evals[indices][:, :2]
"""
Using `gtab.b0s_mask()` we can find all the S0 volumes (which correspond to b-values equal to 0) in the dataset.
"""
S0s = roi[indices][:, np.nonzero(gtab.b0s_mask)[0]]
"""
The response function in this example consists of a prolate tensor created
by averaging the highest and second highest eigenvalues. We also include the
average S0s.
"""
S0 = np.mean(S0s)
l01 = np.mean(lambdas, axis=0)
evals = np.array([l01[0], l01[1], l01[1]])
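# (A prolate tensor has one large eigenvalue and two equal smaller ones,
# hence [l01[0], l01[1], l01[1]].)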
response = (evals, S0)
"""
Now we are ready to import the CSD model and fit the datasets.
"""
from dipy.reconst.csdeconv import ConstrainedSphericalDeconvModel
csd_model = ConstrainedSphericalDeconvModel(gtab, response)
"""
For illustration purposes we will fit only a slice of the datasets.
"""
data_small = data[20:50, 55:85, 38:39]
csd_fit = csd_model.fit(data_small)
"""
Show the CSD-based ODFs also known as FODFs (fiber ODFs).
"""
from dipy.data import get_sphere
sphere = get_sphere('symmetric724')
csd_odf = csd_fit.odf(sphere)
from dipy.viz import fvtk
r = fvtk.ren()
"""
Here we visualize only a 30x30 region.
"""
fodf_spheres = fvtk.sphere_funcs(csd_odf, sphere, scale=1.3, norm=False)
fvtk.add(r, fodf_spheres)
print('Saving illustration as csd_odfs.png')
fvtk.record(r, n_frames=1, out_path='csd_odfs.png', size=(600, 600))
"""
.. figure:: csd_odfs.png
:align: center
**CSD ODFs**.
.. [Tournier2007] J-D. Tournier, F. Calamante and A. Connelly, "Robust determination of the fibre orientation distribution in diffusion MRI: Non-negativity constrained super-resolved spherical deconvolution", Neuroimage, vol. 35, no. 4, pp. 1459-1472, 2007.
.. include:: ../links_names.inc
"""
# === source: maurozucchelli/dipy :: doc/examples/reconst_csd.py | Python | bsd-3-clause | 3,601 bytes ===
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
__author__ = "Gina Häußge <osd@foosel.net>"
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
__copyright__ = "Copyright (C) 2014 The OctoPrint Project - Released under terms of the AGPLv3 License"
import logging
import threading
import octoprint.vendor.sockjs.tornado
import octoprint.vendor.sockjs.tornado.session
import octoprint.vendor.sockjs.tornado.proto
import octoprint.vendor.sockjs.tornado.util
import time
import octoprint.timelapse
import octoprint.server
import octoprint.events
import octoprint.plugin
import octoprint.access.users
from octoprint.events import Events
from octoprint.settings import settings
from octoprint.access.permissions import Permissions
from octoprint.access.users import LoginStatusListener
from octoprint.access.groups import GroupChangeListener
from octoprint.util.json import JsonEncoding
import octoprint.printer
import wrapt
import json
class ThreadSafeSession(octoprint.vendor.sockjs.tornado.session.Session):
def __init__(self, conn, server, session_id, expiry=None):
octoprint.vendor.sockjs.tornado.session.Session.__init__(self, conn, server, session_id, expiry=expiry)
def set_handler(self, handler, start_heartbeat=True):
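        # Wrap the handler's send_pack in a re-entrant lock so concurrent
        # writers cannot interleave frames on the wire (assumption: multiple
        # threads may emit on the same socket, hence "ThreadSafeSession").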
if getattr(handler, "__orig_send_pack", None) is None:
orig_send_pack = handler.send_pack
mutex = threading.RLock()
def send_pack(*args, **kwargs):
with mutex:
return orig_send_pack(*args, **kwargs)
handler.send_pack = send_pack
setattr(handler, "__orig_send_pack", orig_send_pack)
return octoprint.vendor.sockjs.tornado.session.Session.set_handler(self, handler, start_heartbeat=start_heartbeat)
def remove_handler(self, handler):
result = octoprint.vendor.sockjs.tornado.session.Session.remove_handler(self, handler)
if getattr(handler, "__orig_send_pack", None) is not None:
handler.send_pack = getattr(handler, "__orig_send_pack")
delattr(handler, "__orig_send_pack")
return result
class JsonEncodingSessionWrapper(wrapt.ObjectProxy):
def send_message(self, msg, stats=True, binary=False):
self.send_jsonified(json.dumps(octoprint.vendor.sockjs.tornado.util.bytes_to_str(msg),
separators=(',', ':'),
default=JsonEncoding.encode),
stats)
class PrinterStateConnection(octoprint.vendor.sockjs.tornado.SockJSConnection,
octoprint.printer.PrinterCallback,
LoginStatusListener,
GroupChangeListener):
_event_permissions = {Events.USER_LOGGED_IN: [Permissions.ADMIN],
Events.USER_LOGGED_OUT: [Permissions.ADMIN],
"*": []}
_event_payload_processors = {Events.CLIENT_OPENED: [lambda user, payload: payload if user.has_permission(Permissions.ADMIN) else dict()],
Events.CLIENT_AUTHED: [lambda user, payload: payload if user.has_permission(Permissions.ADMIN) else dict()],
"*": []}
_emit_permissions = {"connected": [],
"reauthRequired": [],
"*": [Permissions.STATUS]}
_unauthed_backlog_max = 100
def __init__(self, printer, fileManager, analysisQueue, userManager, groupManager, eventManager, pluginManager, session):
if isinstance(session, octoprint.vendor.sockjs.tornado.session.Session):
session = JsonEncodingSessionWrapper(session)
octoprint.vendor.sockjs.tornado.SockJSConnection.__init__(self, session)
self._logger = logging.getLogger(__name__)
self._temperatureBacklog = []
self._temperatureBacklogMutex = threading.Lock()
self._logBacklog = []
self._logBacklogMutex = threading.Lock()
self._messageBacklog = []
self._messageBacklogMutex = threading.Lock()
self._unauthed_backlog = []
self._unauthed_backlog_mutex = threading.RLock()
self._printer = printer
self._fileManager = fileManager
self._analysisQueue = analysisQueue
self._userManager = userManager
self._groupManager = groupManager
self._eventManager = eventManager
self._pluginManager = pluginManager
self._remoteAddress = None
self._user = self._userManager.anonymous_user_factory()
self._throttleFactor = 1
self._lastCurrent = 0
self._baseRateLimit = 0.5
self._register_hooks = self._pluginManager.get_hooks("octoprint.server.sockjs.register")
self._authed_hooks = self._pluginManager.get_hooks("octoprint.server.sockjs.authed")
self._emit_hooks = self._pluginManager.get_hooks("octoprint.server.sockjs.emit")
self._registered = False
self._authed = False
@staticmethod
def _get_remote_address(info):
forwarded_for = info.headers.get("X-Forwarded-For")
if forwarded_for is not None:
return forwarded_for.split(",")[0]
return info.ip
def __str__(self):
if self._remoteAddress:
return "{!r} connected to {}".format(self, self._remoteAddress)
else:
return "Unconnected {!r}".format(self)
def on_open(self, info):
self._pluginManager.register_message_receiver(self.on_plugin_message)
self._remoteAddress = self._get_remote_address(info)
self._logger.info("New connection from client: %s" % self._remoteAddress)
self._userManager.register_login_status_listener(self)
self._groupManager.register_listener(self)
plugin_signature = lambda impl: "{}:{}".format(impl._identifier, impl._plugin_version)
template_plugins = list(map(plugin_signature, self._pluginManager.get_implementations(octoprint.plugin.TemplatePlugin)))
asset_plugins = list(map(plugin_signature, self._pluginManager.get_implementations(octoprint.plugin.AssetPlugin)))
ui_plugins = sorted(set(template_plugins + asset_plugins))
import hashlib
plugin_hash = hashlib.md5()
plugin_hash.update(",".join(ui_plugins).encode('utf-8'))
config_hash = settings().config_hash
# connected => update the API key, might be necessary if the client was left open while the server restarted
self._emit("connected", dict(version=octoprint.server.VERSION,
display_version=octoprint.server.DISPLAY_VERSION,
branch=octoprint.server.BRANCH,
plugin_hash=plugin_hash.hexdigest(),
config_hash=config_hash,
debug=octoprint.server.debug,
safe_mode=octoprint.server.safe_mode,
permissions=[permission.as_dict() for permission in Permissions.all()]))
self._eventManager.fire(Events.CLIENT_OPENED, {"remoteAddress": self._remoteAddress})
self._register()
def on_close(self):
self._user = self._userManager.anonymous_user_factory()
self._groupManager.unregister_listener(self)
self._userManager.unregister_login_status_listener(self)
self._unregister()
self._eventManager.fire(Events.CLIENT_CLOSED, {"remoteAddress": self._remoteAddress})
self._logger.info("Client connection closed: %s" % self._remoteAddress)
self._on_logout()
self._remoteAddress = None
self._pluginManager.unregister_message_receiver(self.on_plugin_message)
def on_message(self, message):
try:
import json
message = json.loads(message)
except Exception:
self._logger.warning("Invalid JSON received from client {}, ignoring: {!r}".format(self._remoteAddress, message))
return
if "auth" in message:
try:
parts = message["auth"].split(":")
                if len(parts) != 2:
raise ValueError()
except ValueError:
self._logger.warning("Got invalid auth message from client {}, ignoring: {!r}".format(self._remoteAddress, message["auth"]))
else:
user_id, user_session = parts
user = self._userManager.find_user(userid=user_id, session=user_session)
if user is not None:
self._on_login(user)
else:
                    self._logger.warning("Unknown user/session combo: {}:{}".format(user_id, user_session))
self._on_logout()
self._register()
elif "throttle" in message:
try:
throttle = int(message["throttle"])
if throttle < 1:
raise ValueError()
except ValueError:
self._logger.warning("Got invalid throttle factor from client {}, ignoring: {!r}".format(self._remoteAddress, message["throttle"]))
else:
self._throttleFactor = throttle
self._logger.debug("Set throttle factor for client {} to {}".format(self._remoteAddress, self._throttleFactor))
def on_printer_send_current_data(self, data):
if not self._user.has_permission(Permissions.STATUS):
return
# make sure we rate limit the updates according to our throttle factor
now = time.time()
if now < self._lastCurrent + self._baseRateLimit * self._throttleFactor:
return
self._lastCurrent = now
# add current temperature, log and message backlogs to sent data
with self._temperatureBacklogMutex:
temperatures = self._temperatureBacklog
self._temperatureBacklog = []
with self._logBacklogMutex:
logs = self._logBacklog
self._logBacklog = []
with self._messageBacklogMutex:
messages = self._messageBacklog
self._messageBacklog = []
busy_files = [dict(origin=v[0], path=v[1]) for v in self._fileManager.get_busy_files()]
if "job" in data and data["job"] is not None \
and "file" in data["job"] and "path" in data["job"]["file"] and "origin" in data["job"]["file"] \
and data["job"]["file"]["path"] is not None and data["job"]["file"]["origin"] is not None \
and (self._printer.is_printing() or self._printer.is_paused()):
busy_files.append(dict(origin=data["job"]["file"]["origin"], path=data["job"]["file"]["path"]))
data.update({
"serverTime": time.time(),
"temps": temperatures,
"logs": logs,
"messages": messages,
"busyFiles": busy_files,
})
self._emit("current", payload=data)
def on_printer_send_initial_data(self, data):
data_to_send = dict(data)
data_to_send["serverTime"] = time.time()
self._emit("history", payload=data_to_send)
def sendEvent(self, type, payload=None):
permissions = self._event_permissions.get(type, self._event_permissions["*"])
permissions = [x(self._user) if callable(x) else x for x in permissions]
if not self._user or not all(map(lambda p: self._user.has_permission(p), permissions)):
return
processors = self._event_payload_processors.get(type, self._event_payload_processors["*"])
for processor in processors:
payload = processor(self._user, payload)
self._emit("event", payload=dict(type=type, payload=payload))
def sendTimelapseConfig(self, timelapseConfig):
self._emit("timelapse", payload=timelapseConfig)
def sendSlicingProgress(self, slicer, source_location, source_path, dest_location, dest_path, progress):
self._emit("slicingProgress", payload=dict(slicer=slicer,
source_location=source_location,
source_path=source_path,
dest_location=dest_location,
dest_path=dest_path,
progress=progress))
def sendRenderProgress(self, progress):
self._emit("renderProgress",
dict(progress=progress)
)
def on_plugin_message(self, plugin, data, permissions=None):
self._emit("plugin", payload=dict(plugin=plugin, data=data), permissions=permissions)
def on_printer_add_log(self, data):
with self._logBacklogMutex:
self._logBacklog.append(data)
def on_printer_add_message(self, data):
with self._messageBacklogMutex:
self._messageBacklog.append(data)
def on_printer_add_temperature(self, data):
with self._temperatureBacklogMutex:
self._temperatureBacklog.append(data)
def on_user_logged_out(self, user, stale=False):
if user.get_id() == self._user.get_id() and hasattr(user, "session") and hasattr(self._user, "session") and user.session == self._user.session:
self._logger.info("User {} logged out, logging out on socket".format(user.get_id()))
self._on_logout()
if stale:
self._sendReauthRequired("stale")
else:
self._sendReauthRequired("logout")
def on_user_modified(self, user):
if user.get_id() == self._user.get_id():
self._sendReauthRequired("modified")
def on_user_removed(self, userid):
if self._user.get_id() == userid:
self._logger.info("User {} deleted, logging out on socket".format(userid))
self._on_logout()
self._sendReauthRequired("removed")
def on_group_permissions_changed(self, group, added=None, removed=None):
if self._user.is_anonymous and group == self._groupManager.guest_group:
self._sendReauthRequired("modified")
def on_group_subgroups_changed(self, group, added=None, removed=None):
if self._user.is_anonymous and group == self._groupManager.guest_group:
self._sendReauthRequired("modified")
def _onEvent(self, event, payload):
self.sendEvent(event, payload)
def _register(self):
"""Register this socket with the system if STATUS permission is available."""
proceed = True
for name, hook in self._register_hooks.items():
try:
proceed = proceed and hook(self, self._user)
except Exception:
self._logger.exception("Error processing register hook handler for plugin {}".format(name),
extra=dict(plugin=name))
if not proceed:
return
if self._registered:
return
if not self._user.has_permission(Permissions.STATUS):
return
# printer
self._printer.register_callback(self)
self._printer.send_initial_callback(self)
# files
self._fileManager.register_slicingprogress_callback(self)
# events
for event in octoprint.events.all_events():
self._eventManager.subscribe(event, self._onEvent)
# timelapse
octoprint.timelapse.register_callback(self)
octoprint.timelapse.notify_callback(self, timelapse=octoprint.timelapse.current)
if octoprint.timelapse.current_render_job is not None:
# This is a horrible hack for now to allow displaying a notification that a render job is still
# active in the backend on a fresh connect of a client. This needs to be substituted with a proper
# job management for timelapse rendering, analysis stuff etc that also gets cancelled when prints
# start and so on.
#
# For now this is the easiest way though to at least inform the user that a timelapse is still ongoing.
#
# TODO remove when central job management becomes available and takes care of this for us
self.sendEvent(Events.MOVIE_RENDERING, payload=octoprint.timelapse.current_render_job)
self._registered = True
def _unregister(self):
"""Unregister this socket from the system"""
self._printer.unregister_callback(self)
self._fileManager.unregister_slicingprogress_callback(self)
octoprint.timelapse.unregister_callback(self)
for event in octoprint.events.all_events():
self._eventManager.unsubscribe(event, self._onEvent)
def _reregister(self):
"""Unregister and register again"""
self._unregister()
self._register()
def _sendReauthRequired(self, reason):
self._emit("reauthRequired", payload=dict(reason=reason))
def _emit(self, type, payload=None, permissions=None):
proceed = True
for name, hook in self._emit_hooks.items():
try:
proceed = proceed and hook(self, self._user, type, payload)
except Exception:
self._logger.exception("Error processing emit hook handler from plugin {}".format(name),
extra=dict(plugin=name))
if not proceed:
return
if permissions is None:
permissions = self._emit_permissions.get(type, self._emit_permissions["*"])
permissions = [x() if callable(x) else x for x in permissions]
if not self._user or not all(map(lambda p: self._user.has_permission(p), permissions)):
if not self._authed:
with self._unauthed_backlog_mutex:
if len(self._unauthed_backlog) < self._unauthed_backlog_max:
self._unauthed_backlog.append((type, payload))
self._logger.debug("Socket message held back until permissions cleared, added to backlog: {}".format(type))
else:
self._logger.debug("Socket message held back, but backlog full. Throwing message away: {}".format(type))
return
self._do_emit(type, payload)
def _do_emit(self, type, payload):
try:
self.send({type: payload})
except Exception as e:
if self._logger.isEnabledFor(logging.DEBUG):
self._logger.exception("Could not send message to client {}".format(self._remoteAddress))
else:
self._logger.warning("Could not send message to client {}: {}".format(self._remoteAddress, e))
def _on_login(self, user):
self._user = user
self._logger.info("User {} logged in on the socket from client {}".format(user.get_name(),
self._remoteAddress))
self._authed = True
for name, hook in self._authed_hooks.items():
try:
hook(self, self._user)
except Exception:
self._logger.exception("Error processing authed hook handler for plugin {}".format(name),
extra=dict(plugin=name))
# if we have a backlog from being unauthed, process that now
with self._unauthed_backlog_mutex:
backlog = self._unauthed_backlog
self._unauthed_backlog = []
if len(backlog):
self._logger.debug("Sending {} messages on the socket that were held back".format(len(backlog)))
for message, payload in backlog:
self._do_emit(message, payload)
# trigger ClientAuthed event
octoprint.events.eventManager().fire(octoprint.events.Events.CLIENT_AUTHED,
payload=dict(username=user.get_name(),
remoteAddress=self._remoteAddress))
def _on_logout(self):
self._user = self._userManager.anonymous_user_factory()
self._authed = False
for name, hook in self._authed_hooks.items():
try:
hook(self, self._user)
except Exception:
self._logger.exception("Error processing authed hook handler for plugin {}".format(name),
extra=dict(plugin=name))
|
foosel/OctoPrint
|
src/octoprint/server/util/sockjs.py
|
Python
|
agpl-3.0
| 18,118
|
#!/usr/bin/env python3
import os
from pathlib import Path
import numpy as np
from pysisyphus.helpers import geom_from_xyz_file
from pysisyphus.stocastic.align import matched_rmsd
THIS_DIR = Path(os.path.dirname(os.path.realpath(__file__)))
def test_matched_rmsd():
geom1 = geom_from_xyz_file(THIS_DIR / "eins.xyz")
# Calling with the identical geometries should return RMSD of 0.
min_rmsd, (geom1_matched, geom2_matched) = matched_rmsd(geom1, geom1)
np.testing.assert_allclose(min_rmsd, 0.0, atol=1e-10)
np.testing.assert_allclose(geom1_matched.coords, geom2_matched.coords)
geom2 = geom_from_xyz_file(THIS_DIR / "zwei.xyz")
min_rmsd, _ = matched_rmsd(geom1, geom2)
np.testing.assert_allclose(min_rmsd, 0.057049, atol=1e-5)
if __name__ == "__main__":
test_matched_rmsd()
|
eljost/pysisyphus
|
tests_staging/test_matched_rmsd/test_matched_rmsd.py
|
Python
|
gpl-3.0
| 816
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-08-09 20:24
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('compta', '0004_compte_epargne'),
]
operations = [
migrations.AlterField(
model_name='operationepargne',
name='epargne',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='compta.Epargne', verbose_name='Epargne'),
),
]
|
mfalaize/carnet-entretien
|
compta/migrations/0005_auto_20170809_2224.py
|
Python
|
gpl-3.0
| 577
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# This module copyright (C) 2013-2014 Savoir-faire Linux
# (<http://www.savoirfairelinux.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import (
travel,
travel_passenger,
res_config,
)
|
ingadhoc/openerp-travel
|
travel/__init__.py
|
Python
|
agpl-3.0
| 1,092
|
#!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
# Nexcess.net TwoFactorAuth Extension for Magento
# Copyright (C) 2014 Nexcess.net L.L.C.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""Script to generate Magento Extension package files
Run as: build_package.py <package_description.xml>
You can override the PHP, xmllint, bash and GCC binaries used for syntax
checks with the TURPENTINE_BIN_PHP, TURPENTINE_BIN_XMLLINT, TURPENTINE_BIN_BASH
and TURPENTINE_BIN_GCC environment variables. Useful for checking with a
non-default version of PHP.
"""
__title__ = 'build_package.py'
__version__ = '0.0.3'
__author__ = 'Alex Headley <aheadley@nexcess.net>'
__license__ = 'GPLv2'
__copyright__ = 'Copyright (C) 2012 Nexcess.net L.L.C.'
import os
import xml.etree.ElementTree as ElementTree
import logging
import datetime
import hashlib
import re
import tarfile
import subprocess
class Magento_Packager(object):
BIN_PHP = os.environ.get('TURPENTINE_BIN_PHP', 'php')
BIN_XMLLINT = os.environ.get('TURPENTINE_BIN_XMLLINT', 'xmllint')
BIN_BASH = os.environ.get('TURPENTINE_BIN_BASH', 'bash')
BIN_GCC = os.environ.get('TURPENTINE_BIN_GCC', 'gcc')
TARGET_DIRS = {
'magelocal': 'app/code/local',
'magecommunity': 'app/code/community',
'magecore': 'app/code/core',
'magedesign': 'app/design',
'mageetc': 'app/etc',
}
MAGE_PKG_XML_FILENAME = 'package.xml'
def __init__(self, base_dir, debug=False):
self._base_dir = base_dir
self._logger = logging.getLogger(self.__class__.__name__)
if debug:
self._logger.setLevel(logging.DEBUG)
else:
self._logger.setLevel(logging.INFO)
self._file_list = []
self._logger.debug('Packager init with base dir: %s', self._base_dir)
self._logger.debug('Using PHP binary: %s', self.BIN_PHP)
self._logger.debug('Using xmllint binary: %s', self.BIN_XMLLINT)
def do_syntax_check(self):
self._logger.info('Running syntax check on %d files', len(self._file_list))
result = True
syntax_map = {
'.php': self._php_syntax_check,
'.phtml': self._php_syntax_check,
'.xml': self._xml_syntax_check,
'.sh': self._bash_syntax_check,
'.bash': self._bash_syntax_check,
'.c': self._gcc_syntax_check,
}
def unsupported_syntax_check(filename):
self._logger.debug('Skipping syntax check for unsupported file: %s',
filename)
return True
for filename in self._file_list:
syntax_check = syntax_map.get(os.path.splitext(filename)[1].lower(),
unsupported_syntax_check)
if not syntax_check(filename):
self._logger.warning('Syntax check failed for file: %s', filename)
result = False
return result
def build_package_xml(self, connect_file):
self._logger.info('Building package from connect file: %s', connect_file)
connect_dom = ElementTree.parse(connect_file)
ext_name = connect_dom.find('name').text
self._logger.debug('Using "%s" as extension name', ext_name)
config_dom = self._get_config_dom(ext_name, connect_dom.find('channel').text)
module_dom = self._get_module_dom(ext_name)
self._logger.info('Building extension %s version %s', ext_name,
config_dom.find('modules/%s/version' % ext_name).text)
if connect_dom.find('channel').text != \
module_dom.find('modules/%s/codePool' % ext_name).text:
self._logger.warning('Connect file code pool (%s) does not match module code pool (%s)',
connect_dom.find('channel').text,
module_dom.find('modules/%s/codePool' % ext_name).text)
pkg_dom = self._build_package_dom(ElementTree.Element('package'),
connect_dom, config_dom, module_dom)
self._logger.info('Finished building extension package XML')
return pkg_dom
def build_tarball(self, pkg_xml, tarball_name=None, keep_pkg_xml=False):
manifest_filename = '%s/build/manifest-%s.xml' % \
(self._base_dir, pkg_xml.findtext('./version'))
if tarball_name is None:
tarball_name = '%s/build/%s-%s.tgz' % (self._base_dir,
pkg_xml.findtext('./name'), pkg_xml.findtext('./version'))
self._logger.info('Writing tarball to: %s', tarball_name)
cdir = os.getcwd()
os.chdir(self._base_dir)
with open(manifest_filename, 'w') as xml_file:
ElementTree.ElementTree(pkg_xml).write(xml_file, 'utf-8', True)
self._logger.debug('Wrote package XML')
with tarfile.open(tarball_name, 'w:gz') as tarball:
for filename in self._file_list:
alt_filename = filename.replace(self._base_dir + '/', '')
self._logger.debug('Adding file to tarball: %s', alt_filename)
tarball.add(filename, alt_filename)
self._logger.debug('Adding file to tarball: %s',
self.MAGE_PKG_XML_FILENAME)
tarball.add(manifest_filename, self.MAGE_PKG_XML_FILENAME)
self._logger.info('Finished writing tarball')
if not keep_pkg_xml:
os.unlink(manifest_filename)
os.chdir(cdir)
return tarball_name
def _build_package_dom(self, pkg_dom, connect_dom, config_dom, module_dom):
ext_name = connect_dom.find('name').text
now = datetime.datetime.now()
commit_hash = self._get_git_hash()
self._logger.debug('Using commit hash: %s', commit_hash)
extension = {
'name': ext_name,
'version': config_dom.find('modules/%s/version' % ext_name).text,
'stability': connect_dom.find('stability').text,
'license': connect_dom.find('license').text,
'channel': connect_dom.find('channel').text,
'extends': None,
'summary': connect_dom.find('summary').text,
'description': connect_dom.find('description').text,
'notes': connect_dom.find('notes').text,
'authors': None,
'date': now.date().isoformat(),
'time': now.time().strftime('%H:%M:%S'),
'contents': None,
            'compatible': None,
'dependencies': None,
'__packager': '%s v%s' % (__title__, __version__),
'__commit_hash': commit_hash,
}
for key, value in extension.iteritems():
tag = ElementTree.SubElement(pkg_dom, key)
if value:
tag.text = value
self._logger.debug('Added package element <%s> = "%s"', key, value)
pkg_dom.find('license').set('uri', connect_dom.find('license_uri').text)
self._build_authors_tag(pkg_dom.find('authors'), connect_dom)
self._build_contents_tag(pkg_dom.find('contents'), connect_dom)
self._build_dependencies_tag(pkg_dom.find('dependencies'), connect_dom)
return pkg_dom
def _build_authors_tag(self, authors_tag, connect_dom):
        # findall() already returns a list, so fetch each list once and zip
        # the parallel name/user/email entries together.
        names = connect_dom.findall('authors/name/name')
        users = connect_dom.findall('authors/user/user')
        emails = connect_dom.findall('authors/email/email')
        for name_el, user_el, email_el in zip(names, users, emails):
            author_tag = ElementTree.SubElement(authors_tag, 'author')
            name_tag = ElementTree.SubElement(author_tag, 'name')
            name_tag.text = name_el.text
            user_tag = ElementTree.SubElement(author_tag, 'user')
            user_tag.text = user_el.text
            email_tag = ElementTree.SubElement(author_tag, 'email')
            email_tag.text = email_el.text
            self._logger.info('Added author %s (%s) <%s>', name_tag.text,
                user_tag.text, email_tag.text)
return authors_tag
def _build_contents_tag(self, contents_tag, connect_dom):
used_target_paths = list(set(el.text for el in connect_dom.findall('contents/target/target')))
targets = list(self._iterate_targets(connect_dom))
for target_path_name in used_target_paths:
target_tag = ElementTree.SubElement(contents_tag, 'target')
target_tag.set('name', target_path_name)
self._logger.debug('Adding objects for target: %s', target_path_name)
for target in (t for t in targets if t['target'] == target_path_name):
if target['type'] == 'dir':
self._logger.info('Recursively adding dir: %s::%s',
target['target'], target['path'])
for obj_path, obj_name, obj_hash in self._walk_path(os.path.join(
self._base_dir, self.TARGET_DIRS[target['target']], target['path']),
target['include'], target['ignore']):
parent_tag = self._make_parent_tags(target_tag, obj_path.replace(
os.path.join(self._base_dir, self.TARGET_DIRS[target['target']]), '').strip('/'))
if obj_hash is None:
obj_tag = ElementTree.SubElement(parent_tag, 'dir')
obj_tag.set('name', obj_name)
self._logger.debug('Added directory: %s', obj_name)
else:
obj_tag = ElementTree.SubElement(parent_tag, 'file')
obj_tag.set('name', obj_name)
obj_tag.set('hash', obj_hash)
self._file_list.append(os.path.join(obj_path, obj_name))
self._logger.debug('Added file: %s (%s)', obj_name, obj_hash)
else:
parent_tag = self._make_parent_tags(target_tag, os.path.dirname(target['path']))
obj_name = os.path.basename(target['path'])
obj_hash = self._get_file_hash(os.path.join(
self._base_dir, self.TARGET_DIRS[target['target']],
target['path']))
obj_tag = ElementTree.SubElement(parent_tag, 'file')
obj_tag.set('name', obj_name)
obj_tag.set('hash', obj_hash)
self._file_list.append(os.path.join(self._base_dir,
self.TARGET_DIRS[target['target']], target['path']))
self._logger.info('Added single file: %s::%s (%s)',
target['target'], target['path'], obj_hash)
self._logger.debug('Finished adding targets')
return contents_tag
def _make_parent_tags(self, target_tag, tag_path):
if tag_path:
parts = tag_path.split('/')
current_node = target_tag
for part in parts:
new_node = current_node.find('dir[@name=\'%s\']' % part)
if new_node is None:
new_node = ElementTree.SubElement(current_node, 'dir')
new_node.set('name', part)
current_node = new_node
return current_node
else:
return target_tag
    def _iterate_targets(self, connect_dom):
        # Each contents/<field> element holds one child per target; zip the
        # parallel lists together instead of re-finding them for every index.
        fields = ('target', 'path', 'type', 'include', 'ignore')
        lists = [connect_dom.findall('contents/%s/%s' % (field, field))
            for field in fields]
        for values in zip(*lists):
            yield dict((field, el.text) for field, el in zip(fields, values))
def _get_file_hash(self, filename):
with open(filename, 'rb') as f:
return hashlib.md5(f.read()).hexdigest()
def _walk_path(self, path, include, ignore):
for dirpath, dirnames, filenames in os.walk(path):
for filename in filenames:
if (include and re.match(include[1:-1], filename) and not \
(ignore and re.match(ignore[1:-1], filename))):
yield dirpath, filename, self._get_file_hash(os.path.join(dirpath, filename))
for dirname in dirnames:
if (include and re.match(include[1:-1], dirname) and not \
(ignore and re.match(ignore[1:-1], dirname))):
yield dirpath, dirname, None
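    # Note on the [1:-1] slicing above: the include/ignore patterns in the
    # connect file are assumed to arrive wrapped in delimiter characters
    # (e.g. '#regex#'), so the first and last characters are stripped before
    # the remainder is used as a regular expression.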
def _build_dependencies_tag(self, dependencies_tag, connect_dom):
req_tag = ElementTree.SubElement(dependencies_tag, 'required')
php_tag = ElementTree.SubElement(req_tag, 'php')
min_tag = ElementTree.SubElement(php_tag, 'min')
min_tag.text = connect_dom.findtext('depends_php_min')
max_tag = ElementTree.SubElement(php_tag, 'max')
max_tag.text = connect_dom.findtext('depends_php_max')
        self._logger.debug('Finished adding dependencies')
return dependencies_tag
def _get_module_dom(self, ext_name):
fn = os.path.join(self._base_dir, 'app/etc/modules', ext_name + '.xml')
self._logger.debug('Using extension config file: %s', fn)
return ElementTree.parse(fn)
def _get_config_dom(self, ext_name, codepool):
        ns, ext = ext_name.split('_', 1)
fn = os.path.join(self._base_dir, 'app/code', codepool, ns, ext, 'etc', 'config.xml')
self._logger.debug('Using extension module file: %s', fn)
return ElementTree.parse(fn)
def _get_git_hash(self):
"""Get the current git commit hash
Blatently stolen from:
https://github.com/overviewer/Minecraft-Overviewer/blob/master/overviewer_core/util.py#L40
"""
try:
with open(os.path.join(self._base_dir, '.git', 'HEAD'), 'r') as head_file:
ref = head_file.read().strip()
if ref[:5] == 'ref: ':
with open(os.path.join(self._base_dir, '.git', ref[5:]), 'r') as commit_file:
return commit_file.read().strip()
else:
                    return ref
except Exception as err:
            self._logger.warning("Couldn't read the git commit hash: %s :: %s",
err.__class__.__name__, err)
return 'UNKNOWN'
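    # For reference, .git/HEAD holds either a symbolic ref such as
    #   ref: refs/heads/master
    # (handled by the first branch above) or, on a detached HEAD, the bare
    # commit hash itself (returned directly).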
def _php_syntax_check(self, filename):
self._logger.debug('Checking PHP syntax for file: %s', filename)
return self._run_quiet(self.BIN_PHP, '-l', filename)
def _xml_syntax_check(self, filename):
self._logger.debug('Checking XML syntax for file: %s', filename)
return self._run_quiet(self.BIN_XMLLINT, '--format', filename)
def _bash_syntax_check(self, filename):
self._logger.debug('Checking Bash syntax for file: %s', filename)
return self._run_quiet(self.BIN_BASH, '-n', filename)
def _gcc_syntax_check(self, filename):
self._logger.debug('Checking C syntax for file: %s', filename)
return self._run_quiet(self.BIN_GCC, '-fsyntax-only', filename)
def _run_quiet(self, *pargs):
with open('/dev/null', 'w') as dev_null:
return not bool(subprocess.call(pargs, stdin=None, stdout=dev_null,
stderr=dev_null))
def main(base_path, pkg_desc_file, skip_tarball=False, tarball=None, keep_package_xml=False,
debug=False, skip_syntax_check=False, **kwargs):
pkgr = Magento_Packager(base_path, debug=debug)
pkg_xml = pkgr.build_package_xml(pkg_desc_file)
if not skip_syntax_check:
if not pkgr.do_syntax_check():
raise SystemExit('Syntax check failed!')
if not skip_tarball:
pkgr.build_tarball(pkg_xml, tarball_name=tarball,
keep_pkg_xml=keep_package_xml)
if __name__ == '__main__':
import sys
import optparse
logging.basicConfig()
parser = optparse.OptionParser()
parser.add_option('-d', '--debug', action='store_true',
default=os.environ.get('MPKG_DEV', False))
parser.add_option('-p', '--keep-package-xml', action='store_true', default=False)
parser.add_option('-t', '--tarball', action='store', default=None)
parser.add_option('-T', '--skip-tarball', action='store_true', default=False)
parser.add_option('-S', '--skip-syntax-check', action='store_true', default=False)
opts, args = parser.parse_args()
base_path = os.path.dirname(os.path.dirname(os.path.abspath(sys.argv[0])))
if len(args):
main(base_path, args[0], **vars(opts))
else:
print 'Missing package definition file argument (mage-package.xml)!'
|
nexcess/magento-two-factor-auth
|
build/build_package.py
|
Python
|
gpl-2.0
| 17,309
|
from __future__ import unicode_literals
import json
import re
import time
import warnings
import hashlib
from django import forms
from django.conf import settings
from django.contrib import messages
from django.contrib.auth import login as django_login, get_backends
from django.contrib.auth import logout as django_logout, authenticate
from django.core.cache import cache
from django.core.mail import EmailMultiAlternatives, EmailMessage
from django.core.urlresolvers import reverse
from django.http import HttpResponse
from django.http import HttpResponseRedirect
from django.template.loader import render_to_string
from django.template import TemplateDoesNotExist
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
try:
from django.utils.encoding import force_text
except ImportError:
from django.utils.encoding import force_unicode as force_text
from ..utils import (build_absolute_uri, get_current_site,
generate_unique_username,
get_user_model, import_attribute,
resolve_url)
from . import app_settings
class DefaultAccountAdapter(object):
# Don't bother turning this into a setting, as changing this also
# requires changing the accompanying form error message. So if you
# need to change any of this, simply override clean_username().
username_regex = re.compile(r'^[\w.@+-]+$')
error_messages = {
'invalid_username':
_('Usernames can only contain letters, digits and @/./+/-/_.'),
'username_blacklisted':
        _('This username cannot be used. Please choose a different one.'),
'username_taken':
_('This username is already taken. Please choose another.'),
'too_many_login_attempts':
_('Too many failed login attempts. Try again later.')
}
def __init__(self, request=None):
self.request = request
def stash_verified_email(self, request, email):
request.session['account_verified_email'] = email
def unstash_verified_email(self, request):
ret = request.session.get('account_verified_email')
request.session['account_verified_email'] = None
return ret
def stash_user(self, request, user):
request.session['account_user'] = user
def unstash_user(self, request):
return request.session.pop('account_user', None)
def is_email_verified(self, request, email):
"""
Checks whether or not the email address is already verified
beyond allauth scope, for example, by having accepted an
invitation before signing up.
"""
ret = False
verified_email = request.session.get('account_verified_email')
if verified_email:
ret = verified_email.lower() == email.lower()
return ret
def format_email_subject(self, subject):
prefix = app_settings.EMAIL_SUBJECT_PREFIX
if prefix is None:
site = get_current_site(self.request)
prefix = "[{name}] ".format(name=site.name)
return prefix + force_text(subject)
def render_mail(self, template_prefix, email, context):
"""
Renders an e-mail to `email`. `template_prefix` identifies the
e-mail that is to be sent, e.g. "account/email/email_confirmation"
"""
subject = render_to_string('{0}_subject.txt'.format(template_prefix),
context)
# remove superfluous line breaks
subject = " ".join(subject.splitlines()).strip()
subject = self.format_email_subject(subject)
bodies = {}
for ext in ['html', 'txt']:
try:
template_name = '{0}_message.{1}'.format(template_prefix, ext)
bodies[ext] = render_to_string(template_name,
context).strip()
except TemplateDoesNotExist:
if ext == 'txt' and not bodies:
# We need at least one body
raise
if 'txt' in bodies:
msg = EmailMultiAlternatives(subject,
bodies['txt'],
settings.DEFAULT_FROM_EMAIL,
[email])
if 'html' in bodies:
msg.attach_alternative(bodies['html'], 'text/html')
else:
msg = EmailMessage(subject,
bodies['html'],
settings.DEFAULT_FROM_EMAIL,
[email])
msg.content_subtype = 'html' # Main content is now text/html
return msg
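    # Example of the template resolution above: with
    # template_prefix = 'account/email/email_confirmation' this renders
    # 'account/email/email_confirmation_subject.txt' for the subject plus the
    # '..._message.txt' and/or '..._message.html' templates for the body.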
def send_mail(self, template_prefix, email, context):
msg = self.render_mail(template_prefix, email, context)
msg.send()
def get_login_redirect_url(self, request):
"""
Returns the default URL to redirect to after logging in. Note
that URLs passed explicitly (e.g. by passing along a `next`
GET parameter) take precedence over the value returned here.
"""
assert request.user.is_authenticated()
url = getattr(settings, "LOGIN_REDIRECT_URLNAME", None)
if url:
warnings.warn("LOGIN_REDIRECT_URLNAME is deprecated, simply"
" use LOGIN_REDIRECT_URL with a URL name",
DeprecationWarning)
else:
url = settings.LOGIN_REDIRECT_URL
return resolve_url(url)
def get_logout_redirect_url(self, request):
"""
Returns the URL to redirect to after the user logs out. Note that
        this method is also invoked if you attempt to log out while no user
        is logged in. Therefore, request.user is not guaranteed to be an
authenticated user.
"""
return resolve_url(app_settings.LOGOUT_REDIRECT_URL)
def get_email_confirmation_redirect_url(self, request):
"""
The URL to return to after successful e-mail confirmation.
"""
if request.user.is_authenticated():
if app_settings.EMAIL_CONFIRMATION_AUTHENTICATED_REDIRECT_URL:
return \
app_settings.EMAIL_CONFIRMATION_AUTHENTICATED_REDIRECT_URL
else:
return self.get_login_redirect_url(request)
else:
return app_settings.EMAIL_CONFIRMATION_ANONYMOUS_REDIRECT_URL
def is_open_for_signup(self, request):
"""
Checks whether or not the site is open for signups.
        Besides simply returning True/False, you can also intervene in the
        regular flow by raising an ImmediateHttpResponse.
"""
return True
def new_user(self, request):
"""
Instantiates a new User instance.
"""
user = get_user_model()()
return user
def populate_username(self, request, user):
"""
Fills in a valid username, if required and missing. If the
username is already present it is assumed to be valid
(unique).
"""
from .utils import user_username, user_email, user_field
first_name = user_field(user, 'first_name')
last_name = user_field(user, 'last_name')
email = user_email(user)
username = user_username(user)
if app_settings.USER_MODEL_USERNAME_FIELD:
user_username(user,
username
or self.generate_unique_username([first_name,
last_name,
email,
'user']))
def generate_unique_username(self, txts, regex=None):
return generate_unique_username(txts, regex)
def save_user(self, request, user, form, commit=True):
"""
Saves a new `User` instance using information provided in the
signup form.
"""
from .utils import user_username, user_email, user_field
data = form.cleaned_data
first_name = data.get('first_name')
last_name = data.get('last_name')
email = data.get('email')
username = data.get('username')
user_email(user, email)
user_username(user, username)
if first_name:
user_field(user, 'first_name', first_name)
if last_name:
user_field(user, 'last_name', last_name)
if 'password1' in data:
user.set_password(data["password1"])
else:
user.set_unusable_password()
self.populate_username(request, user)
if commit:
            # Ability not to commit makes it easier to derive from this
            # adapter and add custom processing before the actual save.
user.save()
return user
def clean_username(self, username, shallow=False):
"""
Validates the username. You can hook into this if you want to
(dynamically) restrict what usernames can be chosen.
"""
if not self.username_regex.match(username):
raise forms.ValidationError(
self.error_messages['invalid_username'])
# TODO: Add regexp support to USERNAME_BLACKLIST
username_blacklist_lower = [ub.lower()
for ub in app_settings.USERNAME_BLACKLIST]
if username.lower() in username_blacklist_lower:
raise forms.ValidationError(
self.error_messages['username_blacklisted'])
# Skipping database lookups when shallow is True, needed for unique
# username generation.
if not shallow:
username_field = app_settings.USER_MODEL_USERNAME_FIELD
assert username_field
user_model = get_user_model()
try:
query = {username_field + '__iexact': username}
user_model.objects.get(**query)
except user_model.DoesNotExist:
return username
raise forms.ValidationError(
self.error_messages['username_taken'])
return username
def clean_email(self, email):
"""
Validates an email value. You can hook into this if you want to
(dynamically) restrict what email addresses can be chosen.
"""
return email
def clean_password(self, password):
"""
Validates a password. You can hook into this if you want to
        restrict the allowed password choices.
"""
min_length = app_settings.PASSWORD_MIN_LENGTH
if len(password) < min_length:
raise forms.ValidationError(_("Password must be a minimum of {0} "
"characters.").format(min_length))
return password
def add_message(self, request, level, message_template,
message_context=None, extra_tags=''):
"""
Wrapper of `django.contrib.messages.add_message`, that reads
the message text from a template.
"""
if 'django.contrib.messages' in settings.INSTALLED_APPS:
try:
if message_context is None:
message_context = {}
message = render_to_string(message_template,
message_context).strip()
if message:
messages.add_message(request, level, message,
extra_tags=extra_tags)
except TemplateDoesNotExist:
pass
def ajax_response(self, request, response, redirect_to=None, form=None):
data = {}
status = response.status_code
if redirect_to:
status = 200
data['location'] = redirect_to
if form:
if form.is_valid():
status = 200
else:
status = 400
data['form_errors'] = form._errors
if hasattr(response, 'render'):
response.render()
data['html'] = response.content.decode('utf8')
return HttpResponse(json.dumps(data),
status=status,
content_type='application/json')
def login(self, request, user):
# HACK: This is not nice. The proper Django way is to use an
# authentication backend
if not hasattr(user, 'backend'):
from .auth_backends import AuthenticationBackend
backends = get_backends()
for backend in backends:
if isinstance(backend, AuthenticationBackend):
# prefer our own backend
break
else:
# Pick one
backend = backends[0]
backend_path = '.'.join([backend.__module__,
backend.__class__.__name__])
user.backend = backend_path
django_login(request, user)
def logout(self, request):
django_logout(request)
def confirm_email(self, request, email_address):
"""
Marks the email address as confirmed on the db
"""
email_address.verified = True
email_address.set_as_primary(conditional=True)
email_address.save()
def set_password(self, user, password):
user.set_password(password)
user.save()
def get_user_search_fields(self):
user = get_user_model()()
return filter(lambda a: a and hasattr(user, a),
[app_settings.USER_MODEL_USERNAME_FIELD,
'first_name', 'last_name', 'email'])
def is_safe_url(self, url):
from django.utils.http import is_safe_url
return is_safe_url(url)
def get_email_confirmation_url(self, request, emailconfirmation):
"""Constructs the email confirmation (activation) url.
Note that if you have architected your system such that email
confirmations are sent outside of the request context `request`
can be `None` here.
"""
url = reverse(
"account_confirm_email",
args=[emailconfirmation.key])
ret = build_absolute_uri(
request,
url)
return ret
def send_confirmation_mail(self, request, emailconfirmation, signup):
current_site = get_current_site(request)
activate_url = self.get_email_confirmation_url(
request,
emailconfirmation)
ctx = {
"user": emailconfirmation.email_address.user,
"activate_url": activate_url,
"current_site": current_site,
"key": emailconfirmation.key,
}
if signup:
email_template = 'account/email/email_confirmation_signup'
else:
email_template = 'account/email/email_confirmation'
self.send_mail(email_template,
emailconfirmation.email_address.email,
ctx)
def respond_user_inactive(self, request, user):
return HttpResponseRedirect(
reverse('account_inactive'))
def respond_email_verification_sent(self, request, user):
return HttpResponseRedirect(
reverse('account_email_verification_sent'))
def _get_login_attempts_cache_key(self, request, **credentials):
site = get_current_site(request)
login = credentials.get('email', credentials.get('username', ''))
login_key = hashlib.sha256(login.encode('utf8')).hexdigest()
return 'allauth/login_attempts@{site_id}:{login}'.format(
site_id=site.pk,
login=login_key)
def pre_authenticate(self, request, **credentials):
if app_settings.LOGIN_ATTEMPTS_LIMIT:
cache_key = self._get_login_attempts_cache_key(
request, **credentials)
login_data = cache.get(cache_key, None)
if login_data:
dt = timezone.now()
current_attempt_time = time.mktime(dt.timetuple())
if len(login_data) >= app_settings.LOGIN_ATTEMPTS_LIMIT and current_attempt_time < \
(login_data[-1] + app_settings.LOGIN_ATTEMPTS_TIMEOUT):
raise forms.ValidationError(
self.error_messages['too_many_login_attempts'])
def authenticate(self, request, **credentials):
"""Only authenticates, does not actually login. See `login`"""
self.pre_authenticate(request, **credentials)
user = authenticate(**credentials)
if user:
cache_key = self._get_login_attempts_cache_key(
request, **credentials)
cache.delete(cache_key)
else:
self.authentication_failed(request, **credentials)
return user
def authentication_failed(self, request, **credentials):
cache_key = self._get_login_attempts_cache_key(request, **credentials)
data = cache.get(cache_key, [])
dt = timezone.now()
data.append(time.mktime(dt.timetuple()))
cache.set(cache_key, data, app_settings.LOGIN_ATTEMPTS_TIMEOUT)
def get_adapter(request=None):
return import_attribute(app_settings.ADAPTER)(request)
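# Usage sketch (standard allauth wiring; the subclass below is hypothetical):
# point the ACCOUNT_ADAPTER setting at a subclass to customize behaviour.
#
#   # myproject/adapter.py
#   from allauth.account.adapter import DefaultAccountAdapter
#
#   class NoSignupAdapter(DefaultAccountAdapter):
#       def is_open_for_signup(self, request):
#           return False  # close the site for new registrations
#
#   # settings.py
#   ACCOUNT_ADAPTER = 'myproject.adapter.NoSignupAdapter'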
|
Alexander-M-Waldman/local_currency_site
|
lib/python2.7/site-packages/allauth/account/adapter.py
|
Python
|
gpl-3.0
| 17,306
|
"""Functions for reading/writing to protobufs."""
import struct
from typing import Union
from typing import BinaryIO
from google.protobuf.reflection import GeneratedProtocolMessageType
import numpy
def read_proto(
path: str,
Proto: GeneratedProtocolMessageType
) -> 'Protobuf':
"""Reads a protobuf from a .proto file.
Parameters
----------
path
Path to the .proto file.
    Proto
Protocol message class (from the generated protobuf module).
Returns
-------
GeneratedProtocolMessageType
The parsed protobuf.
"""
proto = Proto()
with open(path, 'rb') as proto_file:
proto.ParseFromString(proto_file.read())
return proto
def write_protos(path: str, metadata: bytes=b''):
"""Serialises many protobufs to a file.
Parameters
----------
path
Path to binary file. Will be overwritten.
metadata
Optional bytestring to prepend to the file.
Notes
-----
Coroutine. Accepts protobufs, or None to terminate and close file.
"""
with open(path, 'wb') as proto_file:
# Write metadata.
proto_file.write(struct.pack('<Q', len(metadata)))
proto_file.write(metadata)
# Write protobufs.
proto = yield
while proto:
proto = proto.SerializeToString()
# Protobufs are not self-delimiting, so we need to store the length
# of each protobuf that we write. We will do this with an unsigned
# long long (Q).
length = struct.pack('<Q', len(proto))
proto_file.write(length)
proto_file.write(proto)
proto = yield
while True:
proto = yield
if proto:
raise RuntimeError('Cannot write protobuf to closed file.')
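# Usage sketch for the write_protos coroutine (assuming a generated message
# class `Example` from some compiled protobuf module):
#
#   writer = write_protos('protos.bin', metadata=b'v1')
#   next(writer)             # prime the coroutine; opens the file
#   writer.send(example_1)   # each protobuf is written length-prefixed
#   writer.send(example_2)
#   writer.send(None)        # a falsy value terminates and closes the file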
def write_proto(path: str, proto: 'Protobuf'):
"""Serialises a protobuf to a file.
Parameters
----------
path
Path to binary file. Will be overwritten.
proto
Protobuf to write to file.
"""
with open(path, 'wb') as proto_file:
proto = proto.SerializeToString()
proto_file.write(proto)
def _read_metadata(proto_file: BinaryIO) -> bytes:
"""Reads metadata from a protobufs file.
Notes
-----
Internal use. For external API, use read_metadata.
Parameters
----------
proto_file
Binary file.
Returns
-------
bytes
Metadata.
"""
metadata_length = proto_file.read(8) # Long long
metadata_length, = struct.unpack('<Q', metadata_length)
return proto_file.read(metadata_length)
def read_metadata(file: Union[str, BinaryIO]) -> bytes:
"""Reads metadata from a protobufs file.
Parameters
----------
file
Path to binary file, or file itself.
Returns
-------
bytes
Metadata.
"""
try:
return _read_metadata(file)
except AttributeError:
# Not a file-like object, so open the file.
with open(file, 'rb') as proto_file:
return _read_metadata(proto_file)
def _read_protos(
proto_file: BinaryIO,
Proto: GeneratedProtocolMessageType
) -> 'GeneratedProtocolMessageType()':
"""Reads many protobufs from a file.
Notes
-----
Internal use. For external API, use read_protos.
Parameters
----------
proto_file
Binary file.
    Proto
Protocol message class (from the generated protobuf module).
Yields
-------
GeneratedProtocolMessageType
A parsed protobuf.
"""
# This is essentially the inverse of the write_protos function.
# Skip the metadata.
metadata_length = proto_file.read(8) # Long long
metadata_length, = struct.unpack('<Q', metadata_length)
proto_file.read(metadata_length)
length = proto_file.read(8) # long long
while length:
length, = struct.unpack('<Q', length)
proto = Proto()
proto.ParseFromString(proto_file.read(length))
yield proto
length = proto_file.read(8)
def read_protos(
file: Union[str, BinaryIO],
Proto: GeneratedProtocolMessageType
) -> 'GeneratedProtocolMessageType()':
"""Reads many protobufs from a file.
Parameters
----------
file
Path to binary file, or file itself.
    Proto
Protocol message class (from the generated protobuf module).
Yields
-------
GeneratedProtocolMessageType
A parsed protobuf.
"""
try:
yield from _read_protos(file, Proto)
except AttributeError:
# Not a file-like object, so open the file.
with open(file, 'rb') as proto_file:
yield from _read_protos(proto_file, Proto)
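# Reading back is symmetric to write_protos above, e.g. (with the same
# assumed `Example` message class):
#
#   for proto in read_protos('protos.bin', Example):
#       ...  # each item is a parsed Example message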
def get_ndarray(data: list, shape: tuple, dtype: str) -> numpy.ndarray:
"""Converts a list of values into an array.
Parameters
----------
data
Raw array data.
    shape
Shape of the resulting array.
    dtype
Data type of the resulting array.
Returns
-------
numpy.ndarray
Array with the given data, shape, and dtype.
"""
return numpy.array(data, dtype=dtype).reshape(tuple(shape))
|
chengsoonong/acton
|
acton/proto/io.py
|
Python
|
bsd-3-clause
| 5,189
|
from GuitarScene import *
|
EdPassos/fofix
|
src/views/GuitarScene/__init__.py
|
Python
|
gpl-2.0
| 25
|
#!/usr/bin/env python
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Django middleware for NDB."""
__author__ = 'James A. Morrison'
from . import eventloop, tasklets
class NdbDjangoMiddleware(object):
"""Django middleware for NDB.
To use NDB with django, add
'ndb.NdbDjangoMiddleware',
to the MIDDLEWARE_CLASSES entry in your Django settings.py file.
Or, if you are using the ndb version from the SDK, use
'google.appengine.ext.ndb.NdbDjangoMiddleware',
It's best to insert it in front of any other middleware classes,
since some other middleware may make datastore calls and those won't be
handled properly if that middleware is invoked before this middleware.
See http://docs.djangoproject.com/en/dev/topics/http/middleware/.
"""
def process_request(self, unused_request):
"""Called by Django before deciding which view to execute."""
# Compare to the first half of toplevel() in context.py.
tasklets._state.clear_all_pending()
# Create and install a new context.
ctx = tasklets.make_default_context()
tasklets.set_context(ctx)
@staticmethod
def _finish():
# Compare to the finally clause in toplevel() in context.py.
ctx = tasklets.get_context()
tasklets.set_context(None)
ctx.flush().check_success()
eventloop.run() # Ensure writes are flushed, etc.
def process_response(self, request, response):
"""Called by Django just before returning a response."""
self._finish()
return response
def process_exception(self, unused_request, unused_exception):
"""Called by Django when a view raises an exception."""
self._finish()
return None
|
ychen820/microblog
|
y/google-cloud-sdk/platform/google_appengine/google/appengine/ext/ndb/django_middleware.py
|
Python
|
bsd-3-clause
| 2,158
|
# Copyright (C) 2008-2010 Adam Olsen
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
#
# The developers of the Exaile media player hereby grant permission
# for non-GPL compatible GStreamer and Exaile plugins to be used and
# distributed together with GStreamer and Exaile. This permission is
# above and beyond the permissions granted by the GPL license by which
# Exaile is covered. If you modify this code, you may extend this
# exception to your version of the code, but you are not obligated to
# do so. If you do not wish to do so, delete this exception statement
# from your version.
"""
Collection of useful stock MenuItems for use with xlgui.widgets.menu
"""
# TODO: how should we document standardization of context's
# selected-(items|tracks) ?
from gi.repository import Gio
from gi.repository import GLib
from gi.repository import Gtk
from xl import common, player, playlist, settings, trax
from xl.nls import gettext as _
from xlgui.widgets import dialogs, rating, menu
from xlgui import properties
### TRACKS ITEMS ###
# These items act on a set of Tracks, by default 'selected-tracks' from
# the parent's context, but custom accessors are allowed via the
# get_tracks_func kwarg
def generic_get_playlist_func(parent, context):
return context.get('selected-playlist', None)
def generic_get_tracks_func(parent, context):
return context.get('selected-tracks', [])
class RatingMenuItem(menu.MenuItem):
"""
A menu item displaying rating images
and allowing for selection of ratings
"""
def __init__(self, name, after, get_tracks_func=generic_get_tracks_func):
menu.MenuItem.__init__(self, name, self.factory, after)
self.get_tracks_func = get_tracks_func
self.rating_set = False
# TODO: For accessibility it would be nice to add mnemonics or some
# other key shortcut thing to the RatingMenu, e.g. "+" and "-"
def factory(self, menu, parent, context):
# don't show rating widget for computed track selections (see #340)
if context.get('needs-computing'):
return
item = rating.RatingMenuItem()
item.connect('show', self.on_show, menu, parent, context)
self._rating_changed_id = item.connect(
'rating-changed', self.on_rating_changed, menu, parent, context
)
return item
# @common.threaded
# TODO: ASYNC
# most of this function isn't safe to use from a (interacts with UI elements)
def on_show(self, widget, menu, parent, context):
"""
Updates the menu item on show
"""
tracks = self.get_tracks_func(parent, context)
rating = trax.util.get_rating_from_tracks(tracks)
widget.disconnect(self._rating_changed_id)
widget.props.rating = rating
self._rating_changed_id = widget.connect(
'rating-changed', self.on_rating_changed, menu, parent, context
)
def on_rating_changed(self, widget, rating, menu, parent, context):
"""
Passes the 'rating-changed' signal
"""
tracks = self.get_tracks_func(parent, context)
for track in tracks:
track.set_rating(rating)
def _enqueue_cb(widget, name, parent, context, get_tracks_func):
tracks = get_tracks_func(parent, context)
player.QUEUE.extend(tracks)
def EnqueueMenuItem(name, after, get_tracks_func=generic_get_tracks_func):
return menu.simple_menu_item(
name,
after,
_("En_queue"),
'list-add',
_enqueue_cb,
callback_args=[get_tracks_func],
)
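# Usage sketch (the registration mechanism is an assumption based on Exaile's
# provider pattern; Exaile menus are typically assembled from items registered
# with xl.providers):
#
#   from xl import providers
#   providers.register('playlist-context-menu',
#                      EnqueueMenuItem('enqueue', ['top-sep']))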
# TODO: move logic into (GUI?) playlist
def _append_cb(widget, name, parent, context, get_tracks_func, replace=False):
from xlgui import main
page = main.get_selected_playlist()
if not page:
return
pl = page.playlist
if replace:
pl.clear()
offset = len(pl)
tracks = get_tracks_func(parent, context)
# This is well-intentioned, but it leads to odd effects (see #147)
# -> this is more proof that the playlist needs to handle this logic
# sort_by, reverse = page.view.get_sort_by()
# tracks = trax.sort_tracks(sort_by, tracks, reverse=reverse,
# artist_compilations=True)
pl.extend(tracks)
if settings.get_option('playlist/append_menu_starts_playback', False):
if not player.PLAYER.current:
page.view.play_track_at(offset, tracks[0])
def ReplaceCurrentMenuItem(name, after, get_tracks_func=generic_get_tracks_func):
return menu.simple_menu_item(
name,
after,
_("_Replace Current"),
None,
_append_cb,
callback_args=[get_tracks_func, True],
)
def AppendMenuItem(name, after, get_tracks_func=generic_get_tracks_func):
return menu.simple_menu_item(
name,
after,
_("_Append to Current"),
'list-add',
_append_cb,
callback_args=[get_tracks_func],
)
def _properties_cb(widget, name, parent, context, get_tracks_func, dialog_parent):
tracks = get_tracks_func(parent, context)
if tracks:
properties.TrackPropertiesDialog(dialog_parent, tracks)
def PropertiesMenuItem(
name, after, get_tracks_func=generic_get_tracks_func, dialog_parent=None
):
return menu.simple_menu_item(
name,
after,
_("_Track Properties"),
'document-properties',
_properties_cb,
callback_args=[get_tracks_func, dialog_parent],
)
def _open_directory_cb(widget, name, parent, context, get_tracks_func):
try:
track = get_tracks_func(parent, context)[0]
except IndexError:
return
common.open_file_directory(track.get_loc_for_io())
def OpenDirectoryMenuItem(name, after, get_tracks_func=generic_get_tracks_func):
return menu.simple_menu_item(
name,
after,
_("_Open Directory"),
'folder-open',
_open_directory_cb,
callback_args=[get_tracks_func],
)
def generic_trash_tracks_func(parent, context, tracks):
for track in tracks:
gfile = Gio.File.new_for_uri(track.get_loc_for_io())
gfile.trash()
def generic_delete_tracks_func(parent, context, tracks):
for track in tracks:
gfile = Gio.File.new_for_uri(track.get_loc_for_io())
gfile.delete()
def _on_trash_tracks(
widget,
name,
parent,
context,
get_tracks_func,
trash_tracks_func,
delete_tracks_func,
):
tracks = get_tracks_func(parent, context)
try:
trash_tracks_func(parent, context, tracks)
except GLib.GError:
dialog = Gtk.MessageDialog(
parent=parent.parent,
message_type=Gtk.MessageType.WARNING,
text=_(
'The files cannot be moved to the Trash. '
'Delete them permanently from the disk?'
),
)
dialog.add_buttons(
Gtk.STOCK_CANCEL,
Gtk.ResponseType.CANCEL,
Gtk.STOCK_DELETE,
Gtk.ResponseType.OK,
)
dialog.set_alternative_button_order_from_array(
(Gtk.ResponseType.OK, Gtk.ResponseType.CANCEL)
)
if dialog.run() == Gtk.ResponseType.OK:
delete_tracks_func(parent, context, tracks)
dialog.destroy()
def TrashMenuItem(
name,
after,
get_tracks_func=generic_get_tracks_func,
trash_tracks_func=generic_trash_tracks_func,
delete_tracks_func=generic_delete_tracks_func,
):
return menu.simple_menu_item(
name,
after,
_('_Move to Trash'),
'user-trash',
_on_trash_tracks,
callback_args=[get_tracks_func, trash_tracks_func, delete_tracks_func],
)
### END TRACKS ITEMS ###
### PLAYLIST ITEMS ###
# These items act on a playlist, by default 'selected-playlist' from
# the parent's context, but custom accessors are allowed via the
# get_pl_func kwarg
def RenamePlaylistMenuItem(name, after, get_pl_func=generic_get_playlist_func):
return menu.simple_menu_item(
name,
after,
_('_Rename'),
'accessories-text-editor',
lambda w, n, o, c: o.rename_playlist(get_pl_func(o, c)),
condition_fn=lambda n, p, c: not isinstance(
c['selected-playlist'], playlist.SmartPlaylist
),
)
def EditPlaylistMenuItem(name, after, get_pl_func=generic_get_playlist_func):
return menu.simple_menu_item(
name,
after,
_('_Edit'),
'accessories-text-editor',
lambda w, n, o, c: o.edit_smart_playlist(get_pl_func(o, c)),
condition_fn=lambda n, p, c: isinstance(
c['selected-playlist'], playlist.SmartPlaylist
),
)
def ExportPlaylistMenuItem(name, after, get_pl_func=generic_get_playlist_func):
return menu.simple_menu_item(
name,
after,
_('E_xport Playlist'),
'document-save-as',
lambda w, n, o, c: dialogs.export_playlist_dialog(get_pl_func(o, c)),
)
def ExportPlaylistFilesMenuItem(name, after, get_pl_func=generic_get_playlist_func):
return menu.simple_menu_item(
name,
after,
_('Export _Files'),
'document-save-as',
lambda w, n, o, c: dialogs.export_playlist_files(get_pl_func(o, c)),
)
def DeletePlaylistMenuItem(name, after, get_pl_func=generic_get_playlist_func):
return menu.simple_menu_item(
name,
after,
_('_Delete Playlist'),
'edit-delete',
lambda w, n, o, c: o.remove_playlist(get_pl_func(o, c)),
)
### END PLAYLIST ITEMS ###
|
sjohannes/exaile
|
xlgui/widgets/menuitems.py
|
Python
|
gpl-2.0
| 10,232
|
def answer(population, x, y, strength):
    """Flood-fill outward from (x, y): every connected cell whose
    resistance is <= strength becomes infected and is marked with -1."""
    max_y = len(population)
    max_x = len(population[0])
    visited = set()
    to_visit = [(y, x)]
    # Iterating over the list while appending to it makes it act as a
    # breadth-first queue; the visited set prevents reprocessing cells.
    for y, x in to_visit:
        if (y, x) in visited:
            continue
        visited.add((y, x))
        if population[y][x] <= strength:
            population[y][x] = -1
            if x + 1 < max_x:
                to_visit.append((y, x + 1))
            if y + 1 < max_y:
                to_visit.append((y + 1, x))
            if x - 1 >= 0:
                to_visit.append((y, x - 1))
            if y - 1 >= 0:
                to_visit.append((y - 1, x))
    return population
if __name__ == '__main__':
print answer([[1, 2, 3], [2, 3, 4], [3, 2, 1]], 0, 0, 2)
print answer([[6, 7, 2, 7, 6], [6, 3, 1, 4, 7], [0, 2, 4, 1, 10], [8, 1, 1, 4, 9], [8, 7, 4, 9, 9]], 2, 1, 5)
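# Expected results for the two calls above:
#   [[-1, -1, 3], [-1, 3, 4], [3, 2, 1]]
#   [[6, 7, -1, 7, 6], [6, -1, -1, -1, 7], [-1, -1, -1, -1, 10],
#    [8, -1, -1, -1, 9], [8, 7, -1, 9, 9]]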
|
smartypants2712/python
|
zombie_infection.py
|
Python
|
mit
| 1,015
|
# Copyright (c) 2010-2015 Benjamin Peterson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""Utilities for writing code that runs on Python 2 and 3"""
from __future__ import absolute_import
import functools
import itertools
import operator
import sys
import types
__author__ = "Benjamin Peterson <benjamin@python.org>"
__version__ = "1.10.0"
# pylint: disable-all
# pylint: skip-file
# Useful for very coarse version differentiation.
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
PY34 = sys.version_info[0:2] >= (3, 4)
if PY3:
string_types = str,
integer_types = int,
class_types = type,
text_type = str
binary_type = bytes
MAXSIZE = sys.maxsize
else:
string_types = basestring,
integer_types = (int, long)
class_types = (type, types.ClassType)
text_type = unicode
binary_type = str
if sys.platform.startswith("java"):
# Jython always uses 32 bits.
MAXSIZE = int((1 << 31) - 1)
else:
# It's possible to have sizeof(long) != sizeof(Py_ssize_t).
class X(object):
def __len__(self):
return 1 << 31
try:
len(X())
except OverflowError:
# 32-bit
MAXSIZE = int((1 << 31) - 1)
else:
# 64-bit
MAXSIZE = int((1 << 63) - 1)
del X
def _add_doc(func, doc):
"""Add documentation to a function."""
func.__doc__ = doc
def _import_module(name):
"""Import module, returning the module after the last dot."""
__import__(name)
return sys.modules[name]
class _LazyDescr(object):
def __init__(self, name):
self.name = name
def __get__(self, obj, tp):
result = self._resolve()
setattr(obj, self.name, result) # Invokes __set__.
try:
# This is a bit ugly, but it avoids running this again by
# removing this descriptor.
delattr(obj.__class__, self.name)
except AttributeError:
pass
return result
class MovedModule(_LazyDescr):
def __init__(self, name, old, new=None):
super(MovedModule, self).__init__(name)
if PY3:
if new is None:
new = name
self.mod = new
else:
self.mod = old
def _resolve(self):
return _import_module(self.mod)
def __getattr__(self, attr):
_module = self._resolve()
value = getattr(_module, attr)
setattr(self, attr, value)
return value
class _LazyModule(types.ModuleType):
def __init__(self, name):
super(_LazyModule, self).__init__(name)
self.__doc__ = self.__class__.__doc__
def __dir__(self):
attrs = ["__doc__", "__name__"]
attrs += [attr.name for attr in self._moved_attributes]
return attrs
# Subclasses should override this
_moved_attributes = []
class MovedAttribute(_LazyDescr):
def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
super(MovedAttribute, self).__init__(name)
if PY3:
if new_mod is None:
new_mod = name
self.mod = new_mod
if new_attr is None:
if old_attr is None:
new_attr = name
else:
new_attr = old_attr
self.attr = new_attr
else:
self.mod = old_mod
if old_attr is None:
old_attr = name
self.attr = old_attr
def _resolve(self):
module = _import_module(self.mod)
return getattr(module, self.attr)
class _SixMetaPathImporter(object):
"""
A meta path importer to import six.moves and its submodules.
    This class implements a PEP 302 finder and loader. It should be
    compatible with Python 2.5 and all existing versions of Python 3.
"""
def __init__(self, six_module_name):
self.name = six_module_name
self.known_modules = {}
def _add_module(self, mod, *fullnames):
for fullname in fullnames:
self.known_modules[self.name + "." + fullname] = mod
def _get_module(self, fullname):
return self.known_modules[self.name + "." + fullname]
def find_module(self, fullname, path=None):
if fullname in self.known_modules:
return self
return None
def __get_module(self, fullname):
try:
return self.known_modules[fullname]
except KeyError:
raise ImportError("This loader does not know module " + fullname)
def load_module(self, fullname):
try:
# in case of a reload
return sys.modules[fullname]
except KeyError:
pass
mod = self.__get_module(fullname)
if isinstance(mod, MovedModule):
mod = mod._resolve()
else:
mod.__loader__ = self
sys.modules[fullname] = mod
return mod
def is_package(self, fullname):
"""
        Return true if the named module is a package.
We need this method to get correct spec objects with
Python 3.4 (see PEP451)
"""
return hasattr(self.__get_module(fullname), "__path__")
def get_code(self, fullname):
"""Return None
Required, if is_package is implemented"""
self.__get_module(fullname) # eventually raises ImportError
return None
get_source = get_code # same as get_code
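# Usage sketch: once _importer (created just below) has been appended to
# sys.meta_path at the bottom of this module, both spellings resolve to the
# same module object on Python 2 and Python 3:
#     from six.moves import configparser
#     from six.moves.urllib.parse import urlencode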
_importer = _SixMetaPathImporter(__name__)
class _MovedItems(_LazyModule):
"""Lazy loading of moved objects"""
__path__ = [] # mark as package
_moved_attributes = [
MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
MovedAttribute("intern", "__builtin__", "sys"),
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
MovedAttribute("reduce", "__builtin__", "functools"),
MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
MovedAttribute("StringIO", "StringIO", "io"),
MovedAttribute("UserDict", "UserDict", "collections"),
MovedAttribute("UserList", "UserList", "collections"),
MovedAttribute("UserString", "UserString", "collections"),
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
MovedModule("builtins", "__builtin__"),
MovedModule("configparser", "ConfigParser"),
MovedModule("copyreg", "copy_reg"),
MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
MovedModule("http_cookies", "Cookie", "http.cookies"),
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
MovedModule("html_parser", "HTMLParser", "html.parser"),
MovedModule("http_client", "httplib", "http.client"),
MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
MovedModule("cPickle", "cPickle", "pickle"),
MovedModule("queue", "Queue"),
MovedModule("reprlib", "repr"),
MovedModule("socketserver", "SocketServer"),
MovedModule("_thread", "thread", "_thread"),
MovedModule("tkinter", "Tkinter"),
MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
MovedModule("tkinter_colorchooser", "tkColorChooser",
"tkinter.colorchooser"),
MovedModule("tkinter_commondialog", "tkCommonDialog",
"tkinter.commondialog"),
MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
MovedModule("tkinter_font", "tkFont", "tkinter.font"),
MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
"tkinter.simpledialog"),
MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
]
# Add windows specific modules.
if sys.platform == "win32":
_moved_attributes += [
MovedModule("winreg", "_winreg"),
]
for attr in _moved_attributes:
setattr(_MovedItems, attr.name, attr)
if isinstance(attr, MovedModule):
_importer._add_module(attr, "moves." + attr.name)
del attr
_MovedItems._moved_attributes = _moved_attributes
moves = _MovedItems(__name__ + ".moves")
_importer._add_module(moves, "moves")
class Module_six_moves_urllib_parse(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_parse"""
_urllib_parse_moved_attributes = [
MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
MovedAttribute("urljoin", "urlparse", "urllib.parse"),
MovedAttribute("urlparse", "urlparse", "urllib.parse"),
MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
MovedAttribute("quote", "urllib", "urllib.parse"),
MovedAttribute("quote_plus", "urllib", "urllib.parse"),
MovedAttribute("unquote", "urllib", "urllib.parse"),
MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
MovedAttribute("urlencode", "urllib", "urllib.parse"),
MovedAttribute("splitquery", "urllib", "urllib.parse"),
MovedAttribute("splittag", "urllib", "urllib.parse"),
MovedAttribute("splituser", "urllib", "urllib.parse"),
MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
MovedAttribute("uses_params", "urlparse", "urllib.parse"),
MovedAttribute("uses_query", "urlparse", "urllib.parse"),
MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
]
for attr in _urllib_parse_moved_attributes:
setattr(Module_six_moves_urllib_parse, attr.name, attr)
del attr
Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
"moves.urllib_parse", "moves.urllib.parse")
class Module_six_moves_urllib_error(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_error"""
_urllib_error_moved_attributes = [
MovedAttribute("URLError", "urllib2", "urllib.error"),
MovedAttribute("HTTPError", "urllib2", "urllib.error"),
MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
]
for attr in _urllib_error_moved_attributes:
setattr(Module_six_moves_urllib_error, attr.name, attr)
del attr
Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
"moves.urllib_error", "moves.urllib.error")
class Module_six_moves_urllib_request(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_request"""
_urllib_request_moved_attributes = [
MovedAttribute("urlopen", "urllib2", "urllib.request"),
MovedAttribute("install_opener", "urllib2", "urllib.request"),
MovedAttribute("build_opener", "urllib2", "urllib.request"),
MovedAttribute("pathname2url", "urllib", "urllib.request"),
MovedAttribute("url2pathname", "urllib", "urllib.request"),
MovedAttribute("getproxies", "urllib", "urllib.request"),
MovedAttribute("Request", "urllib2", "urllib.request"),
MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
MovedAttribute("FileHandler", "urllib2", "urllib.request"),
MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
MovedAttribute("urlretrieve", "urllib", "urllib.request"),
MovedAttribute("urlcleanup", "urllib", "urllib.request"),
MovedAttribute("URLopener", "urllib", "urllib.request"),
MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
]
for attr in _urllib_request_moved_attributes:
setattr(Module_six_moves_urllib_request, attr.name, attr)
del attr
Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
"moves.urllib_request", "moves.urllib.request")
class Module_six_moves_urllib_response(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_response"""
_urllib_response_moved_attributes = [
MovedAttribute("addbase", "urllib", "urllib.response"),
MovedAttribute("addclosehook", "urllib", "urllib.response"),
MovedAttribute("addinfo", "urllib", "urllib.response"),
MovedAttribute("addinfourl", "urllib", "urllib.response"),
]
for attr in _urllib_response_moved_attributes:
setattr(Module_six_moves_urllib_response, attr.name, attr)
del attr
Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
"moves.urllib_response", "moves.urllib.response")
class Module_six_moves_urllib_robotparser(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_robotparser"""
_urllib_robotparser_moved_attributes = [
MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
]
for attr in _urllib_robotparser_moved_attributes:
setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
del attr
Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
"moves.urllib_robotparser", "moves.urllib.robotparser")
class Module_six_moves_urllib(types.ModuleType):
"""Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
__path__ = [] # mark as package
parse = _importer._get_module("moves.urllib_parse")
error = _importer._get_module("moves.urllib_error")
request = _importer._get_module("moves.urllib_request")
response = _importer._get_module("moves.urllib_response")
robotparser = _importer._get_module("moves.urllib_robotparser")
def __dir__(self):
return ['parse', 'error', 'request', 'response', 'robotparser']
_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
"moves.urllib")
def add_move(move):
"""Add an item to six.moves."""
setattr(_MovedItems, move.name, move)
def remove_move(name):
"""Remove item from six.moves."""
try:
delattr(_MovedItems, name)
except AttributeError:
try:
del moves.__dict__[name]
except KeyError:
raise AttributeError("no such move, %r" % (name,))
if PY3:
_meth_func = "__func__"
_meth_self = "__self__"
_func_closure = "__closure__"
_func_code = "__code__"
_func_defaults = "__defaults__"
_func_globals = "__globals__"
else:
_meth_func = "im_func"
_meth_self = "im_self"
_func_closure = "func_closure"
_func_code = "func_code"
_func_defaults = "func_defaults"
_func_globals = "func_globals"
try:
advance_iterator = next
except NameError:
def advance_iterator(it):
return it.next()
next = advance_iterator
try:
callable = callable
except NameError:
def callable(obj):
return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
if PY3:
def get_unbound_function(unbound):
return unbound
create_bound_method = types.MethodType
def create_unbound_method(func, cls):
return func
Iterator = object
else:
def get_unbound_function(unbound):
return unbound.im_func
def create_bound_method(func, obj):
return types.MethodType(func, obj, obj.__class__)
def create_unbound_method(func, cls):
return types.MethodType(func, None, cls)
class Iterator(object):
def next(self):
return type(self).__next__(self)
callable = callable
_add_doc(get_unbound_function,
"""Get the function out of a possibly unbound function""")
get_method_function = operator.attrgetter(_meth_func)
get_method_self = operator.attrgetter(_meth_self)
get_function_closure = operator.attrgetter(_func_closure)
get_function_code = operator.attrgetter(_func_code)
get_function_defaults = operator.attrgetter(_func_defaults)
get_function_globals = operator.attrgetter(_func_globals)
if PY3:
def iterkeys(d, **kw):
return iter(d.keys(**kw))
def itervalues(d, **kw):
return iter(d.values(**kw))
def iteritems(d, **kw):
return iter(d.items(**kw))
def iterlists(d, **kw):
return iter(d.lists(**kw))
viewkeys = operator.methodcaller("keys")
viewvalues = operator.methodcaller("values")
viewitems = operator.methodcaller("items")
else:
def iterkeys(d, **kw):
return d.iterkeys(**kw)
def itervalues(d, **kw):
return d.itervalues(**kw)
def iteritems(d, **kw):
return d.iteritems(**kw)
def iterlists(d, **kw):
return d.iterlists(**kw)
viewkeys = operator.methodcaller("viewkeys")
viewvalues = operator.methodcaller("viewvalues")
viewitems = operator.methodcaller("viewitems")
_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
_add_doc(iteritems,
"Return an iterator over the (key, value) pairs of a dictionary.")
_add_doc(iterlists,
"Return an iterator over the (key, [values]) pairs of a dictionary.")
if PY3:
def b(s):
return s.encode("latin-1")
def u(s):
return s
unichr = chr
import struct
int2byte = struct.Struct(">B").pack
del struct
byte2int = operator.itemgetter(0)
indexbytes = operator.getitem
iterbytes = iter
import io
StringIO = io.StringIO
BytesIO = io.BytesIO
_assertCountEqual = "assertCountEqual"
if sys.version_info[1] <= 1:
_assertRaisesRegex = "assertRaisesRegexp"
_assertRegex = "assertRegexpMatches"
else:
_assertRaisesRegex = "assertRaisesRegex"
_assertRegex = "assertRegex"
else:
def b(s):
return s
# Workaround for standalone backslash
def u(s):
return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
unichr = unichr
int2byte = chr
def byte2int(bs):
return ord(bs[0])
def indexbytes(buf, i):
return ord(buf[i])
iterbytes = functools.partial(itertools.imap, ord)
import StringIO
StringIO = BytesIO = StringIO.StringIO
_assertCountEqual = "assertItemsEqual"
_assertRaisesRegex = "assertRaisesRegexp"
_assertRegex = "assertRegexpMatches"
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")
def assertCountEqual(self, *args, **kwargs):
return getattr(self, _assertCountEqual)(*args, **kwargs)
def assertRaisesRegex(self, *args, **kwargs):
return getattr(self, _assertRaisesRegex)(*args, **kwargs)
def assertRegex(self, *args, **kwargs):
return getattr(self, _assertRegex)(*args, **kwargs)
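# Usage sketch inside a unittest.TestCase method. Note the explicit `self`
# as the first argument, since these helpers are plain functions, not
# methods:
#     six.assertRaisesRegex(self, ValueError, "invalid literal", int, "x")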
if PY3:
exec_ = getattr(moves.builtins, "exec")
def reraise(tp, value, tb=None):
if value is None:
value = tp()
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
else:
def exec_(_code_, _globs_=None, _locs_=None):
"""Execute code in a namespace."""
if _globs_ is None:
frame = sys._getframe(1)
_globs_ = frame.f_globals
if _locs_ is None:
_locs_ = frame.f_locals
del frame
elif _locs_ is None:
_locs_ = _globs_
exec("""exec _code_ in _globs_, _locs_""")
exec_("""def reraise(tp, value, tb=None):
raise tp, value, tb
""")
if sys.version_info[:2] == (3, 2):
exec_("""def raise_from(value, from_value):
if from_value is None:
raise value
raise value from from_value
""")
elif sys.version_info[:2] > (3, 2):
exec_("""def raise_from(value, from_value):
raise value from from_value
""")
else:
def raise_from(value, from_value):
raise value
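# Usage sketch: chain exceptions portably (the chain is a no-op before
# Python 3; `do_parse` and `data` are placeholder names):
#     try:
#         do_parse(data)
#     except KeyError as exc:
#         six.raise_from(ValueError("malformed payload"), exc)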
print_ = getattr(moves.builtins, "print", None)
if print_ is None:
def print_(*args, **kwargs):
"""The new-style print function for Python 2.4 and 2.5."""
fp = kwargs.pop("file", sys.stdout)
if fp is None:
return
def write(data):
if not isinstance(data, basestring):
data = str(data)
# If the file has an encoding, encode unicode with it.
if (isinstance(fp, file) and
isinstance(data, unicode) and
fp.encoding is not None):
errors = getattr(fp, "errors", None)
if errors is None:
errors = "strict"
data = data.encode(fp.encoding, errors)
fp.write(data)
want_unicode = False
sep = kwargs.pop("sep", None)
if sep is not None:
if isinstance(sep, unicode):
want_unicode = True
elif not isinstance(sep, str):
raise TypeError("sep must be None or a string")
end = kwargs.pop("end", None)
if end is not None:
if isinstance(end, unicode):
want_unicode = True
elif not isinstance(end, str):
raise TypeError("end must be None or a string")
if kwargs:
raise TypeError("invalid keyword arguments to print()")
if not want_unicode:
for arg in args:
if isinstance(arg, unicode):
want_unicode = True
break
if want_unicode:
newline = unicode("\n")
space = unicode(" ")
else:
newline = "\n"
space = " "
if sep is None:
sep = space
if end is None:
end = newline
for i, arg in enumerate(args):
if i:
write(sep)
write(arg)
write(end)
if sys.version_info[:2] < (3, 3):
_print = print_
def print_(*args, **kwargs):
fp = kwargs.get("file", sys.stdout)
flush = kwargs.pop("flush", False)
_print(*args, **kwargs)
if flush and fp is not None:
fp.flush()
_add_doc(reraise, """Reraise an exception.""")
if sys.version_info[0:2] < (3, 4):
def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
updated=functools.WRAPPER_UPDATES):
def wrapper(f):
f = functools.wraps(wrapped, assigned, updated)(f)
f.__wrapped__ = wrapped
return f
return wrapper
else:
wraps = functools.wraps
def with_metaclass(meta, *bases):
"""Create a base class with a metaclass."""
# This requires a bit of explanation: the basic idea is to make a dummy
# metaclass for one level of class instantiation that replaces itself with
# the actual metaclass.
class metaclass(meta):
def __new__(cls, name, this_bases, d):
return meta(name, bases, d)
return type.__new__(metaclass, 'temporary_class', (), {})
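# Usage sketch: give a class a metaclass that works on Python 2 and 3 --
#     class Meta(type):
#         pass
#     class Base(six.with_metaclass(Meta, object)):
#         pass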
def add_metaclass(metaclass):
"""Class decorator for creating a class with a metaclass."""
def wrapper(cls):
orig_vars = cls.__dict__.copy()
slots = orig_vars.get('__slots__')
if slots is not None:
if isinstance(slots, str):
slots = [slots]
for slots_var in slots:
orig_vars.pop(slots_var)
orig_vars.pop('__dict__', None)
orig_vars.pop('__weakref__', None)
return metaclass(cls.__name__, cls.__bases__, orig_vars)
return wrapper
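# Usage sketch: the decorator form of the same idea --
#     @six.add_metaclass(Meta)
#     class Base(object):
#         pass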
def python_2_unicode_compatible(klass):
"""
A decorator that defines __unicode__ and __str__ methods under Python 2.
Under Python 3 it does nothing.
To support Python 2 and 3 with a single code base, define a __str__ method
returning text and apply this decorator to the class.
"""
if PY2:
if '__str__' not in klass.__dict__:
raise ValueError("@python_2_unicode_compatible cannot be applied "
"to %s because it doesn't define __str__()." %
klass.__name__)
klass.__unicode__ = klass.__str__
klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
return klass
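# Usage sketch: define __str__ once, returning text, and let the decorator
# derive the Python 2 __unicode__/__str__ pair --
#     @six.python_2_unicode_compatible
#     class Greeting(object):
#         def __str__(self):
#             return u'hello'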
# Complete the moves implementation.
# This code is at the end of this module to speed up module loading.
# Turn this module into a package.
__path__ = [] # required for PEP 302 and PEP 451
__package__ = __name__ # see PEP 366 @ReservedAssignment
if globals().get("__spec__") is not None:
__spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable
# Remove other six meta path importers, since they cause problems. This can
# happen if six is removed from sys.modules and then reloaded. (Setuptools does
# this for some reason.)
if sys.meta_path:
for i, importer in enumerate(sys.meta_path):
# Here's some real nastiness: Another "instance" of the six module might
# be floating around. Therefore, we can't use isinstance() to check for
# the six meta path importer, since the other six instance will have
# inserted an importer with different class.
if (type(importer).__name__ == "_SixMetaPathImporter" and
importer.name == __name__):
del sys.meta_path[i]
break
del i, importer
# Finally, add the importer to the meta path import hook.
sys.meta_path.append(_importer)
|
endlessm/chromium-browser
|
third_party/catapult/third_party/six/six.py
|
Python
|
bsd-3-clause
| 30,142
|
# Copyright (C) 2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import unittest
import dns.dnssec
import dns.name
import dns.rdata
import dns.rdataclass
import dns.rdatatype
import dns.rrset
abs_dnspython_org = dns.name.from_text('dnspython.org')
abs_keys = { abs_dnspython_org :
dns.rrset.from_text('dnspython.org.', 3600, 'IN', 'DNSKEY',
'257 3 5 AwEAAenVTr9L1OMlL1/N2ta0Qj9LLLnnmFWIr1dJoAsWM9BQfsbV7kFZ XbAkER/FY9Ji2o7cELxBwAsVBuWn6IUUAJXLH74YbC1anY0lifjgt29z SwDzuB7zmC7yVYZzUunBulVW4zT0tg1aePbpVL2EtTL8VzREqbJbE25R KuQYHZtFwG8S4iBxJUmT2Bbd0921LLxSQgVoFXlQx/gFV2+UERXcJ5ce iX6A6wc02M/pdg/YbJd2rBa0MYL3/Fz/Xltre0tqsImZGxzi6YtYDs45 NC8gH+44egz82e2DATCVM1ICPmRDjXYTLldQiWA2ZXIWnK0iitl5ue24 7EsWJefrIhE=',
'256 3 5 AwEAAdSSghOGjU33IQZgwZM2Hh771VGXX05olJK49FxpSyuEAjDBXY58 LGU9R2Zgeecnk/b9EAhFu/vCV9oECtiTCvwuVAkt9YEweqYDluQInmgP NGMJCKdSLlnX93DkjDw8rMYv5dqXCuSGPlKChfTJOLQxIAxGloS7lL+c 0CTZydAF')
}
abs_keys_duplicate_keytag = { abs_dnspython_org :
dns.rrset.from_text('dnspython.org.', 3600, 'IN', 'DNSKEY',
'257 3 5 AwEAAenVTr9L1OMlL1/N2ta0Qj9LLLnnmFWIr1dJoAsWM9BQfsbV7kFZ XbAkER/FY9Ji2o7cELxBwAsVBuWn6IUUAJXLH74YbC1anY0lifjgt29z SwDzuB7zmC7yVYZzUunBulVW4zT0tg1aePbpVL2EtTL8VzREqbJbE25R KuQYHZtFwG8S4iBxJUmT2Bbd0921LLxSQgVoFXlQx/gFV2+UERXcJ5ce iX6A6wc02M/pdg/YbJd2rBa0MYL3/Fz/Xltre0tqsImZGxzi6YtYDs45 NC8gH+44egz82e2DATCVM1ICPmRDjXYTLldQiWA2ZXIWnK0iitl5ue24 7EsWJefrIhE=',
'256 3 5 AwEAAdSSg++++THIS/IS/NOT/THE/CORRECT/KEY++++++++++++++++ ++++++++++++++++++++++++++++++++++++++++++++++++++++++++ ++++++++++++++++++++++++++++++++++++++++++++++++++++++++ AaOSydAF',
'256 3 5 AwEAAdSSghOGjU33IQZgwZM2Hh771VGXX05olJK49FxpSyuEAjDBXY58 LGU9R2Zgeecnk/b9EAhFu/vCV9oECtiTCvwuVAkt9YEweqYDluQInmgP NGMJCKdSLlnX93DkjDw8rMYv5dqXCuSGPlKChfTJOLQxIAxGloS7lL+c 0CTZydAF')
}
rel_keys = { dns.name.empty :
dns.rrset.from_text('@', 3600, 'IN', 'DNSKEY',
'257 3 5 AwEAAenVTr9L1OMlL1/N2ta0Qj9LLLnnmFWIr1dJoAsWM9BQfsbV7kFZ XbAkER/FY9Ji2o7cELxBwAsVBuWn6IUUAJXLH74YbC1anY0lifjgt29z SwDzuB7zmC7yVYZzUunBulVW4zT0tg1aePbpVL2EtTL8VzREqbJbE25R KuQYHZtFwG8S4iBxJUmT2Bbd0921LLxSQgVoFXlQx/gFV2+UERXcJ5ce iX6A6wc02M/pdg/YbJd2rBa0MYL3/Fz/Xltre0tqsImZGxzi6YtYDs45 NC8gH+44egz82e2DATCVM1ICPmRDjXYTLldQiWA2ZXIWnK0iitl5ue24 7EsWJefrIhE=',
'256 3 5 AwEAAdSSghOGjU33IQZgwZM2Hh771VGXX05olJK49FxpSyuEAjDBXY58 LGU9R2Zgeecnk/b9EAhFu/vCV9oECtiTCvwuVAkt9YEweqYDluQInmgP NGMJCKdSLlnX93DkjDw8rMYv5dqXCuSGPlKChfTJOLQxIAxGloS7lL+c 0CTZydAF')
}
when = 1290250287
abs_soa = dns.rrset.from_text('dnspython.org.', 3600, 'IN', 'SOA',
'howl.dnspython.org. hostmaster.dnspython.org. 2010020047 3600 1800 604800 3600')
abs_other_soa = dns.rrset.from_text('dnspython.org.', 3600, 'IN', 'SOA',
'foo.dnspython.org. hostmaster.dnspython.org. 2010020047 3600 1800 604800 3600')
abs_soa_rrsig = dns.rrset.from_text('dnspython.org.', 3600, 'IN', 'RRSIG',
'SOA 5 2 3600 20101127004331 20101119213831 61695 dnspython.org. sDUlltRlFTQw5ITFxOXW3TgmrHeMeNpdqcZ4EXxM9FHhIlte6V9YCnDw t6dvM9jAXdIEi03l9H/RAd9xNNW6gvGMHsBGzpvvqFQxIBR2PoiZA1mX /SWHZFdbt4xjYTtXqpyYvrMK0Dt7bUYPadyhPFCJ1B+I8Zi7B5WJEOd0 8vs=')
rel_soa = dns.rrset.from_text('@', 3600, 'IN', 'SOA',
'howl hostmaster 2010020047 3600 1800 604800 3600')
rel_other_soa = dns.rrset.from_text('@', 3600, 'IN', 'SOA',
'foo hostmaster 2010020047 3600 1800 604800 3600')
rel_soa_rrsig = dns.rrset.from_text('@', 3600, 'IN', 'RRSIG',
'SOA 5 2 3600 20101127004331 20101119213831 61695 @ sDUlltRlFTQw5ITFxOXW3TgmrHeMeNpdqcZ4EXxM9FHhIlte6V9YCnDw t6dvM9jAXdIEi03l9H/RAd9xNNW6gvGMHsBGzpvvqFQxIBR2PoiZA1mX /SWHZFdbt4xjYTtXqpyYvrMK0Dt7bUYPadyhPFCJ1B+I8Zi7B5WJEOd0 8vs=')
sep_key = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.DNSKEY,
'257 3 5 AwEAAenVTr9L1OMlL1/N2ta0Qj9LLLnnmFWIr1dJoAsWM9BQfsbV7kFZ XbAkER/FY9Ji2o7cELxBwAsVBuWn6IUUAJXLH74YbC1anY0lifjgt29z SwDzuB7zmC7yVYZzUunBulVW4zT0tg1aePbpVL2EtTL8VzREqbJbE25R KuQYHZtFwG8S4iBxJUmT2Bbd0921LLxSQgVoFXlQx/gFV2+UERXcJ5ce iX6A6wc02M/pdg/YbJd2rBa0MYL3/Fz/Xltre0tqsImZGxzi6YtYDs45 NC8gH+44egz82e2DATCVM1ICPmRDjXYTLldQiWA2ZXIWnK0iitl5ue24 7EsWJefrIhE=')
good_ds = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.DS,
'57349 5 2 53A79A3E7488AB44FFC56B2D1109F0699D1796DD977E72108B841F96 E47D7013')
when2 = 1290425644
abs_example = dns.name.from_text('example')
abs_dsa_keys = { abs_example :
dns.rrset.from_text('example.', 86400, 'IN', 'DNSKEY',
'257 3 3 CI3nCqyJsiCJHTjrNsJOT4RaszetzcJPYuoH3F9ZTVt3KJXncCVR3bwn 1w0iavKljb9hDlAYSfHbFCp4ic/rvg4p1L8vh5s8ToMjqDNl40A0hUGQ Ybx5hsECyK+qHoajilUX1phYSAD8d9WAGO3fDWzUPBuzR7o85NiZCDxz yXuNVfni0uhj9n1KYhEO5yAbbruDGN89wIZcxMKuQsdUY2GYD93ssnBv a55W6XRABYWayKZ90WkRVODLVYLSn53Pj/wwxGH+XdhIAZJXimrZL4yl My7rtBsLMqq8Ihs4Tows7LqYwY7cp6y/50tw6pj8tFqMYcPUjKZV36l1 M/2t5BVg3i7IK61Aidt6aoC3TDJtzAxg3ZxfjZWJfhHjMJqzQIfbW5b9 q1mjFsW5EUv39RaNnX+3JWPRLyDqD4pIwDyqfutMsdk/Py3paHn82FGp CaOg+nicqZ9TiMZURN/XXy5JoXUNQ3RNvbHCUiPUe18KUkY6mTfnyHld 1l9YCWmzXQVClkx/hOYxjJ4j8Ife58+Obu5X',
'256 3 3 CJE1yb9YRQiw5d2xZrMUMR+cGCTt1bp1KDCefmYKmS+Z1+q9f42ETVhx JRiQwXclYwmxborzIkSZegTNYIV6mrYwbNB27Q44c3UGcspb3PiOw5TC jNPRYEcdwGvDZ2wWy+vkSV/S9tHXY8O6ODiE6abZJDDg/RnITyi+eoDL R3KZ5n/V1f1T1b90rrV6EewhBGQJpQGDogaXb2oHww9Tm6NfXyo7SoMM pbwbzOckXv+GxRPJIQNSF4D4A9E8XCksuzVVdE/0lr37+uoiAiPia38U 5W2QWe/FJAEPLjIp2eTzf0TrADc1pKP1wrA2ASpdzpm/aX3IB5RPp8Ew S9U72eBFZJAUwg635HxJVxH1maG6atzorR566E+e0OZSaxXS9o1o6QqN 3oPlYLGPORDiExilKfez3C/x/yioOupW9K5eKF0gmtaqrHX0oq9s67f/ RIM2xVaKHgG9Vf2cgJIZkhv7sntujr+E4htnRmy9P9BxyFxsItYxPI6Z bzygHAZpGhlI/7ltEGlIwKxyTK3ZKBm67q7B')
}
abs_dsa_soa = dns.rrset.from_text('example.', 86400, 'IN', 'SOA',
'ns1.example. hostmaster.example. 2 10800 3600 604800 86400')
abs_other_dsa_soa = dns.rrset.from_text('example.', 86400, 'IN', 'SOA',
'ns1.example. hostmaster.example. 2 10800 3600 604800 86401')
abs_dsa_soa_rrsig = dns.rrset.from_text('example.', 86400, 'IN', 'RRSIG',
'SOA 3 1 86400 20101129143231 20101122112731 42088 example. CGul9SuBofsktunV8cJs4eRs6u+3NCS3yaPKvBbD+pB2C76OUXDZq9U=')
example_sep_key = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.DNSKEY,
'257 3 3 CI3nCqyJsiCJHTjrNsJOT4RaszetzcJPYuoH3F9ZTVt3KJXncCVR3bwn 1w0iavKljb9hDlAYSfHbFCp4ic/rvg4p1L8vh5s8ToMjqDNl40A0hUGQ Ybx5hsECyK+qHoajilUX1phYSAD8d9WAGO3fDWzUPBuzR7o85NiZCDxz yXuNVfni0uhj9n1KYhEO5yAbbruDGN89wIZcxMKuQsdUY2GYD93ssnBv a55W6XRABYWayKZ90WkRVODLVYLSn53Pj/wwxGH+XdhIAZJXimrZL4yl My7rtBsLMqq8Ihs4Tows7LqYwY7cp6y/50tw6pj8tFqMYcPUjKZV36l1 M/2t5BVg3i7IK61Aidt6aoC3TDJtzAxg3ZxfjZWJfhHjMJqzQIfbW5b9 q1mjFsW5EUv39RaNnX+3JWPRLyDqD4pIwDyqfutMsdk/Py3paHn82FGp CaOg+nicqZ9TiMZURN/XXy5JoXUNQ3RNvbHCUiPUe18KUkY6mTfnyHld 1l9YCWmzXQVClkx/hOYxjJ4j8Ife58+Obu5X')
example_ds_sha1 = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.DS,
'18673 3 1 71b71d4f3e11bbd71b4eff12cde69f7f9215bbe7')
example_ds_sha256 = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.DS,
'18673 3 2 eb8344cbbf07c9d3d3d6c81d10c76653e28d8611a65e639ef8f716e4e4e5d913')
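# The fixtures above pair RRsets with their RRSIGs and DNSKEY sets; `when`
# and `when2` pin the validation time inside the signatures'
# inception/expiration windows so the tests stay deterministic.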
class DNSSECValidatorTestCase(unittest.TestCase):
def testAbsoluteRSAGood(self):
dns.dnssec.validate(abs_soa, abs_soa_rrsig, abs_keys, None, when)
def testDuplicateKeytag(self):
dns.dnssec.validate(abs_soa, abs_soa_rrsig, abs_keys_duplicate_keytag, None, when)
def testAbsoluteRSABad(self):
def bad():
dns.dnssec.validate(abs_other_soa, abs_soa_rrsig, abs_keys, None,
when)
self.failUnlessRaises(dns.dnssec.ValidationFailure, bad)
def testRelativeRSAGood(self):
dns.dnssec.validate(rel_soa, rel_soa_rrsig, rel_keys,
abs_dnspython_org, when)
def testRelativeRSABad(self):
def bad():
dns.dnssec.validate(rel_other_soa, rel_soa_rrsig, rel_keys,
abs_dnspython_org, when)
self.failUnlessRaises(dns.dnssec.ValidationFailure, bad)
def testMakeSHA256DS(self):
ds = dns.dnssec.make_ds(abs_dnspython_org, sep_key, 'SHA256')
self.failUnless(ds == good_ds)
def testAbsoluteDSAGood(self):
dns.dnssec.validate(abs_dsa_soa, abs_dsa_soa_rrsig, abs_dsa_keys, None,
when2)
def testAbsoluteDSABad(self):
def bad():
dns.dnssec.validate(abs_other_dsa_soa, abs_dsa_soa_rrsig,
abs_dsa_keys, None, when2)
self.failUnlessRaises(dns.dnssec.ValidationFailure, bad)
def testMakeExampleSHA1DS(self):
ds = dns.dnssec.make_ds(abs_example, example_sep_key, 'SHA1')
self.failUnless(ds == example_ds_sha1)
def testMakeExampleSHA256DS(self):
ds = dns.dnssec.make_ds(abs_example, example_sep_key, 'SHA256')
self.failUnless(ds == example_ds_sha256)
if __name__ == '__main__':
    import_ok = False
    try:
        import Crypto.Util.number
        import_ok = True
    except ImportError:
        pass
    if import_ok:
        unittest.main()
    else:
        print('skipping DNSSEC tests because pycrypto is not installed')
|
bl4ckmesa/bound
|
dnspython-1.11.1/tests/dnssec.py
|
Python
|
mit
| 10,453
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2012 VMware, Inc.
# Copyright (c) 2011 Citrix Systems, Inc.
# Copyright 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
A connection to the VMware ESX platform.
**Related Flags**
:vmwareapi_host_ip: IP address or Name of VMware ESX/VC server.
:vmwareapi_host_username: Username for connection to VMware ESX/VC Server.
:vmwareapi_host_password: Password for connection to VMware ESX/VC Server.
:vmwareapi_cluster_name: Name of a VMware Cluster ComputeResource.
:vmwareapi_task_poll_interval: The interval (seconds) used for polling of
remote tasks
(default: 5.0).
:vmwareapi_api_retry_count: The API retry count in case of failure such as
network failures (socket errors etc.)
(default: 10).
:vnc_port: VNC starting port (default: 5900)
:vnc_port_total: Total number of VNC ports (default: 10000)
:vnc_password: VNC password
:use_linked_clone: Whether to use linked clone (default: True)
"""
import time
from eventlet import event
from oslo.config import cfg
from nova import exception
from nova.openstack.common import jsonutils
from nova.openstack.common import log as logging
from nova import utils
from nova.virt import driver
from nova.virt.vmwareapi import error_util
from nova.virt.vmwareapi import host
from nova.virt.vmwareapi import vim
from nova.virt.vmwareapi import vim_util
from nova.virt.vmwareapi import vm_util
from nova.virt.vmwareapi import vmops
from nova.virt.vmwareapi import volumeops
LOG = logging.getLogger(__name__)
vmwareapi_opts = [
cfg.StrOpt('vmwareapi_host_ip',
default=None,
help='URL for connection to VMware ESX/VC host. Required if '
'compute_driver is vmwareapi.VMwareESXDriver or '
'vmwareapi.VMwareVCDriver.'),
cfg.StrOpt('vmwareapi_host_username',
default=None,
help='Username for connection to VMware ESX/VC host. '
'Used only if compute_driver is '
'vmwareapi.VMwareESXDriver or vmwareapi.VMwareVCDriver.'),
cfg.StrOpt('vmwareapi_host_password',
default=None,
help='Password for connection to VMware ESX/VC host. '
'Used only if compute_driver is '
'vmwareapi.VMwareESXDriver or vmwareapi.VMwareVCDriver.',
secret=True),
cfg.StrOpt('vmwareapi_cluster_name',
default=None,
help='Name of a VMware Cluster ComputeResource. '
'Used only if compute_driver is '
'vmwareapi.VMwareVCDriver.'),
cfg.FloatOpt('vmwareapi_task_poll_interval',
default=5.0,
help='The interval used for polling of remote tasks. '
'Used only if compute_driver is '
'vmwareapi.VMwareESXDriver or '
'vmwareapi.VMwareVCDriver.'),
cfg.IntOpt('vmwareapi_api_retry_count',
default=10,
help='The number of times we retry on failures, e.g., '
'socket error, etc. '
'Used only if compute_driver is '
'vmwareapi.VMwareESXDriver or vmwareapi.VMwareVCDriver.'),
cfg.IntOpt('vnc_port',
default=5900,
help='VNC starting port'),
cfg.IntOpt('vnc_port_total',
default=10000,
help='Total number of VNC ports'),
cfg.StrOpt('vnc_password',
default=None,
help='VNC password',
secret=True),
cfg.BoolOpt('use_linked_clone',
default=True,
help='Whether to use linked clone'),
]
CONF = cfg.CONF
CONF.register_opts(vmwareapi_opts)
TIME_BETWEEN_API_CALL_RETRIES = 2.0
class Failure(Exception):
"""Base Exception class for handling task failures."""
def __init__(self, details):
self.details = details
def __str__(self):
return str(self.details)
class VMwareESXDriver(driver.ComputeDriver):
"""The ESX host connection object."""
def __init__(self, virtapi, read_only=False, scheme="https"):
super(VMwareESXDriver, self).__init__(virtapi)
self._host_ip = CONF.vmwareapi_host_ip
host_username = CONF.vmwareapi_host_username
host_password = CONF.vmwareapi_host_password
api_retry_count = CONF.vmwareapi_api_retry_count
if not self._host_ip or host_username is None or host_password is None:
raise Exception(_("Must specify vmwareapi_host_ip,"
"vmwareapi_host_username "
"and vmwareapi_host_password to use"
"compute_driver=vmwareapi.VMwareESXDriver or "
"vmwareapi.VMwareVCDriver"))
self._session = VMwareAPISession(self._host_ip,
host_username, host_password,
api_retry_count, scheme=scheme)
self._cluster_name = CONF.vmwareapi_cluster_name
self._volumeops = volumeops.VMwareVolumeOps(self._session,
self._cluster_name)
self._vmops = vmops.VMwareVMOps(self._session, self.virtapi,
self._volumeops, self._cluster_name)
self._host = host.Host(self._session)
self._host_state = None
@property
def host_state(self):
if not self._host_state:
self._host_state = host.HostState(self._session,
self._host_ip)
return self._host_state
def init_host(self, host):
"""Do the initialization that needs to be done."""
# FIXME(sateesh): implement this
pass
def legacy_nwinfo(self):
return False
def list_instances(self):
"""List VM instances."""
return self._vmops.list_instances()
def spawn(self, context, instance, image_meta, injected_files,
admin_password, network_info=None, block_device_info=None):
"""Create VM instance."""
self._vmops.spawn(context, instance, image_meta, network_info,
block_device_info)
def snapshot(self, context, instance, name, update_task_state):
"""Create snapshot from a running VM instance."""
self._vmops.snapshot(context, instance, name, update_task_state)
def reboot(self, context, instance, network_info, reboot_type,
block_device_info=None, bad_volumes_callback=None):
"""Reboot VM instance."""
self._vmops.reboot(instance, network_info)
def destroy(self, instance, network_info, block_device_info=None,
destroy_disks=True):
"""Destroy VM instance."""
self._vmops.destroy(instance, network_info, destroy_disks)
def pause(self, instance):
"""Pause VM instance."""
self._vmops.pause(instance)
def unpause(self, instance):
"""Unpause paused VM instance."""
self._vmops.unpause(instance)
def suspend(self, instance):
"""Suspend the specified instance."""
self._vmops.suspend(instance)
def resume(self, instance, network_info, block_device_info=None):
"""Resume the suspended VM instance."""
self._vmops.resume(instance)
def rescue(self, context, instance, network_info, image_meta,
rescue_password):
"""Rescue the specified instance."""
self._vmops.rescue(context, instance, network_info, image_meta)
def unrescue(self, instance, network_info):
"""Unrescue the specified instance."""
self._vmops.unrescue(instance)
def power_off(self, instance):
"""Power off the specified instance."""
self._vmops.power_off(instance)
def power_on(self, instance):
"""Power on the specified instance."""
self._vmops.power_on(instance)
def poll_rebooting_instances(self, timeout, instances):
"""Poll for rebooting instances."""
self._vmops.poll_rebooting_instances(timeout, instances)
def get_info(self, instance):
"""Return info about the VM instance."""
return self._vmops.get_info(instance)
def get_diagnostics(self, instance):
"""Return data about VM diagnostics."""
return self._vmops.get_info(instance)
def get_console_output(self, instance):
"""Return snapshot of console."""
return self._vmops.get_console_output(instance)
def get_vnc_console(self, instance):
"""Return link to instance's VNC console."""
return self._vmops.get_vnc_console(instance)
def get_volume_connector(self, instance):
"""Return volume connector information."""
return self._volumeops.get_volume_connector(instance)
def get_host_ip_addr(self):
"""Retrieves the IP address of the ESX host."""
return self._host_ip
def attach_volume(self, connection_info, instance, mountpoint):
"""Attach volume storage to VM instance."""
return self._volumeops.attach_volume(connection_info,
instance,
mountpoint)
def detach_volume(self, connection_info, instance, mountpoint):
"""Detach volume storage to VM instance."""
return self._volumeops.detach_volume(connection_info,
instance,
mountpoint)
def get_console_pool_info(self, console_type):
"""Get info about the host on which the VM resides."""
return {'address': CONF.vmwareapi_host_ip,
'username': CONF.vmwareapi_host_username,
'password': CONF.vmwareapi_host_password}
def get_available_resource(self, nodename):
"""Retrieve resource info.
This method is called when nova-compute launches, and
as part of a periodic task.
:returns: dictionary describing resources
"""
host_stats = self.get_host_stats(refresh=True)
# Updating host information
dic = {'vcpus': host_stats["vcpus"],
'memory_mb': host_stats['host_memory_total'],
'local_gb': host_stats['disk_total'],
'vcpus_used': 0,
'memory_mb_used': host_stats['host_memory_total'] -
host_stats['host_memory_free'],
'local_gb_used': host_stats['disk_used'],
'hypervisor_type': host_stats['hypervisor_type'],
'hypervisor_version': host_stats['hypervisor_version'],
'hypervisor_hostname': host_stats['hypervisor_hostname'],
'cpu_info': jsonutils.dumps(host_stats['cpu_info'])}
return dic
def update_host_status(self):
"""Update the status info of the host, and return those values
to the calling program."""
return self.host_state.update_status()
def get_host_stats(self, refresh=False):
"""Return the current state of the host. If 'refresh' is
True, run the update first."""
return self.host_state.get_host_stats(refresh=refresh)
def host_power_action(self, host, action):
"""Reboots, shuts down or powers up the host."""
return self._host.host_power_action(host, action)
def host_maintenance_mode(self, host, mode):
"""Start/Stop host maintenance window. On start, it triggers
guest VMs evacuation."""
return self._host.host_maintenance_mode(host, mode)
def set_host_enabled(self, host, enabled):
"""Sets the specified host's ability to accept new instances."""
return self._host.set_host_enabled(host, enabled)
def inject_network_info(self, instance, network_info):
"""inject network info for specified instance."""
self._vmops.inject_network_info(instance, network_info)
def plug_vifs(self, instance, network_info):
"""Plug VIFs into networks."""
self._vmops.plug_vifs(instance, network_info)
def unplug_vifs(self, instance, network_info):
"""Unplug VIFs from networks."""
self._vmops.unplug_vifs(instance, network_info)
def list_interfaces(self, instance_name):
"""
Return the IDs of all the virtual network interfaces attached to the
specified instance, as a list. These IDs are opaque to the caller
(they are only useful for giving back to this layer as a parameter to
interface_stats). These IDs only need to be unique for a given
instance.
"""
return self._vmops.list_interfaces(instance_name)
class VMwareVCDriver(VMwareESXDriver):
"""The ESX host connection object."""
def __init__(self, virtapi, read_only=False, scheme="https"):
super(VMwareVCDriver, self).__init__(virtapi)
if not self._cluster_name:
self._cluster = None
else:
self._cluster = vm_util.get_cluster_ref_from_name(
self._session, self._cluster_name)
if self._cluster is None:
raise exception.NotFound(_("VMware Cluster %s is not found")
% self._cluster_name)
self._vc_state = None
@property
def host_state(self):
if not self._vc_state:
self._vc_state = host.VCState(self._session,
self._host_ip,
self._cluster)
return self._vc_state
def migrate_disk_and_power_off(self, context, instance, dest,
instance_type, network_info,
block_device_info=None):
"""
Transfers the disk of a running instance in multiple phases, turning
off the instance before the end.
"""
return self._vmops.migrate_disk_and_power_off(context, instance,
dest, instance_type)
def confirm_migration(self, migration, instance, network_info):
"""Confirms a resize, destroying the source VM."""
self._vmops.confirm_migration(migration, instance, network_info)
def finish_revert_migration(self, instance, network_info,
block_device_info=None):
"""Finish reverting a resize, powering back on the instance."""
self._vmops.finish_revert_migration(instance)
def finish_migration(self, context, migration, instance, disk_info,
network_info, image_meta, resize_instance=False,
block_device_info=None):
"""Completes a resize, turning on the migrated instance."""
self._vmops.finish_migration(context, migration, instance, disk_info,
network_info, image_meta, resize_instance)
def live_migration(self, context, instance_ref, dest,
post_method, recover_method, block_migration=False,
migrate_data=None):
"""Live migration of an instance to another host."""
self._vmops.live_migration(context, instance_ref, dest,
post_method, recover_method,
block_migration)
class VMwareAPISession(object):
"""
Sets up a session with the ESX host and handles all
the calls made to the host.
"""
def __init__(self, host_ip, host_username, host_password,
api_retry_count, scheme="https"):
self._host_ip = host_ip
self._host_username = host_username
self._host_password = host_password
self.api_retry_count = api_retry_count
self._scheme = scheme
self._session_id = None
self.vim = None
self._create_session()
def _get_vim_object(self):
"""Create the VIM Object instance."""
return vim.Vim(protocol=self._scheme, host=self._host_ip)
def _create_session(self):
"""Creates a session with the ESX host."""
while True:
try:
# Login and setup the session with the ESX host for making
# API calls
self.vim = self._get_vim_object()
session = self.vim.Login(
self.vim.get_service_content().sessionManager,
userName=self._host_username,
password=self._host_password)
                # Terminate the earlier session, if possible, to preserve
                # sessions (there is a limit on the number of sessions we
                # can hold open)
if self._session_id:
try:
self.vim.TerminateSession(
self.vim.get_service_content().sessionManager,
sessionId=[self._session_id])
                    except Exception as excep:
# This exception is something we can live with. It is
# just an extra caution on our side. The session may
# have been cleared. We could have made a call to
# SessionIsActive, but that is an overhead because we
# anyway would have to call TerminateSession.
LOG.debug(excep)
self._session_id = session.key
return
            except Exception as excep:
LOG.critical(_("In vmwareapi:_create_session, "
"got this exception: %s") % excep)
raise exception.NovaException(excep)
def __del__(self):
"""Logs-out the session."""
        # Log out to avoid an unnecessary increase in the session count at
        # the ESX host
try:
self.vim.Logout(self.vim.get_service_content().sessionManager)
        except Exception as excep:
# It is just cautionary on our part to do a logout in del just
# to ensure that the session is not left active.
LOG.debug(excep)
def _is_vim_object(self, module):
"""Check if the module is a VIM Object instance."""
return isinstance(module, vim.Vim)
def _call_method(self, module, method, *args, **kwargs):
"""
Calls a method within the module specified with
args provided.
"""
args = list(args)
retry_count = 0
exc = None
last_fault_list = []
while True:
try:
if not self._is_vim_object(module):
# If it is not the first try, then get the latest
# vim object
if retry_count > 0:
args = args[1:]
args = [self.vim] + args
retry_count += 1
temp_module = module
for method_elem in method.split("."):
temp_module = getattr(temp_module, method_elem)
return temp_module(*args, **kwargs)
            except error_util.VimFaultException as excep:
# If it is a Session Fault Exception, it may point
# to a session gone bad. So we try re-creating a session
# and then proceeding ahead with the call.
exc = excep
if error_util.FAULT_NOT_AUTHENTICATED in excep.fault_list:
# Because of the idle session returning an empty
# RetrievePropertiesResponse and also the same is returned
# when there is say empty answer to the query for
# VMs on the host ( as in no VMs on the host), we have no
# way to differentiate.
                    # So if the previous response was also an empty response
# and after creating a new session, we get the same empty
# response, then we are sure of the response being supposed
# to be empty.
if error_util.FAULT_NOT_AUTHENTICATED in last_fault_list:
return []
last_fault_list = excep.fault_list
self._create_session()
else:
                    # No retrying for errors where the API call went through
                    # but failed due to the caller's input; the caller should
                    # handle these errors, e.g. an InvalidArgument fault.
break
            except error_util.SessionOverLoadException as excep:
# For exceptions which may come because of session overload,
# we retry
exc = excep
            except Exception as excep:
# If it is a proper exception, say not having furnished
# proper data in the SOAP call or the retry limit having
# exceeded, we raise the exception
exc = excep
break
# If retry count has been reached then break and
# raise the exception
if retry_count > self.api_retry_count:
break
time.sleep(TIME_BETWEEN_API_CALL_RETRIES)
LOG.critical(_("In vmwareapi:_call_method, "
"got this exception: %s") % exc)
raise
def _get_vim(self):
"""Gets the VIM object reference."""
if self.vim is None:
self._create_session()
return self.vim
def _wait_for_task(self, instance_uuid, task_ref):
"""
Return a Deferred that will give the result of the given task.
The task is polled until it completes.
"""
done = event.Event()
loop = utils.FixedIntervalLoopingCall(self._poll_task, instance_uuid,
task_ref, done)
loop.start(CONF.vmwareapi_task_poll_interval)
ret_val = done.wait()
loop.stop()
return ret_val
def _poll_task(self, instance_uuid, task_ref, done):
"""
        Poll the given task, and fire the given Deferred when we
        get a result.
"""
try:
task_info = self._call_method(vim_util, "get_dynamic_property",
task_ref, "Task", "info")
task_name = task_info.name
if task_info.state in ['queued', 'running']:
return
elif task_info.state == 'success':
LOG.debug(_("Task [%(task_name)s] %(task_ref)s "
"status: success") % locals())
done.send("success")
else:
error_info = str(task_info.error.localizedMessage)
LOG.warn(_("Task [%(task_name)s] %(task_ref)s "
"status: error %(error_info)s") % locals())
done.send_exception(exception.NovaException(error_info))
        except Exception as excep:
LOG.warn(_("In vmwareapi:_poll_task, Got this error %s") % excep)
done.send_exception(excep)
|
Triv90/Nova
|
nova/virt/vmwareapi/driver.py
|
Python
|
apache-2.0
| 23,942
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Task 11"""
ESCAPE_STRING = "\\n\'\""
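# The literal above evaluates to the four characters: backslash, "n",
# single quote, double quote (i.e. \n'" when printed).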
|
MJVarghese/is210-week-03-warmup
|
task_11.py
|
Python
|
mpl-2.0
| 87
|
# -*- coding: utf-8 -*-
#
# diffoscope: in-depth comparison of files, archives, and directories
#
# Copyright © 2018 Chris Lamb <lamby@debian.org>
#
# diffoscope is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# diffoscope is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with diffoscope. If not, see <https://www.gnu.org/licenses/>.
import os
import glob
import diffoscope
BASE_DIR = os.path.dirname(os.path.abspath(diffoscope.__file__))
def test_dos_mbr():
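    # Walk every Python source file shipped with diffoscope and assert that
    # none of them embeds the literal byte string b'DOS/MBR'.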
for x in glob.iglob(os.path.join(BASE_DIR, '**', '*.py'), recursive=True):
with open(x, 'rb') as f:
assert b'DOS/MBR' not in f.read()
|
ReproducibleBuilds/diffoscope
|
tests/test_source.py
|
Python
|
gpl-3.0
| 1,066
|
# This file is part of Indico.
# Copyright (C) 2002 - 2017 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
import platform
from flask import flash, jsonify, redirect
from requests.exceptions import HTTPError, RequestException, Timeout
from werkzeug.urls import url_join
import indico
from indico.core.config import config
from indico.core.db.sqlalchemy.util.queries import get_postgres_version
from indico.modules.admin import RHAdminBase
from indico.modules.cephalopod import cephalopod_settings
from indico.modules.cephalopod.forms import CephalopodForm
from indico.modules.cephalopod.util import register_instance, sync_instance, unregister_instance
from indico.modules.cephalopod.views import WPCephalopod
from indico.modules.core.settings import core_settings
from indico.util.i18n import _
from indico.util.system import get_os
from indico.web.flask.util import url_for
from indico.web.forms.base import FormDefaults
from indico.web.rh import RH
class RHCephalopodBase(RHAdminBase):
pass
class RHCephalopod(RHCephalopodBase):
def _process(self):
form = CephalopodForm(obj=FormDefaults(**cephalopod_settings.get_all()))
if form.validate_on_submit():
return self._process_form(form)
hub_url = url_join(config.COMMUNITY_HUB_URL, 'api/instance/{}'.format(cephalopod_settings.get('uuid')))
cephalopod_settings.set('show_migration_message', False)
return WPCephalopod.render_template('cephalopod.html', 'cephalopod',
affiliation=core_settings.get('site_organization'),
enabled=cephalopod_settings.get('joined'),
form=form,
indico_version=indico.__version__,
instance_url=config.BASE_URL,
language=config.DEFAULT_LOCALE,
operating_system=get_os(),
postgres_version=get_postgres_version(),
python_version=platform.python_version(),
hub_url=hub_url)
def _process_form(self, form):
name = form.contact_name.data
email = form.contact_email.data
enabled = form.joined.data
uuid = cephalopod_settings.get('uuid')
try:
if not enabled:
unregister_instance()
elif enabled and uuid:
sync_instance(name, email)
elif enabled and not uuid:
register_instance(name, email)
except HTTPError as err:
flash(_("Operation failed, the community hub returned: {err.message}").format(err=err), 'error')
except Timeout:
flash(_("The operation timed-out. Please try again in a while."), 'error')
except RequestException as err:
flash(_("Unexpected exception while contacting the Community Hub: {err.message}").format(err=err))
return redirect(url_for('.index'))
class RHCephalopodSync(RHCephalopodBase):
def _process(self):
if not cephalopod_settings.get('joined'):
flash(_("Synchronization is not possible if you don't join the community first."),
'error')
else:
contact_name = cephalopod_settings.get('contact_name')
contact_email = cephalopod_settings.get('contact_email')
try:
sync_instance(contact_name, contact_email)
except HTTPError as err:
flash(_("Synchronization failed, the community hub returned: {err.message}").format(err=err),
'error')
except Timeout:
flash(_("Synchronization timed-out. Please try again in a while."), 'error')
except RequestException as err:
flash(_("Unexpected exception while contacting the Community Hub: {err.message}").format(err=err))
return redirect(url_for('.index'))
class RHSystemInfo(RH):
def _process(self):
stats = {'python_version': platform.python_version(),
'indico_version': indico.__version__,
'operating_system': get_os(),
'postgres_version': get_postgres_version(),
'language': config.DEFAULT_LOCALE,
'debug': config.DEBUG}
return jsonify(stats)
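# RHSystemInfo responds with a JSON document of the form (values are
# illustrative):
#     {"python_version": "2.7.13", "indico_version": "2.0",
#      "operating_system": "...", "postgres_version": "...",
#      "language": "en_GB", "debug": false}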
|
eliasdesousa/indico
|
indico/modules/cephalopod/controllers.py
|
Python
|
gpl-3.0
| 5,171
|
import os
import re
from pip.backwardcompat import urlparse
from pip.util import rmtree, display_path, call_subprocess
from pip.log import logger
from pip.vcs import vcs, VersionControl
_svn_xml_url_re = re.compile('url="([^"]+)"')
_svn_rev_re = re.compile(r'committed-rev="(\d+)"')
_svn_url_re = re.compile(r'URL: (.+)')
_svn_revision_re = re.compile(r'Revision: (.+)')
_svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"')
_svn_info_xml_url_re = re.compile(r'<url>(.*)</url>')
class Subversion(VersionControl):
name = 'svn'
dirname = '.svn'
repo_name = 'checkout'
schemes = ('svn', 'svn+ssh', 'svn+http', 'svn+https', 'svn+svn')
bundle_file = 'svn-checkout.txt'
guide = ('# This was an svn checkout; to make it a checkout again run:\n'
'svn checkout --force -r %(rev)s %(url)s .\n')
def get_info(self, location):
"""Returns (url, revision), where both are strings"""
assert not location.rstrip('/').endswith(self.dirname), \
'Bad directory: %s' % location
output = call_subprocess(
[self.cmd, 'info', location],
show_stdout=False,
extra_environ={'LANG': 'C'},
)
match = _svn_url_re.search(output)
if not match:
logger.warn(
'Cannot determine URL of svn checkout %s' %
display_path(location)
)
return None, None
url = match.group(1).strip()
match = _svn_revision_re.search(output)
if not match:
logger.warn(
'Cannot determine revision of svn checkout %s' %
display_path(location)
)
logger.info('Output that cannot be parsed: \n%s' % output)
return url, None
return url, match.group(1)
def parse_vcs_bundle_file(self, content):
for line in content.splitlines():
if not line.strip() or line.strip().startswith('#'):
continue
            match = re.search(r'^-r\s*([^ ]*)', line)
if not match:
return None, None
rev = match.group(1)
rest = line[match.end():].strip().split(None, 1)[0]
return rest, rev
return None, None
def export(self, location):
"""Export the svn repository at the url to the destination location"""
url, rev = self.get_url_rev()
rev_options = get_rev_options(url, rev)
logger.notify('Exporting svn repository %s to %s' % (url, location))
logger.indent += 2
try:
if os.path.exists(location):
                # Subversion doesn't like to check out over an existing
                # directory; --force fixes this, but was only added in svn 1.5
rmtree(location)
call_subprocess(
[self.cmd, 'export'] + rev_options + [url, location],
filter_stdout=self._filter, show_stdout=False)
finally:
logger.indent -= 2
def switch(self, dest, url, rev_options):
call_subprocess(
[self.cmd, 'switch'] + rev_options + [url, dest])
def update(self, dest, rev_options):
call_subprocess(
[self.cmd, 'update'] + rev_options + [dest])
def obtain(self, dest):
url, rev = self.get_url_rev()
rev_options = get_rev_options(url, rev)
if rev:
rev_display = ' (to revision %s)' % rev
else:
rev_display = ''
if self.check_destination(dest, url, rev_options, rev_display):
logger.notify('Checking out %s%s to %s'
% (url, rev_display, display_path(dest)))
call_subprocess(
[self.cmd, 'checkout', '-q'] + rev_options + [url, dest])
def get_revision(self, location):
"""
Return the maximum revision for all files under a given location
"""
# Note: taken from setuptools.command.egg_info
revision = 0
for base, dirs, files in os.walk(location):
if self.dirname not in dirs:
dirs[:] = []
continue # no sense walking uncontrolled subdirs
dirs.remove(self.dirname)
entries_fn = os.path.join(base, self.dirname, 'entries')
if not os.path.exists(entries_fn):
## FIXME: should we warn?
continue
dirurl, localrev = self._get_svn_url_rev(base)
if base == location:
base_url = dirurl + '/' # save the root url
elif not dirurl or not dirurl.startswith(base_url):
dirs[:] = []
continue # not part of the same svn tree, skip it
revision = max(revision, localrev)
return revision
def get_url_rev(self):
        # hotfix the URL scheme: after removing svn+ from svn+ssh://, re-add it
url, rev = super(Subversion, self).get_url_rev()
if url.startswith('ssh://'):
url = 'svn+' + url
return url, rev
def get_url(self, location):
# In cases where the source is in a subdirectory, not alongside
# setup.py we have to look up in the location until we find a real
# setup.py
orig_location = location
while not os.path.exists(os.path.join(location, 'setup.py')):
last_location = location
location = os.path.dirname(location)
if location == last_location:
# We've traversed up to the root of the filesystem without
# finding setup.py
logger.warn(
"Could not find setup.py for directory %s (tried all "
"parent directories)" %
orig_location
)
return None
return self._get_svn_url_rev(location)[0]
def _get_svn_url_rev(self, location):
from pip.exceptions import InstallationError
f = open(os.path.join(location, self.dirname, 'entries'))
data = f.read()
f.close()
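        # Pre-1.7 working copies keep metadata in a plain-text 'entries' file;
        # its first line is the format number (8, 9 and 10 correspond to
        # svn 1.4, 1.5 and 1.6 respectively).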
if (data.startswith('8')
or data.startswith('9')
or data.startswith('10')):
data = list(map(str.splitlines, data.split('\n\x0c\n')))
del data[0][0] # get rid of the '8'
url = data[0][3]
revs = [int(d[9]) for d in data if len(d) > 9 and d[9]] + [0]
elif data.startswith('<?xml'):
match = _svn_xml_url_re.search(data)
if not match:
raise ValueError('Badly formatted data: %r' % data)
url = match.group(1) # get repository URL
revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)] + [0]
else:
try:
# subversion >= 1.7
xml = call_subprocess(
[self.cmd, 'info', '--xml', location],
show_stdout=False,
)
url = _svn_info_xml_url_re.search(xml).group(1)
revs = [
int(m.group(1)) for m in _svn_info_xml_rev_re.finditer(xml)
]
except InstallationError:
url, revs = None, []
if revs:
rev = max(revs)
else:
rev = 0
return url, rev
def get_tag_revs(self, svn_tag_url):
stdout = call_subprocess(
[self.cmd, 'ls', '-v', svn_tag_url], show_stdout=False)
results = []
for line in stdout.splitlines():
parts = line.split()
rev = int(parts[0])
tag = parts[-1].strip('/')
results.append((tag, rev))
return results
def find_tag_match(self, rev, tag_revs):
best_match_rev = None
best_tag = None
for tag, tag_rev in tag_revs:
if (tag_rev > rev and
(best_match_rev is None or best_match_rev > tag_rev)):
# FIXME: Is best_match > tag_rev really possible?
# or is it a sign something is wacky?
best_match_rev = tag_rev
best_tag = tag
return best_tag
def get_src_requirement(self, dist, location, find_tags=False):
repo = self.get_url(location)
if repo is None:
return None
parts = repo.split('/')
## FIXME: why not project name?
egg_project_name = dist.egg_name().split('-', 1)[0]
rev = self.get_revision(location)
if parts[-2] in ('tags', 'tag'):
# It's a tag, perfect!
full_egg_name = '%s-%s' % (egg_project_name, parts[-1])
elif parts[-2] in ('branches', 'branch'):
# It's a branch :(
full_egg_name = '%s-%s-r%s' % (dist.egg_name(), parts[-1], rev)
elif parts[-1] == 'trunk':
# Trunk :-/
full_egg_name = '%s-dev_r%s' % (dist.egg_name(), rev)
if find_tags:
tag_url = '/'.join(parts[:-1]) + '/tags'
tag_revs = self.get_tag_revs(tag_url)
match = self.find_tag_match(rev, tag_revs)
if match:
                    logger.notify(
                        'trunk checkout %s seems to be equivalent to tag %s' %
                        (rev, match)
                    )
repo = '%s/%s' % (tag_url, match)
full_egg_name = '%s-%s' % (egg_project_name, match)
else:
# Don't know what it is
logger.warn(
'svn URL does not fit normal structure (tags/branches/trunk): '
'%s' % repo
)
full_egg_name = '%s-dev_r%s' % (egg_project_name, rev)
return 'svn+%s@%s#egg=%s' % (repo, rev, full_egg_name)
def get_rev_options(url, rev):
if rev:
rev_options = ['-r', rev]
else:
rev_options = []
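    # Pull any credentials embedded in the URL (e.g. svn+ssh://user:pass@host/)
    # so they can be passed explicitly via --username/--password below.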
r = urlparse.urlsplit(url)
if hasattr(r, 'username'):
# >= Python-2.5
username, password = r.username, r.password
else:
netloc = r[1]
if '@' in netloc:
auth = netloc.split('@')[0]
if ':' in auth:
username, password = auth.split(':', 1)
else:
username, password = auth, None
else:
username, password = None, None
if username:
rev_options += ['--username', username]
if password:
rev_options += ['--password', password]
return rev_options
vcs.register(Subversion)
| Ivoz/pip | pip/vcs/subversion.py | Python | mit | 10,491 |
import unittest
import os
import sys
from kalliope.core.Utils.FileManager import FileManager
class TestFileManager(unittest.TestCase):
"""
Class to test FileManager
"""
def setUp(self):
pass
def create_file_manager(self):
file_manager = FileManager()
        self.assertIsInstance(file_manager, FileManager)
def test_create_directory(self):
"""
Test to create a new directory.
"""
# set up
cache_path = "/tmp/kalliope/tests/testDirectory"
if os.path.exists(cache_path):
os.removedirs(cache_path)
# Test FileManager.create_directory
FileManager.create_directory(cache_path)
self.assertTrue(os.path.exists(cache_path),
"Fail creating a directory to the path ")
# Remove the directory
os.removedirs(cache_path)
def test_write_in_file(self):
"""
Test to write in file.
"""
# set up the context
dir_path = "/tmp/kalliope/tests/"
file_name = "test_FileManager_writeInFile"
        file_path = os.path.join(dir_path, file_name)
in_file_text = "[Kalliope] Testing the write_in_file method from Utils.FileManager"
if os.path.exists(file_path):
os.remove(file_path)
if not os.path.exists(dir_path):
os.makedirs(dir_path)
# Test FileManager.write_in_file
FileManager.write_in_file(file_path=file_path, content=in_file_text)
with open(file_path, 'r') as content_file:
content = content_file.read()
self.assertEqual(content, in_file_text,
"Fail writing in the file ")
if sys.version_info[0] > 2:
# Test writing of bytes object for python3
FileManager.write_in_file(file_path=file_path, content=bytes(in_file_text, 'utf-8'))
with open(file_path, 'r') as content_file:
content = content_file.read()
self.assertEqual(content, in_file_text,
"Fail writing in the file ")
# Clean up
if os.path.exists(file_path):
os.remove(file_path)
# run into IOError by trying to write something in root
dir_path = "/root/"
file_name = "test_FileManager_writeInFile"
file_path = os.path.join(dir_path, file_name)
self.assertFalse(FileManager.write_in_file(file_path=file_path, content=in_file_text))
def test_file_is_empty(self):
"""
Test that the file is empty
"""
# set up the context
dir_path = "/tmp/kalliope/tests/"
file_name = "test_FileManager_fileIsEmpty"
file_path = os.path.join(dir_path, file_name)
if os.path.exists(file_path):
os.remove(file_path)
if not os.path.exists(dir_path):
os.makedirs(dir_path)
# Test FileManager.file_is_empty
with open(file_path, "wb") as file_open:
file_open.write(b"")
file_open.close()
self.assertTrue(FileManager.file_is_empty(file_path=file_path),
"Fail matching to verify that file is empty ")
# Clean up
if os.path.exists(file_path):
os.remove(file_path)
def test_remove_file(self):
"""
Test to remove a file
"""
# set up the context
dir_path = "/tmp/kalliope/tests/"
file_name = "test_FileManager_fileRemove"
file_path = os.path.join(dir_path, file_name)
if os.path.exists(file_path):
os.remove(file_path)
if not os.path.exists(dir_path):
os.makedirs(dir_path)
# Test to remove the file
# FileManager.remove_file
with open(file_path, "wb") as file_open:
file_open.write(b"")
file_open.close()
FileManager.remove_file(file_path=file_path)
self.assertFalse(os.path.exists(file_path),
"Fail removing the file")
def test_is_path_creatable(self):
"""
        Test if the path is creatable for the user.
        Does the user have permission to use this path?
"""
# set up the context
dir_path = "/tmp/kalliope/tests/"
file_name = "test_FileManager_filePathCreatable"
file_path = os.path.join(dir_path, file_name)
if os.path.exists(file_path):
os.remove(file_path)
if not os.path.exists(dir_path):
os.makedirs(dir_path)
# test not allowed : return False
not_allowed_root_path = "/root/"
not_allowed_path = os.path.join(not_allowed_root_path, file_name)
self.assertFalse(FileManager.is_path_creatable(not_allowed_path),
"Fail to assert not accessing this path ")
# test allowed : return True
self.assertTrue(FileManager.is_path_creatable(file_path))
def test_is_path_exists_or_creatable(self):
"""
Test the _is_path_exists_or_creatable
        4 scenarios:
        - the file exists and is creatable: return True
        - the file does not exist but is creatable: return True
        - the file exists but is not allowed: return True --> needs a review!
        - the file does not exist and is not allowed: return False
"""
# set up the context
dir_path = "/tmp/kalliope/tests/"
file_name = "test_FileManager_fileIsPathExistsOrCreatable"
file_path = os.path.join(dir_path, file_name)
if os.path.exists(file_path):
os.remove(file_path)
if not os.path.exists(dir_path):
os.makedirs(dir_path)
# Test the file exist and creatable : return True
with open(file_path, "wb") as file_open:
file_open.write(b"[Kalliope] Test Running the test_is_path_exists_or_creatable method")
file_open.close()
self.assertTrue(FileManager.is_path_exists_or_creatable(file_path),
"Fail to assert the file exist ")
# test the file not exist but creatable : return True
os.remove(file_path)
self.assertTrue(FileManager.is_path_exists_or_creatable(file_path),
"Fail asserting the file does not exist ")
# test the file exist but not creatable : return True
# file_exist_not_allowed = "/root/.ssh/known_hosts"
# self.assertTrue(FileManager.is_path_creatable(file_exist_not_allowed))
# test the file not exist and not allowed : return False
not_allowed_root_path = "/root/"
not_allowed_path = os.path.join(not_allowed_root_path, file_name)
self.assertFalse(FileManager.is_path_creatable(not_allowed_path),
"Fail to assert not accessing this path ")
if __name__ == '__main__':
unittest.main()
| kalliope-project/kalliope | Tests/test_file_manager.py | Python | gpl-3.0 | 6,902 |
# -*- coding: utf-8 -*-
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import re
from operator import itemgetter
from django import forms
from django.core.exceptions import ValidationError
from django.forms import widgets
from django.utils.safestring import mark_safe
from lib.l10n_utils.dotlang import _
from lib.l10n_utils.fluent import ftl_lazy
from product_details import product_details
from bedrock.mozorg.forms import (FORMATS, EmailInput, PrivacyWidget,
strip_parenthetical)
from bedrock.newsletter import utils
_newsletters_re = re.compile(r'^[\w,-]+$')
LANG_FILES = ['mozorg/newsletters']
def validate_newsletters(newsletters):
if not newsletters:
raise ValidationError('No Newsletter Provided')
newsletters = newsletters.replace(' ', '')
if not _newsletters_re.match(newsletters):
raise ValidationError('Invalid Newsletter')
return newsletters
def get_lang_choices(newsletters=None):
"""
Return a localized list of choices for language.
List looks like: [[lang_code, lang_name], [lang_code, lang_name], ...]
:param newsletters: Either a comma separated string or a list of newsletter ids.
"""
lang_choices = []
languages = utils.get_languages_for_newsletters(newsletters)
for lang in languages:
if lang in product_details.languages:
lang_name = product_details.languages[lang]['native']
else:
try:
locale = [loc for loc in product_details.languages
if loc.startswith(lang)][0]
except IndexError:
continue
lang_name = product_details.languages[locale]['native']
lang_choices.append([lang, strip_parenthetical(lang_name)])
return sorted(lang_choices, key=itemgetter(1))
class SimpleRadioSelect(widgets.RadioSelect):
"""
Render radio buttons as just labels with no <ul> chrome.
"""
template_name = 'newsletter/forms/simple_radio_select.html'
class BooleanTabularRadioSelect(widgets.RadioSelect):
"""
A Select Widget intended to be used with NullBooleanField.
"""
template_name = 'newsletter/forms/tabular_radio_select.html'
wrap_label = False
def __init__(self, attrs=None):
choices = (
('true', _('Yes')),
('false', _('No')),
)
super(BooleanTabularRadioSelect, self).__init__(attrs, choices)
def format_value(self, value):
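        # Normalize both real booleans and their string forms onto the
        # 'true'/'false' radio choices; anything else renders as 'unknown'.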
try:
return {
True: 'true', False: 'false',
'true': 'true', 'false': 'false',
}[value]
except KeyError:
return 'unknown'
def value_from_datadict(self, data, files, name):
value = data.get(name)
return {
True: True,
False: False,
'true': True,
'false': False,
}.get(value)
def get_context(self, name, value, attrs):
context = super(BooleanTabularRadioSelect, self).get_context(
name, value, attrs)
context['wrap_label'] = False
return context
class TableCheckboxInput(widgets.CheckboxInput):
"""Add table cell markup around the rendered checkbox"""
def render(self, *args, **kwargs):
out = super(TableCheckboxInput, self).render(*args, **kwargs)
return mark_safe("<td>" + out + "</td>")
class CountrySelectForm(forms.Form):
"""
Form used on a page dedicated to allowing an existing subscriber to provide
us with their country so that we can include them in mailings relevant to
their area of the world.
"""
country = forms.ChoiceField(choices=[]) # will set choices based on locale
def __init__(self, locale, *args, **kwargs):
regions = product_details.get_regions(locale)
regions = sorted(iter(regions.items()), key=itemgetter(1))
super(CountrySelectForm, self).__init__(*args, **kwargs)
self.fields['country'].choices = regions
class ManageSubscriptionsForm(forms.Form):
"""
Form used on manage subscriptions page for the user's information,
like email address and language preference.
@param locale: locale string, e.g. "en-US". Will be used to set
country and lang defaults if not otherwise provided in initial
or bound data.
@param args: Other standard form args
@param kwargs: Other standard form kwargs
"""
format = forms.ChoiceField(widget=SimpleRadioSelect,
choices=FORMATS,
initial='H')
remove_all = forms.BooleanField(required=False)
country = forms.ChoiceField(choices=[], # will set choices based on locale
required=False)
lang = forms.ChoiceField(choices=[], # will set choices based on newsletter languages
required=False)
def __init__(self, locale, *args, **kwargs):
regions = product_details.get_regions(locale)
regions = sorted(iter(regions.items()), key=itemgetter(1))
lang_choices = get_lang_choices()
languages = [x[0] for x in lang_choices]
lang = country = locale.lower()
if '-' in lang:
lang, country = lang.split('-', 1)
lang = lang if lang in languages else 'en'
self.newsletters = kwargs.pop('newsletters', [])
# Get initial - work with a copy so we're not modifying the
# data that was passed to us
initial = kwargs.get('initial', {}).copy()
if not initial.get('country', None):
initial['country'] = country
if not initial.get('lang', None):
initial['lang'] = lang
else:
lang = initial['lang']
# Sometimes people are in ET with a language that is spelled a
# little differently from our list. E.g. we have 'es' on our
# list, but in ET their language is 'es-ES'. Try to find a match
# for their current lang in our list and use that. If we can't
# find one, then fall back to guessing from their locale,
# ignoring what they had in ET. (This is just for the initial
# value on the form; they can always change to another valid
# language before submitting.)
if lang not in languages:
for valid_lang, _unused in lang_choices:
# if the first two chars match, close enough
if lang.lower()[:2] == valid_lang.lower()[:2]:
lang = valid_lang
break
else:
# No luck - guess from the locale
lang = locale.lower()
if '-' in lang:
lang, _unused = lang.split('-', 1)
initial['lang'] = lang
kwargs['initial'] = initial
super(ManageSubscriptionsForm, self).__init__(*args, **kwargs)
self.fields['country'].choices = regions
self.fields['lang'].choices = lang_choices
self.already_subscribed = initial.get('newsletters', [])
def clean(self):
valid_newsletters = utils.get_newsletters()
for newsletter in self.newsletters:
if newsletter not in valid_newsletters:
msg = _("%s is not a valid newsletter") % newsletter
raise ValidationError(msg)
return super(ManageSubscriptionsForm, self).clean()
class NewsletterForm(forms.Form):
"""
Form to let a user subscribe to or unsubscribe from a newsletter
on the manage existing newsletters page. Used in a FormSet.
"""
title = forms.CharField(required=False)
description = forms.CharField(required=False)
subscribed_radio = forms.BooleanField(
widget=BooleanTabularRadioSelect,
required=False, # they have to answer, but answer can be False
)
subscribed_check = forms.BooleanField(
widget=widgets.CheckboxInput,
required=False, # they have to answer, but answer can be False
)
newsletter = forms.CharField(widget=forms.HiddenInput)
class NewsletterFooterForm(forms.Form):
"""
Form used to subscribe to a single newsletter, typically in the
footer of a page (see newsletters/middleware.py) but sometimes
on a dedicated page.
"""
email = forms.EmailField(widget=EmailInput(attrs={'required': 'required'}))
# first/last_name not yet included in email_newsletter_form helper
# currently used on /contribute/friends/ (custom markup)
first_name = forms.CharField(widget=forms.TextInput, required=False)
last_name = forms.CharField(widget=forms.TextInput, required=False)
fmt = forms.ChoiceField(widget=SimpleRadioSelect,
choices=FORMATS,
initial='H')
privacy = forms.BooleanField(widget=PrivacyWidget)
source_url = forms.CharField(required=False)
newsletters = forms.CharField(widget=forms.HiddenInput,
required=True,
max_length=100)
# has to take a newsletters argument so it can figure
# out which languages to list in the form.
def __init__(self, newsletters, locale, data=None, *args, **kwargs):
regions = product_details.get_regions(locale)
regions = sorted(iter(regions.items()), key=itemgetter(1))
try:
newsletters = validate_newsletters(newsletters)
except ValidationError:
# replace with most common good newsletter
# form validation will work with submitted data
newsletters = 'mozilla-and-you'
lang = locale.lower()
if '-' in lang:
lang, country = lang.split('-', 1)
else:
country = ''
regions.insert(0, ('', ftl_lazy('newsletter-form-select-country-or-region',
fallback='newsletter-form-select-country')))
lang_choices = get_lang_choices(newsletters)
languages = [x[0] for x in lang_choices]
if lang not in languages:
# The lang from their locale is not one that our newsletters
# are translated into. Initialize the language field to no
# choice, to force the user to pick one of the languages that
# we do support.
lang = ''
lang_choices.insert(0, ('', ftl_lazy('newsletter-form-available-languages')))
super(NewsletterFooterForm, self).__init__(data, *args, **kwargs)
required_args = {
'required': 'required',
'aria-required': 'true',
}
country_widget = widgets.Select(attrs=required_args)
self.fields['country'] = forms.ChoiceField(widget=country_widget,
choices=regions,
initial=country,
required=False)
lang_widget = widgets.Select(attrs=required_args)
self.fields['lang'] = forms.TypedChoiceField(widget=lang_widget,
choices=lang_choices,
initial=lang,
required=False)
self.fields['newsletters'].initial = newsletters
def clean_newsletters(self):
return validate_newsletters(self.cleaned_data['newsletters'])
def clean_source_url(self):
su = self.cleaned_data['source_url'].strip()
if su:
# limit to 255 characters by truncation
return su[:255]
return su
class EmailForm(forms.Form):
"""
Form to enter email, e.g. to be sent a recovery message
"""
email = forms.EmailField(widget=EmailInput(attrs={'required': 'required'}))
| ericawright/bedrock | bedrock/newsletter/forms.py | Python | mpl-2.0 | 11,976 |
from datetime import datetime
from time import sleep
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from dimagi.utils.parsing import string_to_datetime, json_format_datetime
from dimagi.utils.couch.cache import cache_core
from dimagi.utils.logging import notify_exception
from redis_cache.cache import RedisCache
class RedisClientError(Exception):
pass
class GenericEnqueuingOperation(BaseCommand):
"""
Implements a generic enqueuing operation.
"""
args = ""
help = ""
def handle(self, *args, **options):
if self.use_queue():
self.validate_args(**options)
self.keep_fetching_items()
else:
# If we return right away, supervisor will keep trying to restart
# the service. So just loop and do nothing.
while True:
sleep(60)
def keep_fetching_items(self):
while True:
try:
self.populate_queue()
except RedisClientError:
notify_exception(None,
message="Could not get redis connection. Is redis up?")
except:
notify_exception(None,
message="Could not populate %s." % self.get_queue_name())
sleep(15)
def populate_queue(self):
client = self.get_redis_client()
utcnow = datetime.utcnow()
entries = self.get_items_to_be_processed(utcnow)
for entry in entries:
item_id = entry["id"]
process_datetime_str = entry["key"]
self.enqueue(item_id, process_datetime_str, redis_client=client)
def enqueue(self, item_id, process_datetime_str, redis_client=None):
client = redis_client or self.get_redis_client()
queue_name = self.get_queue_name()
enqueuing_lock = self.get_enqueuing_lock(client,
"%s-enqueuing-%s-%s" % (queue_name, item_id, process_datetime_str))
if enqueuing_lock.acquire(blocking=False):
try:
self.enqueue_item(item_id)
except:
# We couldn't enqueue, so release the lock
enqueuing_lock.release()
def get_redis_client(self):
rcache = cache_core.get_redis_default_cache()
if not isinstance(rcache, RedisCache):
raise RedisClientError("Could not get redis connection.")
try:
client = rcache.raw_client
except:
raise RedisClientError("Could not get redis connection.")
return client
def get_enqueuing_lock(self, client, key):
lock_timeout = self.get_enqueuing_timeout() * 60
return client.lock(key, timeout=lock_timeout)
def get_queue_name(self):
"""Should return the name of this queue. Used for acquiring the
enqueuing lock to prevent enqueuing the same item twice"""
raise NotImplementedError("This method must be implemented.")
def get_enqueuing_timeout(self):
"""Should return the timeout, in minutes, to use with the
enqueuing lock. This is essentially the number of minutes to
wait before enqueuing an unprocessed item again."""
raise NotImplementedError("This method must be implemented.")
def get_items_to_be_processed(self, utcnow):
"""Should return the couch query result containing the items to be
enqueued. The result should just have the id of the item to be
processed and the key from the couch view for each item. The couch
view should emit a single value, which should be the timestamp that
the item should be processed. Since this just returns ids and keys,
no limiting is necessary.
utcnow - The current timestamp, in utc, at the time of the method's
call. Retrieve all items to be processed before this timestamp."""
raise NotImplementedError("This method must be implemented.")
def enqueue_item(self, _id):
"""This method should enqueue the item.
_id - The couch document _id of the item that is being referenced."""
raise NotImplementedError("This method must be implemented.")
def use_queue(self):
"""If this is False, the handle() method will do nothing and return."""
return True
def validate_args(self, **options):
"""Validate the options passed at the command line."""
pass
| gmimano/commcaretest | hqscripts/generic_queue.py | Python | bsd-3-clause | 4,471 |
import tempfile
import os.path
from pip.util import call_subprocess
from pip.util import display_path, rmtree
from pip.vcs import vcs, VersionControl
from pip.log import logger
from pip.compat import url2pathname, urlparse
urlsplit = urlparse.urlsplit
urlunsplit = urlparse.urlunsplit
class Git(VersionControl):
name = 'git'
dirname = '.git'
repo_name = 'clone'
schemes = (
'git', 'git+http', 'git+https', 'git+ssh', 'git+git', 'git+file',
)
def __init__(self, url=None, *args, **kwargs):
# Works around an apparent Git bug
# (see http://article.gmane.org/gmane.comp.version-control.git/146500)
if url:
scheme, netloc, path, query, fragment = urlsplit(url)
if scheme.endswith('file'):
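                # Normalize local file paths: decode percent-escapes and turn
                # backslashes into forward slashes so git+file URLs also work
                # on Windows.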
initial_slashes = path[:-len(path.lstrip('/'))]
newpath = (
initial_slashes +
url2pathname(path).replace('\\', '/').lstrip('/')
)
url = urlunsplit((scheme, netloc, newpath, query, fragment))
after_plus = scheme.find('+') + 1
url = scheme[:after_plus] + urlunsplit(
(scheme[after_plus:], netloc, newpath, query, fragment),
)
super(Git, self).__init__(url, *args, **kwargs)
def export(self, location):
"""Export the Git repository at the url to the destination location"""
temp_dir = tempfile.mkdtemp('-export', 'pip-')
self.unpack(temp_dir)
try:
if not location.endswith('/'):
location = location + '/'
call_subprocess(
[self.cmd, 'checkout-index', '-a', '-f', '--prefix', location],
filter_stdout=self._filter, show_stdout=False, cwd=temp_dir)
finally:
rmtree(temp_dir)
def check_rev_options(self, rev, dest, rev_options):
"""Check the revision options before checkout to compensate that tags
and branches may need origin/ as a prefix.
Returns the SHA1 of the branch or tag if found.
"""
revisions = self.get_refs(dest)
origin_rev = 'origin/%s' % rev
if origin_rev in revisions:
# remote branch
return [revisions[origin_rev]]
elif rev in revisions:
# a local tag or branch name
return [revisions[rev]]
else:
logger.warn(
"Could not find a tag or branch '%s', assuming commit." % rev,
)
return rev_options
def switch(self, dest, url, rev_options):
call_subprocess(
[self.cmd, 'config', 'remote.origin.url', url], cwd=dest)
call_subprocess(
[self.cmd, 'checkout', '-q'] + rev_options, cwd=dest)
self.update_submodules(dest)
def update(self, dest, rev_options):
# First fetch changes from the default remote
call_subprocess([self.cmd, 'fetch', '-q'], cwd=dest)
        # Then reset to wanted revision (maybe even origin/master)
if rev_options:
rev_options = self.check_rev_options(
rev_options[0], dest, rev_options,
)
call_subprocess(
[self.cmd, 'reset', '--hard', '-q'] + rev_options,
cwd=dest,
)
#: update submodules
self.update_submodules(dest)
def obtain(self, dest):
url, rev = self.get_url_rev()
if rev:
rev_options = [rev]
rev_display = ' (to %s)' % rev
else:
rev_options = ['origin/master']
rev_display = ''
if self.check_destination(dest, url, rev_options, rev_display):
logger.notify(
'Cloning %s%s to %s' % (url, rev_display, display_path(dest)),
)
call_subprocess([self.cmd, 'clone', '-q', url, dest])
#: repo may contain submodules
self.update_submodules(dest)
if rev:
rev_options = self.check_rev_options(rev, dest, rev_options)
# Only do a checkout if rev_options differs from HEAD
if not self.get_revision(dest).startswith(rev_options[0]):
call_subprocess(
[self.cmd, 'checkout', '-q'] + rev_options,
cwd=dest,
)
def get_url(self, location):
url = call_subprocess(
[self.cmd, 'config', 'remote.origin.url'],
show_stdout=False, cwd=location)
return url.strip()
def get_revision(self, location):
current_rev = call_subprocess(
[self.cmd, 'rev-parse', 'HEAD'], show_stdout=False, cwd=location)
return current_rev.strip()
def get_refs(self, location):
"""Return map of named refs (branches or tags) to commit hashes."""
output = call_subprocess([self.cmd, 'show-ref'],
show_stdout=False, cwd=location)
rv = {}
for line in output.strip().splitlines():
commit, ref = line.split(' ', 1)
ref = ref.strip()
ref_name = None
if ref.startswith('refs/remotes/'):
ref_name = ref[len('refs/remotes/'):]
elif ref.startswith('refs/heads/'):
ref_name = ref[len('refs/heads/'):]
elif ref.startswith('refs/tags/'):
ref_name = ref[len('refs/tags/'):]
if ref_name is not None:
rv[ref_name] = commit.strip()
return rv
def get_src_requirement(self, dist, location, find_tags):
repo = self.get_url(location)
        if not repo:
            return None
        if not repo.lower().startswith('git:'):
            repo = 'git+' + repo
        egg_project_name = dist.egg_name().split('-', 1)[0]
current_rev = self.get_revision(location)
refs = self.get_refs(location)
# refs maps names to commit hashes; we need the inverse
# if multiple names map to a single commit, this arbitrarily picks one
names_by_commit = dict((commit, ref) for ref, commit in refs.items())
if current_rev in names_by_commit:
# It's a tag
full_egg_name = (
'%s-%s' % (egg_project_name, names_by_commit[current_rev])
)
else:
full_egg_name = '%s-dev' % egg_project_name
return '%s@%s#egg=%s' % (repo, current_rev, full_egg_name)
def get_url_rev(self):
"""
        Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'.
        That's required because although they use SSH, they sometimes don't
        work with an ssh:// scheme (e.g. GitHub). But we need a scheme for
        parsing. Hence we remove it again afterwards and return it as a stub.
"""
if '://' not in self.url:
assert 'file:' not in self.url
self.url = self.url.replace('git+', 'git+ssh://')
url, rev = super(Git, self).get_url_rev()
url = url.replace('ssh://', '')
else:
url, rev = super(Git, self).get_url_rev()
return url, rev
def update_submodules(self, location):
if not os.path.exists(os.path.join(location, '.gitmodules')):
return
call_subprocess(
[self.cmd, 'submodule', 'update', '--init', '--recursive', '-q'],
cwd=location,
)
vcs.register(Git)
| 1stvamp/pip | pip/vcs/git.py | Python | mit | 7,461 |
# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from collections import defaultdict, OrderedDict
from datetime import datetime, date
from sqlalchemy import Date, Time
from sqlalchemy.event import listens_for
from sqlalchemy.ext.declarative import declared_attr
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.orm import joinedload
from sqlalchemy.sql import cast
from werkzeug.datastructures import OrderedMultiDict
from indico.core.db import db
from indico.core.db.sqlalchemy.custom import static_array, PyIntEnum
from indico.core.db.sqlalchemy.custom.utcdatetime import UTCDateTime
from indico.core.db.sqlalchemy.util.queries import limit_groups
from indico.core.errors import NoReportError
from indico.modules.rb.models.reservation_edit_logs import ReservationEditLog
from indico.modules.rb.models.reservation_occurrences import ReservationOccurrence
from indico.modules.rb.models.room_nonbookable_periods import NonBookablePeriod
from indico.modules.rb.models.equipment import (ReservationEquipmentAssociation, EquipmentType,
RoomEquipmentAssociation)
from indico.modules.rb.models.util import unimplemented
from indico.modules.rb.notifications.reservations import (notify_confirmation, notify_cancellation,
notify_creation, notify_modification,
notify_rejection)
from indico.modules.rb.util import rb_is_admin
from indico.util.date_time import now_utc, format_date, format_time, get_month_end, round_up_month
from indico.util.i18n import _, N_
from indico.util.serializer import Serializer
from indico.util.string import return_ascii
from indico.util.struct.enum import IndicoEnum
from indico.util.user import unify_user_args
from indico.web.flask.util import url_for
from MaKaC.common.Locators import Locator
class ConflictingOccurrences(Exception):
pass
class RepeatFrequency(int, IndicoEnum):
NEVER = 0
DAY = 1
WEEK = 2
MONTH = 3
class RepeatMapping(object):
mapping = {
(RepeatFrequency.NEVER, 0): (N_('Single reservation'), None, 'none'),
(RepeatFrequency.DAY, 1): (N_('Repeat daily'), 0, 'daily'),
(RepeatFrequency.WEEK, 1): (N_('Repeat once a week'), 1, 'weekly'),
(RepeatFrequency.WEEK, 2): (N_('Repeat once every two weeks'), 2, 'everyTwoWeeks'),
(RepeatFrequency.WEEK, 3): (N_('Repeat once every three weeks'), 3, 'everyThreeWeeks'),
(RepeatFrequency.MONTH, 1): (N_('Repeat every month'), 4, 'monthly')
}
@classmethod
@unimplemented(exceptions=(KeyError,), message=_('Unimplemented repetition pair'))
def get_message(cls, repeat_frequency, repeat_interval):
return cls.mapping[(repeat_frequency, repeat_interval)][0]
@classmethod
@unimplemented(exceptions=(KeyError,), message=_('Unimplemented repetition pair'))
def get_short_name(cls, repeat_frequency, repeat_interval):
# for the API
return cls.mapping[(repeat_frequency, repeat_interval)][2]
@classmethod
@unimplemented(exceptions=(KeyError,), message=_('Unknown old repeatability'))
def convert_legacy_repeatability(cls, repeat):
if repeat is None or repeat < 5:
for k, (_, v, _) in cls.mapping.iteritems():
if v == repeat:
return k
else:
raise KeyError('Undefined old repeat: {}'.format(repeat))
class Reservation(Serializer, db.Model):
__tablename__ = 'reservations'
__public__ = []
__calendar_public__ = [
'id', ('booked_for_name', 'bookedForName'), ('booking_reason', 'reason'), ('details_url', 'bookingUrl')
]
__api_public__ = [
'id', ('start_dt', 'startDT'), ('end_dt', 'endDT'), 'repeat_frequency', 'repeat_interval',
('booked_for_name', 'bookedForName'), ('details_url', 'bookingUrl'), ('booking_reason', 'reason'),
('uses_vc', 'usesAVC'), ('needs_vc_assistance', 'needsAVCSupport'),
'needs_assistance', ('is_accepted', 'isConfirmed'), ('is_valid', 'isValid'), 'is_cancelled',
'is_rejected', ('location_name', 'location'), 'booked_for_user_email'
]
@declared_attr
def __table_args__(cls):
return (db.Index('ix_reservations_start_dt_date', cast(cls.start_dt, Date)),
db.Index('ix_reservations_end_dt_date', cast(cls.end_dt, Date)),
db.Index('ix_reservations_start_dt_time', cast(cls.start_dt, Time)),
db.Index('ix_reservations_end_dt_time', cast(cls.end_dt, Time)),
{'schema': 'roombooking'})
id = db.Column(
db.Integer,
primary_key=True
)
created_dt = db.Column(
UTCDateTime,
nullable=False,
default=now_utc
)
start_dt = db.Column(
db.DateTime,
nullable=False,
index=True
)
end_dt = db.Column(
db.DateTime,
nullable=False,
index=True
)
repeat_frequency = db.Column(
PyIntEnum(RepeatFrequency),
nullable=False,
default=RepeatFrequency.NEVER
) # week, month, year, etc.
repeat_interval = db.Column(
db.SmallInteger,
nullable=False,
default=0
) # 1, 2, 3, etc.
booked_for_id = db.Column(
db.Integer,
db.ForeignKey('users.users.id'),
index=True,
nullable=True,
# Must be nullable for legacy data :(
)
booked_for_name = db.Column(
db.String,
nullable=False
)
created_by_id = db.Column(
db.Integer,
db.ForeignKey('users.users.id'),
index=True,
nullable=True,
# Must be nullable for legacy data :(
)
room_id = db.Column(
db.Integer,
db.ForeignKey('roombooking.rooms.id'),
nullable=False,
index=True
)
contact_email = db.Column(
db.String,
nullable=False,
default=''
)
contact_phone = db.Column(
db.String,
nullable=False,
default=''
)
is_accepted = db.Column(
db.Boolean,
nullable=False
)
is_cancelled = db.Column(
db.Boolean,
nullable=False,
default=False
)
is_rejected = db.Column(
db.Boolean,
nullable=False,
default=False
)
booking_reason = db.Column(
db.Text,
nullable=False
)
rejection_reason = db.Column(
db.String
)
uses_vc = db.Column(
db.Boolean,
nullable=False,
default=False
)
needs_vc_assistance = db.Column(
db.Boolean,
nullable=False,
default=False
)
needs_assistance = db.Column(
db.Boolean,
nullable=False,
default=False
)
event_id = db.Column(
db.Integer,
db.ForeignKey('events.events.id'),
nullable=True,
index=True
)
edit_logs = db.relationship(
'ReservationEditLog',
backref='reservation',
cascade='all, delete-orphan',
lazy='dynamic'
)
occurrences = db.relationship(
'ReservationOccurrence',
backref='reservation',
cascade='all, delete-orphan',
lazy='dynamic'
)
used_equipment = db.relationship(
'EquipmentType',
secondary=ReservationEquipmentAssociation,
backref='reservations',
lazy='dynamic'
)
#: The user this booking was made for.
#: Assigning a user here also updates `booked_for_name`.
booked_for_user = db.relationship(
'User',
lazy=False,
foreign_keys=[booked_for_id],
backref=db.backref(
'reservations_booked_for',
lazy='dynamic'
)
)
#: The user who created this booking.
created_by_user = db.relationship(
'User',
lazy=False,
foreign_keys=[created_by_id],
backref=db.backref(
'reservations',
lazy='dynamic'
)
)
#: The Event this reservation was made for
event_new = db.relationship(
'Event',
lazy=True,
backref=db.backref(
'reservations',
lazy='dynamic'
)
)
# relationship backrefs:
# - room (Room.reservations)
@hybrid_property
def is_archived(self):
return self.end_dt < datetime.now()
@hybrid_property
def is_pending(self):
return not (self.is_accepted or self.is_rejected or self.is_cancelled)
@is_pending.expression
def is_pending(self):
return ~(Reservation.is_accepted | Reservation.is_rejected | Reservation.is_cancelled)
@hybrid_property
def is_repeating(self):
return self.repeat_frequency != RepeatFrequency.NEVER
@hybrid_property
def is_valid(self):
return self.is_accepted and not (self.is_rejected or self.is_cancelled)
@is_valid.expression
def is_valid(self):
return self.is_accepted & ~(self.is_rejected | self.is_cancelled)
@property
def booked_for_user_email(self):
return self.booked_for_user.email if self.booked_for_user else None
@property
def contact_emails(self):
return set(filter(None, map(unicode.strip, self.contact_email.split(u','))))
@property
def details_url(self):
return url_for('rooms.roomBooking-bookingDetails', self, _external=True)
@property
def location_name(self):
return self.room.location_name
@property
def repetition(self):
return self.repeat_frequency, self.repeat_interval
@property
def status_string(self):
parts = []
if self.is_valid:
parts.append(_(u"Valid"))
else:
if self.is_cancelled:
parts.append(_(u"Cancelled"))
if self.is_rejected:
parts.append(_(u"Rejected"))
if not self.is_accepted:
parts.append(_(u"Not confirmed"))
if self.is_archived:
parts.append(_(u"Archived"))
else:
parts.append(_(u"Live"))
return u', '.join(map(unicode, parts))
@return_ascii
def __repr__(self):
return u'<Reservation({0}, {1}, {2}, {3}, {4})>'.format(
self.id,
self.room_id,
self.booked_for_name,
self.start_dt,
self.end_dt
)
@classmethod
def create_from_data(cls, room, data, user, prebook=None):
"""Creates a new reservation.
:param room: The Room that's being booked.
:param data: A dict containing the booking data, usually from a :class:`NewBookingConfirmForm` instance
:param user: The :class:`.User` who creates the booking.
:param prebook: Instead of determining the booking type from the user's
permissions, always use the given mode.
"""
populate_fields = ('start_dt', 'end_dt', 'repeat_frequency', 'repeat_interval', 'room_id', 'booked_for_user',
'contact_email', 'contact_phone', 'booking_reason', 'used_equipment',
'needs_assistance', 'uses_vc', 'needs_vc_assistance')
if data['repeat_frequency'] == RepeatFrequency.NEVER and data['start_dt'].date() != data['end_dt'].date():
raise ValueError('end_dt != start_dt for non-repeating booking')
if prebook is None:
prebook = not room.can_be_booked(user)
if prebook and not room.can_be_prebooked(user):
raise NoReportError('You cannot book this room')
room.check_advance_days(data['end_dt'].date(), user)
room.check_bookable_hours(data['start_dt'].time(), data['end_dt'].time(), user)
reservation = cls()
for field in populate_fields:
if field in data:
setattr(reservation, field, data[field])
reservation.room = room
reservation.booked_for_name = reservation.booked_for_user.full_name
reservation.is_accepted = not prebook
reservation.created_by_user = user
reservation.create_occurrences(True)
if not any(occ.is_valid for occ in reservation.occurrences):
raise NoReportError(_('Reservation has no valid occurrences'))
notify_creation(reservation)
return reservation
@staticmethod
def get_with_data(*args, **kwargs):
filters = kwargs.pop('filters', None)
limit = kwargs.pop('limit', None)
offset = kwargs.pop('offset', 0)
order = kwargs.pop('order', Reservation.start_dt)
limit_per_room = kwargs.pop('limit_per_room', False)
occurs_on = kwargs.pop('occurs_on')
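        # note: unlike the other options, 'occurs_on' has no default, so
        # callers must always pass it (possibly as an empty iterable)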
if kwargs:
raise ValueError('Unexpected kwargs: {}'.format(kwargs))
query = Reservation.query.options(joinedload(Reservation.room))
if filters:
query = query.filter(*filters)
if occurs_on:
query = query.filter(
Reservation.id.in_(db.session.query(ReservationOccurrence.reservation_id)
.filter(ReservationOccurrence.date.in_(occurs_on),
ReservationOccurrence.is_valid))
)
if limit_per_room and (limit or offset):
query = limit_groups(query, Reservation, Reservation.room_id, order, limit, offset)
query = query.order_by(order, Reservation.created_dt)
if not limit_per_room:
if limit:
query = query.limit(limit)
if offset:
query = query.offset(offset)
result = OrderedDict((r.id, {'reservation': r}) for r in query)
if 'vc_equipment' in args:
vc_id_subquery = db.session.query(EquipmentType.id) \
.correlate(Reservation) \
.filter_by(name='Video conference') \
.join(RoomEquipmentAssociation) \
.filter(RoomEquipmentAssociation.c.room_id == Reservation.room_id) \
.as_scalar()
# noinspection PyTypeChecker
vc_equipment_data = dict(db.session.query(Reservation.id, static_array.array_agg(EquipmentType.name))
.join(ReservationEquipmentAssociation, EquipmentType)
.filter(Reservation.id.in_(result.iterkeys()))
.filter(EquipmentType.parent_id == vc_id_subquery)
.group_by(Reservation.id))
for id_, data in result.iteritems():
data['vc_equipment'] = vc_equipment_data.get(id_, ())
if 'occurrences' in args:
occurrence_data = OrderedMultiDict(db.session.query(ReservationOccurrence.reservation_id,
ReservationOccurrence)
.filter(ReservationOccurrence.reservation_id.in_(result.iterkeys()))
.order_by(ReservationOccurrence.start_dt))
for id_, data in result.iteritems():
data['occurrences'] = occurrence_data.getlist(id_)
return result.values()
@staticmethod
def find_overlapping_with(room, occurrences, skip_reservation_id=None):
return Reservation.find(Reservation.room == room,
Reservation.id != skip_reservation_id,
ReservationOccurrence.is_valid,
ReservationOccurrence.filter_overlap(occurrences),
_join=ReservationOccurrence)
@unify_user_args
def accept(self, user):
self.is_accepted = True
self.add_edit_log(ReservationEditLog(user_name=user.full_name, info=['Reservation accepted']))
notify_confirmation(self)
valid_occurrences = self.occurrences.filter(ReservationOccurrence.is_valid).all()
pre_occurrences = ReservationOccurrence.find_overlapping_with(self.room, valid_occurrences, self.id).all()
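        # now that this booking is confirmed, reject any still-valid
        # occurrences of other bookings that collide with it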
for occurrence in pre_occurrences:
if not occurrence.is_valid:
continue
occurrence.reject(user, u'Rejected due to collision with a confirmed reservation')
@unify_user_args
def cancel(self, user, reason=None, silent=False):
self.is_cancelled = True
self.rejection_reason = reason
self.occurrences.filter_by(is_valid=True).update({'is_cancelled': True, 'rejection_reason': reason},
synchronize_session='fetch')
if not silent:
notify_cancellation(self)
log_msg = u'Reservation cancelled: {}'.format(reason) if reason else 'Reservation cancelled'
self.add_edit_log(ReservationEditLog(user_name=user.full_name, info=[log_msg]))
@unify_user_args
def reject(self, user, reason, silent=False):
self.is_rejected = True
self.rejection_reason = reason
self.occurrences.filter_by(is_valid=True).update({'is_rejected': True, 'rejection_reason': reason},
synchronize_session='fetch')
if not silent:
notify_rejection(self)
log_msg = u'Reservation rejected: {}'.format(reason)
self.add_edit_log(ReservationEditLog(user_name=user.full_name, info=[log_msg]))
def add_edit_log(self, edit_log):
self.edit_logs.append(edit_log)
db.session.flush()
@unify_user_args
def can_be_accepted(self, user):
if user is None:
return False
return rb_is_admin(user) or self.room.is_owned_by(user)
@unify_user_args
def can_be_cancelled(self, user):
if user is None:
return False
return self.is_owned_by(user) or rb_is_admin(user) or self.is_booked_for(user)
@unify_user_args
def can_be_deleted(self, user):
if user is None:
return False
return rb_is_admin(user)
@unify_user_args
def can_be_modified(self, user):
if user is None:
return False
if self.is_rejected or self.is_cancelled:
return False
if rb_is_admin(user):
return True
return self.created_by_user == user or self.is_booked_for(user) or self.room.is_owned_by(user)
@unify_user_args
def can_be_rejected(self, user):
if user is None:
return False
return rb_is_admin(user) or self.room.is_owned_by(user)
def create_occurrences(self, skip_conflicts, user=None):
ReservationOccurrence.create_series_for_reservation(self)
db.session.flush()
if user is None:
user = self.created_by_user
# Check for conflicts with nonbookable periods
if not rb_is_admin(user) and not self.room.is_owned_by(user):
nonbookable_periods = self.room.nonbookable_periods.filter(NonBookablePeriod.end_dt > self.start_dt)
for occurrence in self.occurrences:
if not occurrence.is_valid:
continue
for nbd in nonbookable_periods:
if nbd.overlaps(occurrence.start_dt, occurrence.end_dt):
if not skip_conflicts:
raise ConflictingOccurrences()
occurrence.cancel(user, u'Skipped due to nonbookable date', silent=True, propagate=False)
break
# Check for conflicts with blockings
blocked_rooms = self.room.get_blocked_rooms(*(occurrence.start_dt for occurrence in self.occurrences))
for br in blocked_rooms:
blocking = br.blocking
if blocking.can_be_overridden(user, self.room):
continue
for occurrence in self.occurrences:
if occurrence.is_valid and blocking.is_active_at(occurrence.start_dt.date()):
# Cancel OUR occurrence
msg = u'Skipped due to collision with a blocking ({})'
occurrence.cancel(user, msg.format(blocking.reason), silent=True, propagate=False)
# Check for conflicts with other occurrences
conflicting_occurrences = self.get_conflicting_occurrences()
for occurrence, conflicts in conflicting_occurrences.iteritems():
if not occurrence.is_valid:
continue
if conflicts['confirmed']:
if not skip_conflicts:
raise ConflictingOccurrences()
# Cancel OUR occurrence
msg = u'Skipped due to collision with {} reservation(s)'
occurrence.cancel(user, msg.format(len(conflicts['confirmed'])), silent=True, propagate=False)
elif conflicts['pending'] and self.is_accepted:
# Reject OTHER occurrences
for conflict in conflicts['pending']:
conflict.reject(user, u'Rejected due to collision with a confirmed reservation')
# Mark occurrences created within the notification window as notified
for occurrence in self.occurrences:
if occurrence.is_valid and occurrence.is_in_notification_window():
occurrence.notification_sent = True
# Mark occurrences created within the digest window as notified
if self.repeat_frequency == RepeatFrequency.WEEK:
if self.room.is_in_digest_window():
digest_start = round_up_month(date.today())
else:
digest_start = date.today()
digest_end = get_month_end(digest_start)
self.occurrences.filter(ReservationOccurrence.start_dt <= digest_end).update({'notification_sent': True})
def find_excluded_days(self):
return self.occurrences.filter(~ReservationOccurrence.is_valid)
def find_overlapping(self):
occurrences = self.occurrences.filter(ReservationOccurrence.is_valid).all()
return Reservation.find_overlapping_with(self.room, occurrences, self.id)
def getLocator(self):
locator = Locator()
locator['roomLocation'] = self.location_name
locator['resvID'] = self.id
return locator
def get_conflicting_occurrences(self):
valid_occurrences = self.occurrences.filter(ReservationOccurrence.is_valid).all()
colliding_occurrences = ReservationOccurrence.find_overlapping_with(self.room, valid_occurrences, self.id).all()
conflicts = defaultdict(lambda: dict(confirmed=[], pending=[]))
for occurrence in valid_occurrences:
for colliding in colliding_occurrences:
if occurrence.overlaps(colliding):
key = 'confirmed' if colliding.reservation.is_accepted else 'pending'
conflicts[occurrence][key].append(colliding)
return conflicts
def get_vc_equipment(self):
vc_equipment = self.room.available_equipment \
.correlate(ReservationOccurrence) \
.with_entities(EquipmentType.id) \
.filter_by(name='Video conference') \
.as_scalar()
return self.used_equipment.filter(EquipmentType.parent_id == vc_equipment)
def is_booked_for(self, user):
if user is None:
return False
return self.booked_for_user == user or bool(self.contact_emails & set(user.all_emails))
@unify_user_args
def is_owned_by(self, user):
return self.created_by_user == user
def modify(self, data, user):
"""Modifies an existing reservation.
:param data: A dict containing the booking data, usually from a :class:`ModifyBookingForm` instance
:param user: The :class:`.User` who modifies the booking.
"""
populate_fields = ('start_dt', 'end_dt', 'repeat_frequency', 'repeat_interval', 'booked_for_user',
'contact_email', 'contact_phone', 'booking_reason', 'used_equipment',
'needs_assistance', 'uses_vc', 'needs_vc_assistance')
# fields affecting occurrences
occurrence_fields = {'start_dt', 'end_dt', 'repeat_frequency', 'repeat_interval'}
# fields where date and time are compared separately
date_time_fields = {'start_dt', 'end_dt'}
# fields for the repetition
repetition_fields = {'repeat_frequency', 'repeat_interval'}
# pretty names for logging
field_names = {
'start_dt/date': "start date",
'end_dt/date': "end date",
'start_dt/time': "start time",
'end_dt/time': "end time",
'repetition': "booking type",
'booked_for_user': "'Booked for' user",
'contact_email': "contact email",
'contact_phone': "contact phone number",
'booking_reason': "booking reason",
'used_equipment': "list of equipment",
'needs_assistance': "option 'General Assistance'",
'uses_vc': "option 'Uses Videoconference'",
'needs_vc_assistance': "option 'Videoconference Setup Assistance'"
}
self.room.check_advance_days(data['end_dt'].date(), user)
self.room.check_bookable_hours(data['start_dt'].time(), data['end_dt'].time(), user)
changes = {}
update_occurrences = False
old_repetition = self.repetition
for field in populate_fields:
if field not in data:
continue
old = getattr(self, field)
new = data[field]
converter = unicode
if field == 'used_equipment':
# Dynamic relationship
old = sorted(old.all())
                converter = lambda x: u', '.join(e.name for e in x)
if old != new:
# Booked for user updates the (redundant) name
if field == 'booked_for_user':
old = self.booked_for_name
new = self.booked_for_name = data[field].full_name
# Apply the change
setattr(self, field, data[field])
# If any occurrence-related field changed we need to recreate the occurrences
if field in occurrence_fields:
update_occurrences = True
# Record change for history entry
if field in date_time_fields:
# The date/time fields create separate entries for the date and time parts
if old.date() != new.date():
changes[field + '/date'] = {'old': old.date(), 'new': new.date(), 'converter': format_date}
if old.time() != new.time():
changes[field + '/time'] = {'old': old.time(), 'new': new.time(), 'converter': format_time}
elif field in repetition_fields:
# Repetition needs special handling since it consists of two fields but they are tied together
# We simply update it whenever we encounter such a change; after the last change we end up with
# the correct change data
changes['repetition'] = {'old': old_repetition,
'new': self.repetition,
'converter': lambda x: RepeatMapping.get_message(*x)}
else:
changes[field] = {'old': old, 'new': new, 'converter': converter}
if not changes:
return False
# Create a verbose log entry for the modification
log = [u'Booking modified']
for field, change in changes.iteritems():
field_title = field_names.get(field, field)
converter = change['converter']
old = converter(change['old'])
new = converter(change['new'])
if not old:
log.append(u"The {} was set to '{}'".format(field_title, new))
elif not new:
log.append(u"The {} was cleared".format(field_title))
else:
log.append(u"The {} was changed from '{}' to '{}'".format(field_title, old, new))
self.edit_logs.append(ReservationEditLog(user_name=user.full_name, info=log))
# Recreate all occurrences if necessary
if update_occurrences:
cols = [col.name for col in ReservationOccurrence.__table__.columns
if not col.primary_key and col.name not in {'start_dt', 'end_dt'}]
old_occurrences = {occ.date: occ for occ in self.occurrences}
self.occurrences.delete(synchronize_session='fetch')
self.create_occurrences(True, user)
db.session.flush()
# Restore rejection data etc. for recreated occurrences
for occurrence in self.occurrences:
old_occurrence = old_occurrences.get(occurrence.date)
# Copy data from old occurrence UNLESS the new one is invalid (e.g. because of collisions)
# Otherwise we'd end up with valid occurrences ignoring collisions!
if old_occurrence and occurrence.is_valid:
for col in cols:
setattr(occurrence, col, getattr(old_occurrence, col))
# Don't cause new notifications for the entire booking in case of daily repetition
if self.repeat_frequency == RepeatFrequency.DAY and all(occ.notification_sent
for occ in old_occurrences.itervalues()):
for occurrence in self.occurrences:
occurrence.notification_sent = True
# Sanity check so we don't end up with an "empty" booking
if not any(occ.is_valid for occ in self.occurrences):
raise NoReportError(_('Reservation has no valid occurrences'))
notify_modification(self, changes)
return True
@listens_for(Reservation.booked_for_user, 'set')
def _booked_for_user_set(target, user, *unused):
target.booked_for_name = user.full_name if user else ''
| belokop/indico_bare | indico/modules/rb/models/reservations.py | Python | gpl-3.0 | 30,995 |
# -*- coding: utf-8 -*-
import gxf
from gxf.formatting import Token, Formattable
@gxf.register()
class Registers(gxf.DataCommand):
'''
Shows registers.
'''
def setup(self, parser):
parser.add_argument("-m", "--mark", action='append', default=[],
help="Highlight some registers.")
parser.add_argument("-M", "--mark-used", action='store_true',
help="Highlight currently used registers.")
def run(self, args):
regs = gxf.Registers()
memory = gxf.Memory()
tomark = args.mark[:]
if args.mark_used:
try:
dis = gxf.disassemble_lines(regs.get('pc')).lines[:1]
except gxf.GdbError:
dis = ()
for line in dis:
for _, t in line.tokens[line.instidx:]:
tomark.append(t)
tomark.extend(regs.impact.get(t, ()))
for reg, val in regs.regs.items():
if reg == "eflags" or (len(reg) == 2 and reg[1] == "s"):
continue
if reg in tomark:
ttype = Token.Name.Builtin
elif reg in ("rdi", "rsi", "rdx", "rcx", "r8", "r9"):
ttype = Token.Text
elif reg in ("rip", "eip", "rbp", "esp", "rsp", "rax", "eax"):
ttype = Token.Generic.Heading
else:
ttype = Token.Comment
print("%s%s" % (
Formattable(((ttype, "%-4s" % reg),
(Token.Comment, ": "))),
memory.refchain(val)))
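# --- Editor's sketch (assumption, not gxf documentation): the same command
# registration pattern reduced to its skeleton, reusing only the gxf surface
# visible above (register()/DataCommand/setup/run and Registers().get).
import gxf

@gxf.register()
class ShowPC(gxf.DataCommand):
    '''
    Shows the current program counter.
    '''
    def setup(self, parser):
        parser.add_argument("-x", "--hex", action='store_true',
                            help="Print the value in hexadecimal.")
    def run(self, args):
        pc = gxf.Registers().get('pc')
        # The value type depends on gxf/gdb; %#x assumes it coerces to int.
        print("%#x" % pc if args.hex else pc)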
|
wapiflapi/gxf
|
gxf/extensions/registers.py
|
Python
|
mit
| 1,628
|
import pdb
import os.path
import os
import multiprocessing as mp
import itertools
from sklearn.externals import joblib
from data.psym import event_class_set, non_class, gold_annotation, intermediate_annotation, pos_class, reg_class_set, candidate_entity
#from data.event import Trigger
from utils.path import loadModule
import represent.sparse
import represent.feature
from utils import Conf, my_multiprocess
from represent.convertor import EntityConvertor, TriArgPairConvertor, AddArgConvertor
from intg.base import ExampleGenerator
from utils.multi_label import cleanSelfAssignTri
import utils
conf = utils.get_conf()
class TriggerClassificationMain(object):
def __init__(self, task_param):
'shared obj, used to create dictionary'
self.entityConvertor = EntityConvertor(task_param, '../config/TriCFeature.config', 'TriC/')
self.classDictTri = represent.feature.ClassDict()
self.classDictTri.addKeys(event_class_set)
self.task_param = task_param
def prepareVectors(self, cps, nbp = None):
if nbp == None:
nbp = max(mp.cpu_count()/2, 4)
        print 'initialize task queue'
task_queue = mp.Queue()
for fid in cps.fileids():
task_queue.put(fid)
for _ in range(nbp):
task_queue.put(None)
self.entityConvertor.initFeatureFromConfig()
exampleDict = mp.Manager().dict()
exampleGenerator = TriggerExample(cps, self.task_param, task_queue, self.entityConvertor)
print 'initialize worker process'
workers = [mp.Process(target=exampleGenerator.prepaireTrainVectors, kwargs={'output':exampleDict})
for i in range(nbp)]
for worker in workers:
worker.start()
for worker in workers:
worker.join()
task_finished = task_queue.empty()
        print 'processed %s documents: %s' % (len(exampleDict), task_finished)
for fid in cps.fileids():
for example in exampleDict[fid]:
self.entityConvertor.addKey2Dictionary(example[0])
self.entityConvertor.compact()
self.entityConvertor.saveDict()
entityIndex = [index for fid in cps.fileids() for _,_,index in exampleDict[fid]]
self.entityConvertor.saveIndex(entityIndex)
entityFeat = [feat for fid in cps.fileids() for feat,_,_ in exampleDict[fid]]
self.entityConvertor.saveVector(entityFeat)
entityCls = [cls for fid in cps.fileids() for _,cls,_ in exampleDict[fid]]
self.entityConvertor.saveClasses(entityCls, self.classDictTri)
return task_finished
def trainClassifier(self):
entityVectors = self.entityConvertor.loadVectorFromConfig()
entityClasses = self.entityConvertor.loadClasses()
'initialize classifier'
entityClassifierParams = self.task_param['classifier']
entityClassifier = loadModule(entityClassifierParams['name'])()
entityClassifier.fit(entityVectors, entityClasses, entityClassifierParams)
entityClassifierOutPath = conf['out_path'] + self.task_param['task name'] + '/classifier/'
if not os.path.exists(entityClassifierOutPath):
os.makedirs(entityClassifierOutPath)
joblib.dump(entityClassifier, entityClassifierOutPath + entityClassifier.__class__.__name__)
def test(self, cps, nbp = None):
outPath = conf['out_path'] + self.task_param['task name'] + '/classifyOutput/' + self.task_param['mode'] + '/'
classifierParams = self.task_param['classifier']
entityClassifierOutPath = conf['out_path'] + self.task_param['task name'] + '/classifier/'
ind = classifierParams['name'].rfind('.')
print 'load classifier', entityClassifierOutPath + classifierParams['name'][ind + 1:]
classifier = joblib.load(entityClassifierOutPath + classifierParams['name'][ind + 1:])
if nbp == None:
nbp = max(mp.cpu_count()/2, 4)
        print 'initialize task queue'
task_queue = mp.Queue()
for fid in cps.fileids():
task_queue.put(fid)
for _ in range(nbp):
task_queue.put(None)
output = mp.Queue()
writer = mp.Process(target = my_multiprocess.writeResultIntoFile,
kwargs={'outputPath':outPath, 'output':output})
writer.start()
self.entityConvertor.initFeatureFromConfig()
self.entityConvertor.load_dictionary()
exampleGenerator = TriggerExample(cps, self.task_param, task_queue, self.entityConvertor)
workers = [mp.Process(target = exampleGenerator.test, kwargs={'classifier':classifier, 'out_queue':output})
for i in range(nbp)]
for worker in workers:
worker.start()
for worker in workers:
worker.join()
output.put(None)
def postProcess(self, cps, PostProcessing):
'load index and prediction'
outPath = conf['out_path'] + self.task_param['task name'] + '/classifyOutput/' + self.task_param['mode'] + '/'
f = open('%s%s' % (outPath, 'vec_index'))
example_index = [eval(line) for line in f]
f = open('%s%s' % (outPath, 'y_pred'))
y_pred = [eval(line) for line in f]
'postprocessing'
outPath = conf['out_path'] + self.task_param['task name'] + '/annotation/' + self.task_param['mode'] + '/'
conf['baseTokName'] = self.task_param['baseTokName']
PostProcessing(cps, example_index, y_pred, self.classDictTri.inverse, outPath)
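# --- Editor's sketch: the fan-out pattern the pipeline classes here rely on --
# a shared task queue with one None sentinel per worker and results collected
# in a Manager dict. Self-contained stand-in; no project code involved.
import multiprocessing as mp

def worker(task_queue, output):
    while True:
        fid = task_queue.get()
        if fid is None:           # sentinel: this worker is done
            break
        output[fid] = fid * 2     # stand-in for feature extraction

if __name__ == '__main__':
    nbp = 4
    task_queue = mp.Queue()
    for fid in range(10):
        task_queue.put(fid)
    for _ in range(nbp):          # one sentinel per worker
        task_queue.put(None)
    manager = mp.Manager()
    output = manager.dict()
    workers = [mp.Process(target=worker, args=(task_queue, output))
               for _ in range(nbp)]
    for w in workers:
        w.start()
    for w in workers:
        w.join()
    print(sorted(output.items()))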
class TriggerExample(ExampleGenerator):
'''
    task_queue: shared queue of file ids, consumed by the worker processes
entityConvertor: converter to extract feature terms
'''
def __init__(self, cps, task_param, task_queue, entityConvertor):
self.cps = cps
super(TriggerExample, self).__init__(task_param)
conf = eval(open('../config/TriC.config').read())
self.triggerDict = represent.feature.TriggerGazetteer(conf['gazetteerName'], conf['gazetteerPath'])
self.eliminateEntitGenerators = []
self.dominantEntityGenerators = []
self.subordinateEntityGenerators = []
for generatorConf in conf['eliminateGenerators']:
params = eval(generatorConf['params'])
params['generator'] = self
generator = loadModule(generatorConf['name'])(**params)
self.eliminateEntitGenerators.append(generator)
for generatorConf in conf['dominantGenerators']:
params = eval(generatorConf['params'])
params['generator'] = self
generator = loadModule(generatorConf['name'])(**params)
self.dominantEntityGenerators.append(generator)
for generatorConf in conf['subordinateGenerators']:
params = eval(generatorConf['params'])
params['generator'] = self
generator = loadModule(generatorConf['name'])(**params)
self.subordinateEntityGenerators.append(generator)
self.baseTokName = task_param['baseTokName']
self.task_queue = task_queue
self.entityConvertor = entityConvertor
self.classDictTri = represent.feature.ClassDict()
self.classDictTri.addKeys(event_class_set)
'''
output: shared dictionary to save the feature vectors in format {fid: [example1, example2]}
'''
def prepaireTrainVectors(self, output):
while True:
fid = self.task_queue.get()
if fid == None:
break
hyper_doc = self.cps.load(fid)
entityExamples = []
for hyper_sent in hyper_doc.hyper_sents:
hyper_sent.entities[candidate_entity] = {}
if len(hyper_sent.proteins) == 0:
continue
'filter the protein strings'
rawText = hyper_doc.raw_text
dominantEntityLst = []
eliminateEntityLst = []
subordinateEntityLst = []
for generator in self.eliminateEntitGenerators:
entityLst, rawText = generator.extractEntities(hyper_doc, hyper_sent, rawText)
eliminateEntityLst.extend(entityLst)
for generator in self.dominantEntityGenerators:
entityLst, rawText = generator.extractEntities(hyper_doc, hyper_sent, rawText)
dominantEntityLst.extend(entityLst)
for generator in self.subordinateEntityGenerators:
entityLst, rawText = generator.extractEntities(hyper_doc, hyper_sent, rawText)
subordinateEntityLst.extend(entityLst)
candidateEntityLst = list((set(subordinateEntityLst) - set(eliminateEntityLst)).union(set(dominantEntityLst)))
for entity in candidateEntityLst:
entity.setPhase(self.baseTokName)
candidateEntityLst.sort(key=lambda entity:''.join(str(tokenInd) for tokenInd in entity.tokIndexInSentenceList()))
'should use gold annotation as intermediate annotation for training'
spannedTriggersLst = [cleanSelfAssignTri(hyper_sent, entity) for entity in candidateEntityLst]
for entity, spannedTriggers in zip(candidateEntityLst, spannedTriggersLst):
if len(spannedTriggers) == 0:
example, index, cls = entity, entity.tokIndexInDocumentList(), non_class
features = self.entityConvertor.extractFeature(example)
entityExamples.append((features, cls, (fid, index)))
else:
example, index = entity, entity.tokIndexInDocumentList()
features = self.entityConvertor.extractFeature(example)
typ = spannedTriggers[0].typ
entityExamples.append((features, typ, (fid, index)))
output[fid] = entityExamples
def test(self, classifier, out_queue):
while True:
fid = self.task_queue.get()
if fid == None:
break
hyper_doc = self.cps.load(fid)
results = []
for hyper_sent in hyper_doc.hyper_sents:
hyper_sent.entities[candidate_entity] = {}
if len(hyper_sent.proteins) == 0:
continue
'filter the protein strings'
rawText = hyper_doc.raw_text
dominantEntityLst = []
eliminateEntityLst = []
subordinateEntityLst = []
for generator in self.eliminateEntitGenerators:
entityLst, rawText = generator.extractEntities(hyper_doc, hyper_sent, rawText)
eliminateEntityLst.extend(entityLst)
for generator in self.dominantEntityGenerators:
entityLst, rawText = generator.extractEntities(hyper_doc, hyper_sent, rawText)
dominantEntityLst.extend(entityLst)
for generator in self.subordinateEntityGenerators:
entityLst, rawText = generator.extractEntities(hyper_doc, hyper_sent, rawText)
subordinateEntityLst.extend(entityLst)
candidateEntityLst = list((set(subordinateEntityLst) - set(eliminateEntityLst)).union(set(dominantEntityLst)))
for entity in candidateEntityLst:
entity.setPhase(self.baseTokName)
candidateEntityLst.sort(key=lambda entity:''.join(str(tokenInd) for tokenInd in entity.tokIndexInSentenceList()))
predictedEntityLst = [(cand, classifier.highestScoreLabel(self.entityConvertor.convertVector(cand)))
for cand in candidateEntityLst]
predictedEntityLst = [x for x in predictedEntityLst if x[1][1][0] != self.classDictTri[non_class]]
for tri, (score, cls) in predictedEntityLst:
cls = cls[0]
results.append(((fid, (tri.charOffsetBeg, tri.charOffsetEnd)), cls))
out_queue.put(results)
'integrates the three factors of example generation: classification, feature extraction and example construction'
class EdgeDetectionMain(object):
def __init__(self, task_param):
'shared obj, used to create dictionary'
#manager = MySyncManager()
#manager.start()
self.edgeConvertor = TriArgPairConvertor(task_param, '../config/TriArgPairFeature.config', 'EdgeC/')
self.classDictEdge = represent.feature.ClassDict()
self.classDictEdge.addKeys(['Theme', 'Cause'])
self.task_param = task_param
def prepareVectors(self, cps, nbp = None):
if nbp == None:
nbp = max(mp.cpu_count()/2, 4)
        print 'initialize task queue'
task_queue = mp.Queue()
for fid in cps.fileids():
task_queue.put(fid)
for _ in range(nbp):
task_queue.put(None)
self.edgeConvertor.initFeatureFromConfig()
exampleDict = mp.Manager().dict()
exampleGenerator = EdgeExample(cps, self.task_param, task_queue,
self.edgeConvertor)
print 'initialize worker process'
workers = [mp.Process(target=exampleGenerator.prepaireTrainVectors, kwargs={'output':exampleDict})
for i in range(nbp)]
for worker in workers:
worker.start()
for worker in workers:
worker.join()
task_finished = task_queue.empty()
print 'processed %s documents: %s' % (len(exampleDict), task_finished)
for fid in cps.fileids():
for example in exampleDict[fid]:
self.edgeConvertor.addKey2Dictionary(example[0])
self.edgeConvertor.compact()
self.edgeConvertor.saveDict()
edgeIndex = [index for fid in cps.fileids() for _,_,index in exampleDict[fid]]
self.edgeConvertor.saveIndex(edgeIndex)
edgeFeat = [feat for fid in cps.fileids() for feat,_,_ in exampleDict[fid]]
self.edgeConvertor.saveVector(edgeFeat)
edgeCls = [cls for fid in cps.fileids() for _,cls,_ in exampleDict[fid]]
self.edgeConvertor.saveClasses(edgeCls, self.classDictEdge)
return task_finished
def trainClassifier(self):
edgeVectors = self.edgeConvertor.loadVectorFromConfig()
edgeClasses = self.edgeConvertor.loadClasses()
#edgeIndex = self.edgeConvertor.loadIndex()
'initialize classifier'
edgeClassifierParams = self.task_param['classifier']
edgeClassifier = loadModule(edgeClassifierParams['name'])()
edgeClassifier.fit(edgeVectors, edgeClasses, edgeClassifierParams)
edgeClassifierOutPath = conf['out_path'] + self.task_param['task name'] + '/classifier/'
if not os.path.exists(edgeClassifierOutPath):
os.makedirs(edgeClassifierOutPath)
joblib.dump(edgeClassifier, edgeClassifierOutPath + edgeClassifier.__class__.__name__)
def test(self, cps, nbp = None):
outPath = conf['out_path'] + self.task_param['task name'] + '/dynamicOutput/' + self.task_param['mode'] + '/'
classifierParams = self.task_param['classifier']
edgeClassifierOutPath = conf['out_path'] + self.task_param['task name'] + '/classifier/'
ind = classifierParams['name'].rfind('.')
print 'load classifier', edgeClassifierOutPath + classifierParams['name'][ind + 1:]
classifier = joblib.load(edgeClassifierOutPath + classifierParams['name'][ind + 1:])
if nbp == None:
nbp = max(mp.cpu_count()/2, 4)
        print 'initialize task queue'
task_queue = mp.Queue()
for fid in cps.fileids():
task_queue.put(fid)
for _ in range(nbp):
task_queue.put(None)
output = mp.Queue()
writer = mp.Process(target = my_multiprocess.writeResultIntoFile,
kwargs={'outputPath':outPath, 'output':output})
writer.start()
self.edgeConvertor.initFeatureFromConfig()
self.edgeConvertor.load_dictionary()
exampleGenerator = EdgeExample(cps, self.task_param, task_queue,
self.edgeConvertor)
workers = [mp.Process(target = exampleGenerator.test, kwargs={'classifier':classifier, 'out_queue':output})
for i in range(nbp)]
for worker in workers:
worker.start()
for worker in workers:
worker.join()
output.put(None)
def postProcess(self, cps, PostProcessing):
'load index and prediction'
outPath = conf['out_path'] + self.task_param['task name'] + '/dynamicOutput/' + self.task_param['mode'] + '/'
f = open('%s%s' % (outPath, 'vec_index'))
example_index = [eval(line) for line in f]
f = open('%s%s' % (outPath, 'y_pred'))
y_pred = [eval(line) for line in f]
'postprocessing'
outPath = conf['out_path'] + self.task_param['task name'] + '/annotation/' + self.task_param['mode'] + '/'
conf['baseTokName'] = self.task_param['baseTokName']
PostProcessing(cps, example_index, y_pred, self.classDictEdge.inverse, outPath)
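# --- Editor's sketch of the classifier persistence used by trainClassifier()
# and test() above: joblib dump on the training side, load by class name on
# the prediction side. Estimator and paths are illustrative; modern code
# imports joblib directly rather than via sklearn.externals.
import os
import joblib
from sklearn.linear_model import LogisticRegression

clf = LogisticRegression().fit([[0.0], [1.0]], [0, 1])
out_path = '/tmp/classifier/'
if not os.path.exists(out_path):
    os.makedirs(out_path)
joblib.dump(clf, out_path + clf.__class__.__name__)
restored = joblib.load(out_path + 'LogisticRegression')
print(restored.predict([[0.2]]))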
class EdgeExample(ExampleGenerator):
'''
    task_queue: shared queue of file ids, consumed by the worker processes
    edgeConvertor: converter to extract feature terms
'''
def __init__(self, cps, task_param, task_queue, edgeConvertor):
super(EdgeExample, self).__init__(task_param)
self.cps = cps
self.baseTokName = task_param['baseTokName']
self.task_queue = task_queue
self.edgeConvertor = edgeConvertor
self.classDictEdge = represent.feature.ClassDict()
self.classDictEdge.addKeys(['Theme', 'Cause'])
def prepaireTrainVectors(self, output):
while True:
fid = self.task_queue.get()
if fid == None:
break
hyper_doc = self.cps.load(fid)
edgeExamples = []
for hyper_sent in hyper_doc.hyper_sents:
hyper_sent.entities[candidate_entity] = {}
if len(hyper_sent.proteins) == 0:
continue
'filter the protein strings'
triList = hyper_sent.target_annotation[gold_annotation]['triggers'].values()
protList = hyper_sent.proteins.values()
argList = triList+protList
for tri in triList:
trueArgs = [(rel.dst, rel.typ) for evt in tri.events for rel in evt.relations]
#print 'true arguments:', ' '.join([str((trueArg[0].label, trueArg[1]))for trueArg in trueArgs])
#print 'candidate argument:',
for arg in argList:
cls = next((trueArg[1] for trueArg in trueArgs if trueArg[0].label==arg.label), non_class)
try:
float(cls[-1])
cls = cls[:-1]
except:
pass
#print (arg.label, cls),
features = self.edgeConvertor.extractFeature((tri.entity, arg.entity))
edgeExamples.append((features, cls, (fid, tri.label, arg.label)))
#print
output[fid] = edgeExamples
def test(self, classifier, out_queue):
while True:
fid = self.task_queue.get()
if fid == None:
break
hyper_doc = self.cps.load(fid)
results = []
for hyper_sent in hyper_doc.hyper_sents:
hyper_sent.entities[candidate_entity] = {}
if len(hyper_sent.proteins) == 0:
continue
triList = hyper_sent.target_annotation[intermediate_annotation]['triggers'].values()
protList = hyper_sent.proteins.values()
argList = triList+protList
for tri in triList:
for arg in argList:
if tri.label == arg.label:
continue
features = self.edgeConvertor.convertVector((tri.entity, arg.entity))
#print features
score, cls = classifier.highestScoreLabel(features)
cls = cls[0]
if cls != self.classDictEdge[non_class]:
results.append(((fid, (tri.label, arg.label)), cls))
out_queue.put(results)
'integrates the three factors of example generation: classification, feature extraction and example construction'
class MergeCauseMain(object):
def __init__(self, task_param):
'shared obj, used to create dictionary'
#manager = MySyncManager()
#manager.start()
self.tripletConvertor = AddArgConvertor(task_param, '../config/RegAddArgFeature.config', 'MergeCause/')
self.classDict = represent.feature.ClassDict()
self.classDict.addKeys([pos_class])
self.task_param = task_param
def prepareVectors(self, cps, nbp = None):
if nbp == None:
nbp = max(mp.cpu_count()/2, 4)
        print 'initialize task queue'
task_queue = mp.Queue()
for fid in cps.fileids():
task_queue.put(fid)
for _ in range(nbp):
task_queue.put(None)
self.tripletConvertor.initFeatureFromConfig()
exampleDict = mp.Manager().dict()
exampleGenerator = MergeCauseExample(cps, self.task_param, task_queue,
self.tripletConvertor)
print 'initialize worker process'
workers = [mp.Process(target=exampleGenerator.prepaireTrainVectors, kwargs={'output':exampleDict})
for i in range(nbp)]
for worker in workers:
worker.start()
for worker in workers:
worker.join()
task_finished = task_queue.empty()
print 'processed %s documents: %s' % (len(exampleDict), task_finished)
for fid in cps.fileids():
for example in exampleDict[fid]:
self.tripletConvertor.addKey2Dictionary(example[0])
self.tripletConvertor.compact()
self.tripletConvertor.saveDict()
tripletIndex = [index for fid in cps.fileids() for _,_,index in exampleDict[fid]]
self.tripletConvertor.saveIndex(tripletIndex)
tripletFeat = [feat for fid in cps.fileids() for feat,_,_ in exampleDict[fid]]
self.tripletConvertor.saveVector(tripletFeat)
tripletCls = [cls for fid in cps.fileids() for _,cls,_ in exampleDict[fid]]
self.tripletConvertor.saveClasses(tripletCls, self.classDict)
return task_finished
def trainClassifier(self):
tripletVectors = self.tripletConvertor.loadVectorFromConfig()
tripletClasses = self.tripletConvertor.loadClasses()
'initialize classifier'
tripletClassifierParams = self.task_param['classifier']
tripletClassifier = loadModule(tripletClassifierParams['name'])()
tripletClassifier.fit(tripletVectors, tripletClasses, tripletClassifierParams)
tripletClassifierOutPath = conf['out_path'] + self.task_param['task name'] + '/classifier/'
if not os.path.exists(tripletClassifierOutPath):
os.makedirs(tripletClassifierOutPath)
joblib.dump(tripletClassifier, tripletClassifierOutPath + tripletClassifier.__class__.__name__)
def test(self, cps, nbp = None):
outPath = conf['out_path'] + self.task_param['task name'] + '/dynamicOutput/' + self.task_param['mode'] + '/'
classifierParams = self.task_param['classifier']
tripletClassifierOutPath = conf['out_path'] + self.task_param['task name'] + '/classifier/'
ind = classifierParams['name'].rfind('.')
print 'load classifier', tripletClassifierOutPath + classifierParams['name'][ind + 1:]
classifier = joblib.load(tripletClassifierOutPath + classifierParams['name'][ind + 1:])
if nbp == None:
nbp = max(mp.cpu_count()/2, 4)
        print 'initialize task queue'
task_queue = mp.Queue()
for fid in cps.fileids():
task_queue.put(fid)
for _ in range(nbp):
task_queue.put(None)
output = mp.Queue()
writer = mp.Process(target = my_multiprocess.writeResultIntoFile,
kwargs={'outputPath':outPath, 'output':output})
writer.start()
self.tripletConvertor.initFeatureFromConfig()
self.tripletConvertor.load_dictionary()
exampleGenerator = MergeCauseExample(cps, self.task_param, task_queue,
self.tripletConvertor)
workers = [mp.Process(target = exampleGenerator.test, kwargs={'classifier':classifier, 'out_queue':output})
for i in range(nbp)]
for worker in workers:
worker.start()
for worker in workers:
worker.join()
output.put(None)
def postProcess(self, cps, PostProcessing):
'load index and prediction'
outPath = conf['out_path'] + self.task_param['task name'] + '/dynamicOutput/' + self.task_param['mode'] + '/'
f = open('%s%s' % (outPath, 'vec_index'))
example_index = [eval(line) for line in f]
f = open('%s%s' % (outPath, 'y_pred'))
y_pred = [eval(line) for line in f]
'postprocessing'
outPath = conf['out_path'] + self.task_param['task name'] + '/annotation/' + self.task_param['mode'] + '/'
conf['baseTokName'] = self.task_param['baseTokName']
PostProcessing(cps, example_index, y_pred, self.classDict.inverse, outPath)
class MergeCauseExample(ExampleGenerator):
'''
    task_queue: shared queue of file ids, consumed by the worker processes
    tripletConvertor: converter to extract feature terms
'''
argTypNames = ['Theme', 'Cause']
def __init__(self, cps, task_param, task_queue, tripletConvertor):
super(MergeCauseExample, self).__init__(task_param)
self.cps = cps
self.baseTokName = task_param['baseTokName']
self.task_queue = task_queue
self.tripletConvertor = tripletConvertor
self.classDict = represent.feature.ClassDict()
self.classDict.addKeys(event_class_set)
def prepaireTrainVectors(self, output):
while True:
fid = self.task_queue.get()
if fid == None:
break
hyper_doc = self.cps.load(fid)
tripletExamples = []
for hyper_sent in hyper_doc.hyper_sents:
hyper_sent.entities[candidate_entity] = {}
if len(hyper_sent.proteins) == 0:
continue
'filter the protein strings'
triggerLst = hyper_sent.target_annotation[gold_annotation]['triggers'].values()
regTriLst = [tri for tri in triggerLst if tri.typ in reg_class_set]
protLst = hyper_sent.proteins.values()
argLst = triggerLst+protLst
for tri in regTriLst:
trueArgs = [set((rel.dst, rel.typ) for rel in evt.relations) for evt in tri.events ]
#if any(len(trueArg) == 2 for trueArg in trueArgs):
# print 'true args:\t', ' '.join(str([(dst.label, typ) for dst, typ in x]) for x in trueArgs)
for config in itertools.permutations(argLst, 2):
conf_set = set(zip(config, self.argTypNames))
cls = pos_class if conf_set in trueArgs else non_class
#print 'conf_set, ', ' '.join(str((x[0].label, x[1])) for x in conf_set), cls
features = self.tripletConvertor.extractFeature((tri.entity, config[0].entity, config[1].entity))
tripletExamples.append((features, cls, (fid, (tri.label, config[0].label, config[1].label))))
output[fid] = tripletExamples
def test(self, classifier, out_queue):
while True:
fid = self.task_queue.get()
if fid == None:
break
hyper_doc = self.cps.load(fid)
results = []
for hyper_sent in hyper_doc.hyper_sents:
hyper_sent.entities[candidate_entity] = {}
if len(hyper_sent.proteins) == 0:
continue
regTriList = [tri for tri in hyper_sent.target_annotation[intermediate_annotation]['triggers'].values()
if tri.typ in reg_class_set]
for tri in regTriList:
themeArgs = [rel.dst for evt in tri.events for rel in evt.relations if rel.typ == 'Theme']
causeArgs = [rel.dst for evt in tri.events for rel in evt.relations if rel.typ == 'Cause']
for themeArg in themeArgs:
for causeArg in causeArgs:
features = self.tripletConvertor.convertVector((tri.entity, themeArg.entity, causeArg.entity))
score, cls = classifier.highestScoreLabel(features)
cls = cls[0]
if cls != self.classDict[non_class]:
results.append(((fid, (tri.label, themeArg.label, causeArg.label)), cls))
out_queue.put(results)
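# --- Editor's sketch: how MergeCauseExample labels (Theme, Cause) argument
# pairs -- itertools.permutations over candidates, each pairing compared
# against the gold relation sets. Entities are plain-string stand-ins here;
# the real code works on annotation objects.
import itertools

arg_candidates = ['protA', 'protB', 'trigX']
gold_events = [{('protA', 'Theme'), ('protB', 'Cause')}]  # one gold event
arg_typ_names = ['Theme', 'Cause']

for config in itertools.permutations(arg_candidates, 2):
    conf_set = set(zip(config, arg_typ_names))
    cls = 'pos' if conf_set in gold_events else 'non'
    print(config, cls)  # only ('protA', 'protB') comes out positive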
|
XiaoLiuAI/RUPEE
|
src/python/intg/pipeline.py
|
Python
|
gpl-2.0
| 29,500
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""## Control Flow Operations
TensorFlow provides several operations and classes that you can use to control
the execution of operations and add conditional dependencies to your graph.
@@identity
@@tuple
@@group
@@no_op
@@count_up_to
@@cond
@@case
## Logical Operators
TensorFlow provides several operations that you can use to add logical operators
to your graph.
@@logical_and
@@logical_not
@@logical_or
@@logical_xor
## Comparison Operators
TensorFlow provides several operations that you can use to add comparison
operators to your graph.
@@equal
@@not_equal
@@less
@@less_equal
@@greater
@@greater_equal
@@select
@@where
## Debugging Operations
TensorFlow provides several operations that you can use to validate values and
debug your graph.
@@is_finite
@@is_inf
@@is_nan
@@verify_tensor_all_finite
@@check_numerics
@@add_check_numerics_ops
@@Assert
@@Print
"""
# pylint: disable=g-bad-name
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import six
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import common_shapes
from tensorflow.python.ops import constant_op
from tensorflow.python.ops import gen_array_ops
from tensorflow.python.ops import gen_control_flow_ops
from tensorflow.python.ops import gen_data_flow_ops
from tensorflow.python.ops import logging_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import tensor_array_ops
# pylint: disable=wildcard-import,undefined-variable
from tensorflow.python.ops.gen_control_flow_ops import *
# pylint: enable=wildcard-import
from tensorflow.python.platform import logging
# We override the 'tuple' for a control flow op, so we keep python's
# existing 'tuple' for later use in this module.
_basetuple = tuple
# pylint: disable=protected-access
def _Identity(data, name=None):
"""Return a tensor with the same shape and contents as the input tensor.
Args:
data: A Tensor.
name: A name for this operation (optional).
Returns:
A Tensor with the same type and value as the input Tensor.
"""
if not data.dtype.is_ref_dtype:
return array_ops.identity(data, name=name)
else:
return gen_array_ops._ref_identity(data, name=name)
def _NextIteration(data, name=None):
if not data.dtype.is_ref_dtype:
return next_iteration(data, name=name)
else:
return ref_next_iteration(data, name=name)
def _Merge(values, name=None):
if all([v.dtype.is_ref_dtype for v in values]):
return gen_control_flow_ops._ref_merge(values, name)
else:
return gen_control_flow_ops._merge(values, name)
def _Enter(data, frame_name, is_constant=False, parallel_iterations=10,
use_ref=True, name=None):
"""Creates or finds a child frame, and makes `data` available to it.
The unique `frame_name` is used by the `Executor` to identify frames. If
`is_constant` is true, `data` is a constant in the child frame; otherwise
it may be changed in the child frame. At most `parallel_iterations`
iterations are run in parallel in the child frame.
Args:
data: The tensor to be made available to the child frame.
frame_name: The name of the child frame.
is_constant: If true, the output is constant within the child frame.
parallel_iterations: The number of iterations allowed to run in parallel.
use_ref: If true, use ref_enter if data is of ref type.
name: A name for this operation (optional).
Returns:
The same tensor as `data`.
"""
if data.dtype.is_ref_dtype and use_ref:
return ref_enter(data, frame_name, is_constant, parallel_iterations,
name=name)
else:
return enter(data, frame_name, is_constant, parallel_iterations,
name=name)
def exit(data, name=None):
"""Exits the current frame to its parent frame.
Exit makes its input `data` available to the parent frame.
Args:
data: The tensor to be made available to the parent frame.
name: A name for this operation (optional).
Returns:
The same tensor as `data`.
"""
if data.dtype.is_ref_dtype:
return gen_control_flow_ops._ref_exit(data, name)
else:
return gen_control_flow_ops._exit(data, name)
def switch(data, pred, dtype=None, name=None):
"""Forwards `data` to an output determined by `pred`.
  If `pred` is true, the `data` input is forwarded to `output_true`.
  Otherwise, the data goes to `output_false`.
This op handles `Tensor`s and `IndexedSlices`.
Args:
data: The tensor to be forwarded to the appropriate output.
pred: A scalar that specifies which output port will receive data.
dtype: Optional element type for the returned tensor. If missing,
      the type is inferred from the type of `data`.
name: A name for this operation (optional).
Returns:
`(output_false, output_true)`: If `pred` is true, data will be forwarded to
`output_true`, otherwise it goes to `output_false`.
"""
with ops.op_scope([data, pred], name, "Switch") as name:
data = ops.convert_to_tensor_or_indexed_slices(data, dtype=dtype,
name="data")
pred = ops.convert_to_tensor(pred, name="pred")
if isinstance(data, ops.Tensor):
return gen_control_flow_ops._switch(data, pred, name=name)
else:
val, ind, dense_shape = data.values, data.indices, data.dense_shape
val_f, val_t = gen_control_flow_ops._switch(val, pred, name=name)
ind_f, ind_t = gen_control_flow_ops._switch(ind, pred, name="indices")
if dense_shape:
dense_shape_f, dense_shape_t = gen_control_flow_ops._switch(
dense_shape, pred, name="dense_shape")
else:
dense_shape_f, dense_shape_t = None, None
return (ops.IndexedSlices(val_f, ind_f, dense_shape_f),
ops.IndexedSlices(val_t, ind_t, dense_shape_t))
def merge(inputs, name=None):
"""Returns the value of an available element of `inputs`.
This op tests each of the tensors in `inputs` in turn to determine if any of
them is available. If it finds an available tensor, it returns it and its
index in `inputs`.
It is an error if more than one tensor in `inputs` is available. If no tensor
in `inputs` is available, the returned tensor and index are not set.
This op handles both `Tensor`s and `IndexedSlices`. If inputs has a mix of
`Tensor`s and `IndexedSlices`, all inputs are converted to IndexedSlices
before merging.
Args:
inputs: The input tensors, at most one of which is available.
name: A name for this operation (optional).
Returns:
A tuple containing the chosen input tensor and its index in `inputs`.
Raises:
ValueError: If inputs are IndexedSlices and some but not all have a
dense_shape property.
"""
with ops.op_scope(inputs, name, "Merge") as name:
inputs = [ops.convert_to_tensor_or_indexed_slices(inp)
for inp in inputs]
if all([isinstance(inp, ops.Tensor) for inp in inputs]):
return _Merge(inputs, name=name)
else:
inputs = math_ops._as_indexed_slices_list(inputs)
values, _ = _Merge([inp.values for inp in inputs], name=name)
indices, chosen_index = _Merge(
[inp.indices for inp in inputs], name="indices")
if any(inp.dense_shape for inp in inputs):
if not all(inp.dense_shape for inp in inputs):
raise ValueError("Either all merged IndexedSlices must have a "
"dense_shape, or none must have a dense_shape.")
dense_shape, _ = _Merge(
[inp.dense_shape for inp in inputs], name="dense_shape")
else:
dense_shape = None
return ops.IndexedSlices(values, indices, dense_shape), chosen_index
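# --- Editor's sketch (assumption, TF 1.x-era graph API): how switch() and
# merge() compose into a hand-built conditional -- essentially what cond()
# automates. Only the live branch's ops execute; merge picks whichever fires.
import tensorflow as tf
from tensorflow.python.ops import control_flow_ops

x = tf.constant(3)
pred = tf.constant(True)
x_false, x_true = control_flow_ops.switch(x, pred)   # route x to one branch
result, index = control_flow_ops.merge([x_false - 1, x_true + 1])
with tf.Session() as sess:
    print(sess.run(result))   # 4, since pred is True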
def _SwitchRefOrTensor(data, pred, name="Switch"):
"""Forwards `data` to an output determined by `pred`.
  If `pred` is true, the `data` input is forwarded to `output_true`.
  Otherwise, the data goes to `output_false`.
This op handles `Tensor`s and `IndexedSlices`.
Args:
data: The tensor to be forwarded to the appropriate output.
pred: A scalar that specifies which output port will receive data.
name: A name for this operation (optional).
Returns:
  `(output_false, output_true)`: If `pred` is true, data will be forwarded to
`output_true`, otherwise it goes to `output_false`.
Raises:
TypeError: if data is not a Tensor or IndexedSlices
"""
data = ops.convert_to_tensor_or_indexed_slices(data, name="data")
# NOTE(mrry): ops.device(None) below addresses the following scenario.
#
# Assume you execute Optimizer.apply_gradients() in a branch of a cond().
#
# 1. The update op is created inside a `with tf.device(var.device):` block
# say var.device = "/job:ps/task:1".
#
# 2. Some tensor `data` is captured and a switch is created in a
# `with tf.device(data.device):` block (data.device = "/job:worker_train").
#
# with tf.device("/job:ps/task:1"):
# with tf.device("/job:worker_train"):
# op = ...
#
# But then calling `print op.device` returns:
# ==> "/job:worker_train/task:1" -- a device that doesn't exist in this case!
with ops.colocate_with(data):
if isinstance(data, ops.Tensor):
if not data.dtype.is_ref_dtype:
return switch(data, pred, name=name)
else:
return ref_switch(data, pred, name=name)
else:
return switch(data, pred, name=name)
def _convert_tensorarrays_to_flows(tensors_or_tensor_arrays):
return [ta.flow if isinstance(ta, tensor_array_ops.TensorArray)
else ta
for ta in tensors_or_tensor_arrays]
def _convert_flows_to_tensorarrays(tensors_or_tensorarrays, tensors_or_flows):
if len(tensors_or_tensorarrays) != len(tensors_or_flows):
raise ValueError(
"Lengths of original Tensor list and new list do not match: %d vs. %d"
% (len(tensors_or_tensorarrays), len(tensors_or_flows)))
return [
tensor_array_ops.TensorArray(
dtype=ta.dtype, handle=ta.handle, flow=t_or_flow)
if isinstance(ta, tensor_array_ops.TensorArray)
else t_or_flow
for (ta, t_or_flow) in zip(tensors_or_tensorarrays, tensors_or_flows)]
class ControlFlowOpWrapper(object):
"""A wrapper class for Operation.
A wrapped op allows us to capture the uses of its inputs and outputs. In
gradients(), right before calling the gradient function of an op, we wrap
  the op by calling MakeWrapper. So during the execution of the gradient
  function of an op, whenever one of its inputs/outputs is used, we
generate code to remember its values for all iterations.
"""
class _ControlFlowOpInputs(object):
"""An indirection to capture the input tensors needed in backprop."""
def __init__(self, op, grad_state):
self._op = op
self._grad_state = grad_state
self._inputs = None
def __len__(self):
return len(self._op._inputs)
def __getitem__(self, index):
if self._inputs is None:
self._inputs = [None for _ in self._op.inputs]
if isinstance(index, int):
val = self._inputs[index]
if val is None:
f_val = self._op.inputs[index]
val = self._grad_state.GetRealValue(f_val)
self._inputs[index] = val
return val
elif isinstance(index, slice):
start, stop, step = index.indices(len(self))
vals = [self[i] for i in xrange(start, stop, step)]
return vals
else:
raise TypeError("index must be an integer or slice")
class _ControlFlowOpOutputs(object):
"""An indirection to capture the output tensors needed in backprop."""
def __init__(self, op, grad_state):
self._op = op
self._grad_state = grad_state
self._outputs = None
def __len__(self):
return len(self._op._outputs)
def __getitem__(self, index):
if self._outputs is None:
self._outputs = [None for _ in self._op.outputs]
if isinstance(index, int):
val = self._outputs[index]
if val is None:
f_val = self._op.outputs[index]
val = self._grad_state.GetRealValue(f_val)
self._outputs[index] = val
return val
elif isinstance(index, slice):
start, stop, step = index.indices(len(self))
vals = [self[i] for i in xrange(start, stop, step)]
return vals
else:
raise TypeError("index must be an integer or slice")
def __init__(self, op, grad_state):
self._grad_state = grad_state # The GradLoopState this op belongs to.
self._op = op
self._inputs = None
self._outputs = None
@property
def grad_state(self):
return self._grad_state
@property
def inputs(self):
if self._inputs is None:
self._inputs = self._ControlFlowOpInputs(self._op, self._grad_state)
return self._inputs
@property
def outputs(self):
if self._outputs is None:
self._outputs = self._ControlFlowOpOutputs(self._op, self._grad_state)
return self._outputs
@property
def op(self):
return self._op
@property
def name(self):
"""Returns the name of this instance of op."""
return self._op.name
@property
def _id(self):
"""Returns the unique id of this operation."""
return self._op._id
@property
def device(self):
"""Returns the device of this operation.
Returns:
a string or None if the device was not set.
"""
return self._op.device
@property
def type(self):
"""Returns the type of the op."""
return self._op.type
@property
def graph(self):
"""The `Graph` that contains this operation."""
return self._op.graph
def get_attr(self, name):
"""Returns the value of the attr of this op with the given `name`."""
return self._op.get_attr(name)
def _get_control_flow_context(self):
"""Returns the control flow context of this op."""
return self._op._get_control_flow_context()
def _IsLoopConstantEnter(op):
"""Returns true iff op is a loop invariant."""
is_enter = (op.type == "Enter" or op.type == "RefEnter")
return is_enter and op.get_attr("is_constant")
def _IsLoopExit(op):
return op.type == "Exit" or op.type == "RefExit"
class GradLoopState(object):
"""The state used for constructing the gradient graph for a while loop.
We create a GradLoopState for each while loop in forward and its
corresponding while loop in backprop. This gives us access to both
the forward and the backprop WhileContexts.
  During the construction of the gradient graph, whenever we detect
  a forward value that is needed for backprop, we create a history
  accumulator and add it to `history_map`. Whenever we backprop
  a loop switch op (in _SwitchGrad), we add the grad merge op to
  `switch_map`.
"""
def __init__(self, forward_ctxt, outer_grad_state):
# The grad loop state for the outer while loop.
self._outer_grad_state = None
# The while loop context for forward.
self._forward_context = None
# The loop counter added by AddForwardCounter. It is the value
# of the loop counter for the next iteration.
self._forward_index = None
# A sync op for forward.
self._forward_sync = None
# The while loop context for backprop.
self._grad_context = None
# The loop counter added by AddBackPropCounter. It is the value
# of the loop counter for the current iteration.
self._grad_index = None
# A sync op for backprop.
self._grad_sync = None
# Information needed by backprop.
self._history_map = {}
self._switch_map = {}
self._outer_grad_state = outer_grad_state
if outer_grad_state:
outer_forward_ctxt = outer_grad_state.forward_context
else:
outer_forward_ctxt = forward_ctxt.outer_context
# Add the forward loop counter.
if outer_forward_ctxt: outer_forward_ctxt.Enter()
cnt, forward_index = forward_ctxt.AddForwardCounter()
if outer_forward_ctxt: outer_forward_ctxt.Exit()
self._forward_context = forward_ctxt
self._forward_index = forward_index
# Add the backprop WhileContext, and the backprop loop counter.
if outer_grad_state:
# This is a nested loop. Remember the iteration counts for each
# execution of this inner loop.
outer_forward_ctxt.AddName(cnt.name)
history_cnt = outer_grad_state.AddForwardAccumulator(cnt)
outer_grad_ctxt = outer_grad_state.grad_context
outer_grad_ctxt.Enter()
self._grad_context = WhileContext(forward_ctxt.parallel_iterations,
forward_ctxt.back_prop,
forward_ctxt.swap_memory,
forward_ctxt.name)
real_cnt = outer_grad_state.AddBackPropAccumulatedValue(history_cnt, cnt)
self._grad_index = self._grad_context.AddBackPropCounter(real_cnt)
outer_grad_ctxt.Exit()
else:
if outer_forward_ctxt: outer_forward_ctxt.Enter()
self._grad_context = WhileContext(forward_ctxt.parallel_iterations,
forward_ctxt.back_prop,
forward_ctxt.swap_memory,
forward_ctxt.name)
self._grad_index = self._grad_context.AddBackPropCounter(cnt)
if outer_forward_ctxt: outer_forward_ctxt.Exit()
@property
def outer_grad_state(self):
"""The grad loop state for outer loop."""
return self._outer_grad_state
@property
def forward_context(self):
"""The while loop context for forward."""
return self._forward_context
@property
def forward_index(self):
"""The loop index of forward loop."""
return self._forward_index
@property
def forward_sync(self):
"""A control trigger node for synchronization in the forward loop.
One main use is to keep the push ops of a stack executed in the
iteration order.
"""
if self._forward_sync is None:
with ops.control_dependencies(None):
self._forward_sync = control_trigger(name="f_sync")
self._forward_sync._set_control_flow_context(self._forward_context)
self._forward_index.op._add_control_input(self._forward_sync)
return self._forward_sync
@property
def grad_context(self):
"""The corresponding WhileContext for gradient."""
return self._grad_context
@property
def grad_index(self):
"""The loop index of backprop loop."""
return self._grad_index
@property
def grad_sync(self):
"""A control trigger node for synchronization in the grad loop.
One main use is to keep the pop ops of a stack executed in the
iteration order.
"""
if self._grad_sync is None:
with ops.control_dependencies(None):
self._grad_sync = control_trigger(name="b_sync")
self._grad_sync._set_control_flow_context(self._grad_context)
self._grad_index.op._add_control_input(self._grad_sync)
return self._grad_sync
@property
def history_map(self):
"""The map that records all the tensors needed for backprop."""
return self._history_map
@property
def switch_map(self):
"""The map that records all the Switch ops for the While loop."""
return self._switch_map
def AddForwardAccumulator(self, value, dead_branch=False):
"""Add an accumulator for each forward tensor that is needed in backprop.
This is added to the forward loop at the first time when a tensor
in the forward loop is used by backprop gradient computation loop.
We create an accumulator that accumulates the value of tensor at each
iteration. Called in the control flow context where gradients() is called.
The pseudocode is:
```
acc = stack();
while (_pivot) {
acc = stack_push(acc, value);
}
```
We make sure that the stack push op in one iteration is executed before
next iteration. This is achieved by adding a control edge from
`forward_index.op.inputs[0].op` to the push op, and another control
edge from the push op to either `forward_index.op` or `forward_sync`.
Args:
value: The tensor that is to be accumulated.
dead_branch: True iff the tensor is on a dead branch of a cond.
Returns:
The stack that contains the accumulated history of the tensor.
"""
# TODO(yuanbyu): Make sure the colocation of stack ops and value.
# pylint: disable=protected-access
acc = gen_data_flow_ops._stack(value.dtype.base_dtype, name="f_acc")
# pylint: enable=protected-access
# Make acc available in the forward context.
enter_acc = self.forward_context.AddValue(acc)
# Add the stack_push op in the context of value.op.
swap_enabled = self.forward_context.swap_memory
value_ctxt = value.op._get_control_flow_context()
if _IsLoopExit(value.op):
value_ctxt = value_ctxt.outer_context
if value_ctxt == self.forward_context:
# value is not nested in the forward context.
self.forward_context.Enter()
push = gen_data_flow_ops._stack_push(enter_acc, value,
swap_memory=swap_enabled)
self.forward_context.Exit()
# Protect stack push and order it before forward_index.
self.forward_index.op._add_control_input(push.op)
else:
# value is in a cond context within the forward context.
assert isinstance(value_ctxt, CondContext)
if dead_branch:
# The special case for creating a zero tensor for a dead
# branch of a switch. See ControlFlowState.ZerosLike().
value_ctxt.outer_context.Enter()
push = gen_data_flow_ops._stack_push(enter_acc, value,
swap_memory=swap_enabled)
value_ctxt.outer_context.Exit()
push.op._set_control_flow_context(value_ctxt)
else:
value_ctxt.Enter()
push = gen_data_flow_ops._stack_push(enter_acc, value,
swap_memory=swap_enabled)
value_ctxt.Exit()
# Protect stack push and order it before forward_sync.
self.forward_sync._add_control_input(push.op)
# Order stack push after the successor of forward_index
add_op = self.forward_index.op.inputs[0].op
push.op._add_control_input(add_op)
return acc
def AddBackPropAccumulatedValue(self, history_value, value,
dead_branch=False):
"""Add the getter for an accumulated value in the grad context.
This is added to the backprop loop. Called in the grad context to
get the value of an accumulated value. The stack pop op must be guarded
by the pred of the controlling cond.
Args:
history_value: The history (a stack) of a value.
value: The value that is pushed onto the stack.
dead_branch: True iff the tensor is on a dead branch of a cond.
Returns:
The current value (the top of the stack).
"""
history_ctxt = history_value.op._get_control_flow_context()
# Find the cond context that controls history_value.
cond_ctxt = None
value_ctxt = value.op._get_control_flow_context()
while value_ctxt and value_ctxt != history_ctxt:
if isinstance(value_ctxt, CondContext):
cond_ctxt = value_ctxt
break
value_ctxt = value_ctxt.outer_context
if cond_ctxt:
# Guard stack pop with a switch if it is controlled by a cond
grad_state = self
pred = None
while not pred and grad_state:
pred = grad_state.history_map.get(cond_ctxt.pred.name)
grad_state = grad_state.outer_grad_state
branch = (1 - cond_ctxt.branch) if dead_branch else cond_ctxt.branch
history_value = _SwitchRefOrTensor(history_value, pred)[branch]
pop = gen_data_flow_ops._stack_pop(history_value, value.dtype.base_dtype)
if self.grad_context.parallel_iterations > 1:
# All pops are ordered after pivot_for_body and before grad_sync.
self.grad_sync._add_control_input(pop.op)
return pop
def GetRealValue(self, value):
"""Get the real value.
If backprop "uses" a value produced by forward inference, an
accumulator is added in the forward loop to accumulate its values.
We use the accumulated value.
Args:
value: A tensor to be captured.
Returns:
The same tensor value from the saved history.
"""
assert value.op.type != "Variable"
real_value = self._history_map.get(value.name)
if real_value is None:
if _IsLoopConstantEnter(value.op):
# Special case for loop invariant.
if self._outer_grad_state:
# This is a nested loop so we record the history of this
# value in outer_forward_ctxt.
self._grad_context.Exit()
outer_value = value.op.inputs[0]
history_value = self._outer_grad_state.AddForwardAccumulator(
outer_value)
self._grad_context.Enter()
else:
# Just use the input value of this Enter node.
real_value = GetRealOp(value.op).inputs[0]
else:
# Record the history of this value in forward_ctxt.
# NOTE(yuanbyu): Don't record for constants.
self._grad_context.Exit()
history_value = self.AddForwardAccumulator(value)
self._grad_context.Enter()
if real_value is None:
# Add the stack pop op in the grad context.
real_value = self.AddBackPropAccumulatedValue(history_value, value)
self._history_map[value.name] = real_value
return real_value
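# --- Editor's sketch: the stack-accumulator idea behind AddForwardAccumulator/
# GetRealValue, shown on a hand-unrolled loop in plain Python. The forward pass
# pushes every intermediate onto a stack; the backward pass pops them in
# reverse iteration order. Purely illustrative, no TensorFlow machinery.
def forward(x, steps):
    history = []                  # plays the role of the f_acc stack
    for _ in range(steps):
        history.append(x)         # stack_push: remember the pre-update value
        x = x * x                 # y = x ** 2 each iteration
    return x, history

def backward(grad, history):
    while history:                # stack_pop in reverse iteration order
        x = history.pop()
        grad = grad * 2 * x       # chain rule: d(x**2)/dx = 2x per iteration
    return grad

y, hist = forward(3.0, 2)         # (3 ** 2) ** 2 = 81
print(backward(1.0, hist))        # 108.0 == d(x**4)/dx at x = 3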
def _GetWhileContext(op):
"""Get the WhileContext to which this op belongs."""
ctxt = op._get_control_flow_context()
if ctxt:
ctxt = ctxt.GetWhileContext()
return ctxt
class ControlFlowState(object):
"""Maintain the mapping from the loops to their grad states."""
def __init__(self):
self._map = {} # maps forward loop context to GradLoopState
def _GetGradState(self, op):
"""Get the gradient loop state for this op if any."""
if _IsLoopExit(op):
forward_ctxt = op._get_control_flow_context()
forward_ctxt = forward_ctxt.outer_context
if forward_ctxt:
forward_ctxt = forward_ctxt.GetWhileContext()
else:
forward_ctxt = _GetWhileContext(op)
if forward_ctxt:
return self._map.get(forward_ctxt)
return None
def MakeWrapper(self, op):
"""Make a wrapper for op if it is in a WhileContext."""
forward_ctxt = _GetWhileContext(op)
if forward_ctxt:
grad_state = self._map.get(forward_ctxt)
if grad_state:
return ControlFlowOpWrapper(op, grad_state)
return op
def GetAllLoopExits(self):
"""Return a list containing the exits of all the loops."""
loop_exits = []
for forward_ctxt in self._map:
for loop_exit in forward_ctxt.loop_exits:
loop_exits.append(loop_exit)
return loop_exits
def EnterGradWhileContext(self, op):
"""Enter the WhileContext for gradient computation."""
grad_state = self._GetGradState(op)
if grad_state:
grad_state.grad_context.Enter()
def ExitGradWhileContext(self, op):
"""Exit the WhileContext for gradient computation."""
grad_state = self._GetGradState(op)
if grad_state:
grad_state.grad_context.Exit()
def AddWhileContext(self, op, between_op_list, between_ops):
"""Add the grad state for the while loop that op belongs to.
Note that op is an Exit, and this method must be called in
the control flow context where gradients() is called.
Note that this method modifies `between_op_list` and `between_ops`.
"""
forward_ctxt = _GetWhileContext(op)
grad_state = self._map.get(forward_ctxt)
if grad_state is None:
# This is a new while loop so create a grad state for it.
outer_forward_ctxt = forward_ctxt.outer_context
if outer_forward_ctxt:
outer_forward_ctxt = outer_forward_ctxt.GetWhileContext()
outer_grad_state = None
if outer_forward_ctxt:
outer_grad_state = self._map.get(outer_forward_ctxt)
grad_state = GradLoopState(forward_ctxt, outer_grad_state)
self._map[forward_ctxt] = grad_state
# We need to include all exits of a loop for backprop.
for loop_exit in forward_ctxt.loop_exits:
if not between_ops[loop_exit.op._id]:
between_ops[loop_exit.op._id] = True
between_op_list.append(loop_exit.op)
def ZerosLikeForExit(self, val):
"""Create zeros_like gradient for a loop exit.
If the result of a loop variable is not used but is involved in
computing the result of some needed loop variable, we create a
zero-valued tensor that is fed as gradient for the Exit node of that
loop variable. Note that val.op is an Exit, and this method must be
called in the control flow context where gradients() is called.
Args:
val: The output tensor of an Exit op.
Returns:
      A zero tensor with the same shape as val.
"""
val_shape = val.get_shape()
forward_ctxt = val.op._get_control_flow_context()
outer_forward_ctxt = forward_ctxt.outer_context
if outer_forward_ctxt:
outer_forward_ctxt = outer_forward_ctxt.GetWhileContext()
outer_grad_state = None
if outer_forward_ctxt:
outer_grad_state = self._map.get(outer_forward_ctxt)
if outer_grad_state:
# This is a nested loop.
if val_shape.is_fully_defined():
# If the shape is known statically, just create a zero tensor
# with the right shape in the right context.
outer_grad_state.grad_context.Enter()
result = array_ops.zeros(val_shape.dims, val.dtype)
outer_grad_state.grad_context.Exit()
else:
history_val = outer_grad_state.AddForwardAccumulator(val)
outer_grad_ctxt = outer_grad_state.grad_context
outer_grad_ctxt.Enter()
real_val = outer_grad_state.AddBackPropAccumulatedValue(
history_val, val)
result = array_ops.zeros_like(real_val)
outer_grad_ctxt.Exit()
else:
# This is not a nested loop.
if val_shape.is_fully_defined():
# If the shape is known statically, just create a zero tensor
# with the right shape.
result = array_ops.zeros(val_shape.dims, val.dtype)
else:
result = array_ops.zeros_like(val)
return result
def ZerosLike(self, op, index):
"""Create zeros_like for the specified output of an op.
This method must be called in the grad loop context.
Args:
op: A tensorflow operation.
index: the index for a specific output of the op.
Returns:
      A zero tensor with the same shape as op.outputs[index].
"""
if IsLoopSwitch(op): return None
dead_branch = op.type in {"Switch", "RefSwitch"}
forward_ctxt = _GetWhileContext(op)
if forward_ctxt is None:
return array_ops.zeros_like(op.outputs[index])
op_ctxt = op._get_control_flow_context()
grad_state = self._map.get(forward_ctxt)
val = ops.convert_to_tensor(op.outputs[index], name="tensor")
shape = val.get_shape()
if shape.is_fully_defined():
# If the shape is known statically, just create a zero tensor with
# the right shape in the grad loop context.
result = constant_op.constant(0, shape=shape.dims, dtype=val.dtype)
if dead_branch:
# op is a cond switch. Guard the zero tensor with a switch.
pred = grad_state.history_map.get(op_ctxt.pred.name)
branch = op_ctxt.branch
result = _SwitchRefOrTensor(result, pred)[1 - branch]
else:
# Unknown shape so keep a history of the shape at runtime.
if dead_branch:
# Need to add a special switch to guard the value.
pred = op_ctxt.pred
branch = op_ctxt.branch
op_ctxt.outer_context.Enter()
val = _SwitchRefOrTensor(op.inputs[0], pred)[1 - branch]
zeros_shape = array_ops.shape(val)
op_ctxt.outer_context.Exit()
val.op._set_control_flow_context(op_ctxt)
zeros_shape.op._set_control_flow_context(op_ctxt)
else:
op_ctxt.Enter()
zeros_shape = array_ops.shape(val)
op_ctxt.Exit()
# Add forward accumulator for shape.
grad_state.grad_context.Exit()
history_shape = grad_state.AddForwardAccumulator(zeros_shape, dead_branch)
grad_state.grad_context.Enter()
# Create a zero tensor with the right shape.
shape = grad_state.AddBackPropAccumulatedValue(
history_shape, zeros_shape, dead_branch)
result = array_ops.zeros(shape, val.dtype)
return result
def GetRealOp(op):
"""Get the real op by removing the wrapper."""
while isinstance(op, ControlFlowOpWrapper):
op = op.op
return op
def MaybeCreateControlFlowState(between_op_list, between_ops):
"""Create the state for all the while loops involved in one gradients().
We create a ControlFlowState when there are while loops involved in
gradients(). In gradients(), control flow logic is only invoked when
the ControlFlowState is not None.
Note that this method modifies `between_op_list` and `between_ops`.
"""
loop_state = None
for op in between_op_list:
if _IsLoopExit(op):
if loop_state is None:
loop_state = ControlFlowState()
loop_state.AddWhileContext(op, between_op_list, between_ops)
return loop_state
def IsLoopSwitch(op):
"""Return true if `op` is the Switch for a While loop."""
if op.type == "Switch" or op.type == "RefSwitch":
ctxt = op._get_control_flow_context()
return ctxt and isinstance(ctxt, WhileContext)
return False
class ControlFlowContext(object):
"""The base class for control flow context.
The usage pattern is a sequence of (Enter, Exit) followed by a final
ExitResult.
We maintain the following state for control flow contexts during graph
construction:
1. graph has _control_flow_context: the current context used to
construct new nodes. Changed by ctxt.Enter() and ctxt.Exit()
2. op has _control_flow_context: the context to which the op belongs.
Set at the time the op is created. Immutable.
3. A ControlFlowContext has _outer_context: the context in which this
context is created. Set at the time a context is created. Immutable.
4. A ControlFlowContext has _context_stack.
Pushed and popped by ctxt.Enter() and ctxt.Exit()
"""
def __init__(self):
self._outer_context = ops.get_default_graph()._get_control_flow_context()
self._context_stack = []
# Values that have been already seen in this context.
self._values = set()
# Values referenced by but external to this context.
self._external_values = {}
@property
def outer_context(self):
"""Return the context containing this context."""
return self._outer_context
def AddName(self, name):
self._values.add(name)
# pylint: disable=protected-access
def Enter(self):
"""Enter this control flow context."""
graph = ops.get_default_graph()
self._context_stack.append(graph._get_control_flow_context())
graph._set_control_flow_context(self)
def Exit(self):
"""Exit this control flow context."""
graph = ops.get_default_graph()
last_context = self._context_stack.pop()
graph._set_control_flow_context(last_context)
def ExitResult(self, result):
"""Make a list of tensors available in the outer context."""
if self._outer_context:
for x in result:
self._outer_context.AddName(x.name)
def GetWhileContext(self):
"""Return the while context containing this context."""
if self._outer_context:
return self._outer_context.GetWhileContext()
return None
def MaybeAddToWhileContext(self, op):
"""Add a control dependency to the containing WhileContext.
The added control dependency ensures that the outputs of this op
belong to the WhileContext. Do nothing if the op is not contained
in a WhileContext.
Args:
op: An operation.
"""
while_ctxt = self.GetWhileContext()
if while_ctxt is not None:
# pylint: disable=protected-access
op._add_control_input(while_ctxt.GetControlPivot().op)
# pylint: enable=protected-access
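# --- Editor's sketch: the Enter/Exit discipline ControlFlowContext documents
# above, reduced to a bare push/pop of a "current context" slot. `_current`
# stands in for graph._control_flow_context.
class Ctx(object):
    _current = None

    def __init__(self):
        self._stack = []

    def Enter(self):
        self._stack.append(Ctx._current)
        Ctx._current = self

    def Exit(self):
        Ctx._current = self._stack.pop()

outer, inner = Ctx(), Ctx()
outer.Enter(); inner.Enter()
assert Ctx._current is inner
inner.Exit()
assert Ctx._current is outer
outer.Exit()
assert Ctx._current is None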
class CondContext(ControlFlowContext):
"""The context for the conditional construct."""
def __init__(self, pred, pivot, branch):
ControlFlowContext.__init__(self)
self._pred = pred # The boolean tensor for the cond predicate
self._pivot = pivot # The predicate tensor in this branch
self._branch = branch # 0 or 1 representing this branch
# Values considered to have been already seen in this context.
self._values.add(pred.name)
self._values.add(pivot.name)
@property
def pred(self):
return self._pred
@property
def pivot(self):
return self._pivot
@property
def branch(self):
return self._branch
def AddValue(self, val):
"""Add `val` to the current context and its outer context recursively."""
result = val
if val.name not in self._values:
self._values.add(val.name)
if self._outer_context:
result = self._outer_context.AddValue(val)
self._values.add(result.name)
with ops.control_dependencies(None):
result = _SwitchRefOrTensor(result, self._pred)[self._branch]
# pylint: disable=protected-access
result.op._set_control_flow_context(self)
# pylint: enable=protected-access
self._values.add(result.name)
self._external_values[val.name] = result
return result
def AddOp(self, op):
"""Add `op` to the current context."""
if not op.inputs:
# Add this op to the enclosing while context
self.MaybeAddToWhileContext(op)
# pylint: disable=protected-access
op._add_control_input(self._pivot.op)
# pylint: enable=protected-access
for x in op.outputs:
self._values.add(x.name)
else:
for index in range(len(op.inputs)):
x = op.inputs[index]
if x.name not in self._values:
self._values.add(x.name)
# Add this value to the parent contexts up to the context that
# creates this value.
real_x = x
if self._outer_context:
real_x = self._outer_context.AddValue(x)
self._values.add(real_x.name)
real_x = _SwitchRefOrTensor(real_x, self._pred)[self._branch]
self._external_values[x.name] = real_x
x = self._external_values.get(x.name)
if x is not None:
op._update_input(index, x)
for x in op.outputs:
self._values.add(x.name)
def BuildCondBranch(self, fn):
"""Add the subgraph defined by fn() to the graph."""
r = fn()
result = []
if r is not None:
if not isinstance(r, list) and not isinstance(r, _basetuple):
r = [r]
for v in r:
real_v = v
if isinstance(v, ops.Operation):
# Use pivot as the proxy for this op.
real_v = with_dependencies([v], self._pivot)
elif v.name not in self._values:
# Handle the special case of lambda: x
self._values.add(v.name)
if self._outer_context:
real_v = self._outer_context.AddValue(v)
self._values.add(real_v.name)
real_v = _SwitchRefOrTensor(real_v, self._pred)[self._branch]
self._external_values[v.name] = real_v
else:
external_v = self._external_values.get(v.name)
if external_v is not None:
real_v = external_v
result.append(real_v)
return result
def cond(pred, fn1, fn2, name=None):
"""Return either fn1() or fn2() based on the boolean predicate `pred`.
`fn1` and `fn2` both return lists of output tensors. `fn1` and `fn2` must have
the same non-zero number and type of outputs.
Args:
pred: A scalar determining whether to return the result of `fn1` or `fn2`.
fn1: The function to be performed if pred is true.
    fn2: The function to be performed if pred is false.
name: Optional name prefix for the returned tensors.
Returns:
Tensors returned by the call to either `fn1` or `fn2`. If the functions
return a singleton list, the element is extracted from the list.
Raises:
TypeError: if `fn1` or `fn2` is not callable.
ValueError: if `fn1` and `fn2` do not return the same number of tensors, or
return tensors of different types.
Example:
```python
x = tf.constant(2)
y = tf.constant(5)
def f1(): return tf.mul(x, 17)
def f2(): return tf.add(y, 23)
r = cond(math_ops.less(x, y), f1, f2)
# r is set to f1().
# Operations in f2 (e.g., tf.add) are not executed.
```
"""
with ops.op_scope([pred], name, "cond") as name:
if not callable(fn1):
raise TypeError("fn1 must be callable.")
if not callable(fn2):
raise TypeError("fn2 must be callable.")
# Add the Switch to the graph.
if isinstance(pred, bool):
raise TypeError("pred must not be a Python bool")
p_2, p_1 = switch(pred, pred)
pivot_1 = array_ops.identity(p_1, name="switch_t")
pivot_2 = array_ops.identity(p_2, name="switch_f")
pred = array_ops.identity(pred, name="pred_id")
# Build the graph for the true branch in a new context.
context_t = CondContext(pred, pivot_1, 1)
context_t.Enter()
res_t = context_t.BuildCondBranch(fn1)
context_t.ExitResult(res_t)
context_t.Exit()
# Build the graph for the false branch in a new context.
context_f = CondContext(pred, pivot_2, 0)
context_f.Enter()
res_f = context_f.BuildCondBranch(fn2)
context_f.ExitResult(res_f)
context_f.Exit()
# Add the final merge to the graph.
if len(res_t) != len(res_f):
raise ValueError("fn1 and fn2 must return the same number of results.")
if not res_t:
raise ValueError("fn1 and fn2 must return at least one result.")
for x, y in zip(res_f, res_t):
assert ((isinstance(x, ops.IndexedSlices) and
isinstance(y, ops.IndexedSlices)) or
(isinstance(x, ops.Tensor) and isinstance(y, ops.Tensor)))
val_x = x if isinstance(x, ops.Tensor) else x.values
val_y = y if isinstance(y, ops.Tensor) else y.values
if val_x.dtype.base_dtype != val_y.dtype.base_dtype:
raise ValueError("Outputs of fn1 and fn2 must have the same type: "
"%s, %s" % (val_x.dtype.name, val_y.dtype.name))
merges = [merge([x[0], x[1]])[0] for x in zip(res_f, res_t)]
return merges[0] if len(merges) == 1 else merges
# TODO(yuanbyu): Consider having a unified notion of context for
# not only conditionals and loops but also control dependency and
# subgraphs.
class WhileContext(ControlFlowContext):
"""The context for the loop construct."""
def __init__(self, parallel_iterations, back_prop, swap_memory, name):
ControlFlowContext.__init__(self)
self._name = ops.get_default_graph().unique_name(name)
self._parallel_iterations = parallel_iterations
self._back_prop = back_prop
self._swap_memory = swap_memory
# We use this node to control constants created by the pred lambda.
self._pivot_for_pred = None
# We use this node to control constants created by the body lambda.
self._pivot_for_body = None
# The boolean tensor for loop termination condition. Used in code
# generation for gradient computation
self._pivot = None
# The list of exit tensors for loop variables.
self._loop_exits = None
@property
def name(self):
return self._name
@property
def parallel_iterations(self):
"""The number of iterations allowed to run in parallel."""
return self._parallel_iterations
@property
def back_prop(self):
"""True iff backprop is enabled for this While loop."""
return self._back_prop
@property
def swap_memory(self):
"""True iff GPU-CPU memory swap is enabled for this While loop."""
return self._swap_memory
@property
def pivot(self):
"""The boolean tensor representing the loop termination condition."""
return self._pivot
@property
def loop_exits(self):
"""The list of exit tensors for loop variables."""
return self._loop_exits
def GetWhileContext(self):
return self
def GetControlPivot(self):
if self._pivot_for_body:
return self._pivot_for_body
return self._pivot_for_pred
def AddValue(self, val):
"""Add `val` to the current context and its outer context recursively."""
result = val
if val.name not in self._values:
self._values.add(val.name)
if self._outer_context is not None:
result = self._outer_context.AddValue(val)
# Create an Enter to make `result` known to this loop context.
with ops.control_dependencies(None):
enter = _Enter(result, self._name, is_constant=True,
parallel_iterations=self._parallel_iterations)
# pylint: disable=protected-access
enter.op._set_control_flow_context(self)
# pylint: enable=protected-access
# Add `enter` in this context.
self._values.add(enter.name)
self._external_values[val.name] = enter
result = enter
else:
actual_val = self._external_values.get(val.name)
if actual_val is not None:
result = actual_val
return result
def AddOp(self, op):
"""Adds `op` to the current context."""
if not op.inputs:
if not op.control_inputs:
# Add a control edge from the control pivot to this op.
# pylint: disable=protected-access
op._add_control_input(self.GetControlPivot().op)
# pylint: enable=protected-access
else:
# Control edges must be in the same context.
for x in op.control_inputs:
assert x._get_control_flow_context() == self, (
"Control inputs must come from Operations in the same while "
"loop context (not an outer context).")
for x in op.outputs:
self._values.add(x.name)
else:
for index in range(len(op.inputs)):
x = op.inputs[index]
self.AddValue(x)
real_x = self._external_values.get(x.name)
if real_x is not None:
op._update_input(index, real_x)
# Add a control dependency to prevent loop invariants from
# enabling ops that should not be executed.
if real_x.op.type == "RefEnter" and real_x.op.get_attr("is_constant"):
# pylint: disable=protected-access
op._add_control_input(self.GetControlPivot().op)
# pylint: enable=protected-access
for x in op.outputs:
self._values.add(x.name)
def AddForwardCounter(self):
"""Adds a loop that counts the number of iterations.
This is added to the forward loop at the time when we start to
create the loop for backprop gradient computation. Called in
the outer context of this forward context.
The pseudocode is:
`n = 0; while (_pivot) { n++; }`
Returns:
The number of iterations taken by the forward loop and the loop index.
"""
n = constant_op.constant(0, name="f_count")
assert n.op._get_control_flow_context() == self.outer_context
self.Enter()
self.AddName(n.name)
enter_n = _Enter(n, self._name, is_constant=False,
parallel_iterations=self._parallel_iterations,
name="f_count")
merge_n = merge([enter_n, enter_n])[0]
switch_n = switch(merge_n, self._pivot)
index = math_ops.add(switch_n[1], 1)
next_n = _NextIteration(index)
merge_n.op._update_input(1, next_n)
total_iterations = exit(switch_n[0], name="f_count")
self.ExitResult([total_iterations])
self.Exit()
return total_iterations, next_n
def AddBackPropCounter(self, count):
"""Add the backprop loop that controls the iterations.
This is added to the backprop loop. It is used to control the loop
termination of the backprop loop. Called in the outer context of
this grad context.
The pseudocode is:
`n = count; while (n >= 1) { n--; }`
Args:
count: The number of iterations for backprop.
Returns:
The loop index.
"""
one = constant_op.constant(1, name="b_count")
self.Enter()
self.AddName(count.name)
enter_count = _Enter(count, self._name, is_constant=False,
parallel_iterations=self._parallel_iterations,
name="b_count")
merge_count = merge([enter_count, enter_count])[0]
self._pivot_for_pred = merge_count
cond = math_ops.greater_equal(merge_count, one)
self._pivot = loop_cond(cond, name="b_count")
switch_count = switch(merge_count, self._pivot)
index = math_ops.sub(switch_count[1], one)
self._pivot_for_body = index
next_count = _NextIteration(index)
merge_count.op._update_input(1, next_count)
self.Exit()
return next_count
def AddBackPropAccumulator(self, value):
"""Add an accumulation loop for every loop invariant.
This is added to the backprop loop. It is used to accumulate
partial gradients within each loop iteration. Called when in the
gradient while context.
The pseudocode is:
```
acc = 0.0;
while (_pivot) {
acc += value;
}
```
Args:
value: The partial gradient of an iteration for a loop invariant.
Returns:
The gradient for a loop invariant.
"""
self.Exit()
shape = value.get_shape()
if not shape.is_fully_defined():
shape = None
if self.outer_context: self.outer_context.Enter()
acc = constant_op.constant(0, value.dtype, shape=shape, name="b_acc")
if not shape:
acc._shape = value.get_shape() # pylint: disable=protected-access
if self.outer_context: self.outer_context.Exit()
self.Enter()
self.AddName(acc.name)
enter_acc = _Enter(acc, self._name, is_constant=False,
parallel_iterations=self._parallel_iterations,
name="b_acc")
merge_acc = merge([enter_acc, enter_acc], name="b_acc")[0]
switch_acc = switch(merge_acc, self._pivot)
add_acc = math_ops.add(switch_acc[1], value)
next_acc = _NextIteration(add_acc)
merge_acc.op._update_input(1, next_acc)
acc_result = exit(switch_acc[0], name="b_acc")
self.ExitResult([acc_result])
return acc_result
def BuildLoop(self, pred, body, loop_vars):
"""Add the loop termination condition and body to the graph."""
# Keep original_loop_vars to identify which are TensorArrays
original_loop_vars = loop_vars
    # Convert TensorArrays to their flow variables
loop_vars = _convert_tensorarrays_to_flows(loop_vars)
loop_vars = ops.convert_n_to_tensor_or_indexed_slices(loop_vars)
    # Let the context know the loop variables so they are added to the
    # outer contexts properly.
self._values = set([x.name for x in loop_vars])
real_vars = loop_vars
if self._outer_context:
real_vars = [self._outer_context.AddValue(x) for x in loop_vars]
with ops.control_dependencies(None):
enter_vars = [_Enter(x, self._name, is_constant=False,
parallel_iterations=self._parallel_iterations)
for x in real_vars]
for x in enter_vars:
x.op._set_control_flow_context(self) # pylint: disable=protected-access
self._values = set([x.name for x in enter_vars])
merge_vars = [merge([x, x])[0] for x in enter_vars]
self._pivot_for_pred = merge_vars[0]
# Build the graph for pred.
merge_vars_with_tensor_arrays = (
_convert_flows_to_tensorarrays(original_loop_vars, merge_vars))
c = ops.convert_to_tensor(pred(*merge_vars_with_tensor_arrays))
self._pivot = loop_cond(c, name="LoopCond")
switch_vars = [_SwitchRefOrTensor(x, self._pivot) for x in merge_vars]
# Build the graph for body.
vars_for_body = [_Identity(x[1]) for x in switch_vars]
self._pivot_for_body = vars_for_body[0]
# Convert TensorArray flow variables inside the context back into
# their associated TensorArrays for calling the body.
vars_for_body_with_tensor_arrays = (
_convert_flows_to_tensorarrays(original_loop_vars, vars_for_body))
body_result = body(*vars_for_body_with_tensor_arrays)
if not isinstance(body_result, collections.Sequence):
body_result = [body_result]
# Store body_result to keep track of TensorArrays returned by body
original_body_result = body_result
# Convert TensorArrays returned by body into their flow variables
result = _convert_tensorarrays_to_flows(body_result)
result = ops.convert_n_to_tensor_or_indexed_slices(result)
next_vars = [_NextIteration(x) for x in result]
# Add the back edges to complete the loop.
assert len(merge_vars) == len(next_vars)
for x in zip(merge_vars, next_vars):
x[0].op._update_input(1, x[1])
# Add the exit ops.
exit_vars = [exit(x[0]) for x in switch_vars]
self._loop_exits = exit_vars
for m_var, n_var, e_var in zip(merge_vars, next_vars, exit_vars):
if m_var.get_shape().is_compatible_with(n_var.get_shape()):
e_var.set_shape(m_var.get_shape().merge_with(n_var.get_shape()))
# Exit the loop.
self.ExitResult(exit_vars)
# Convert TensorArray flow variables outside the context back into
# their associated TensorArrays for returning to caller.
exit_vars_with_tensor_arrays = (
_convert_flows_to_tensorarrays(original_body_result, exit_vars))
return (exit_vars_with_tensor_arrays[0]
if len(exit_vars) == 1
else exit_vars_with_tensor_arrays)
def While(cond, body, loop_vars, parallel_iterations=10, back_prop=True,
swap_memory=False, name=None):
"""Repeat `body` while the condition `cond` is true.
`cond` is a function taking a list of tensors and returning a boolean scalar
tensor. `body` is a function taking a list of tensors and returning a list of
tensors of the same length and with the same types as the input. `loop_vars`
is a list of tensors that is passed to both `cond` and `body`.
In addition to regular Tensors or IndexedSlices, the body may accept and
return TensorArray objects. The flows of the TensorArray objects will
be appropriately forwarded between loops and during gradient calculations.
While `cond` evaluates to true, `body` is executed.
Args:
cond: The termination condition of the loop.
body: A function that represents the loop body.
loop_vars: The list of variable input tensors.
parallel_iterations: The number of iterations allowed to run in parallel.
back_prop: Whether backprop is enabled for this while loop.
swap_memory: Whether GPU-CPU memory swap is enabled for this loop.
name: Optional name prefix for the returned tensors.
Returns:
The output tensors for the loop variables after the loop.
Raises:
TypeError: if `cond` or `body` is not callable.
    ValueError: if `loop_vars` is empty.
Example:
```python
i = constant(0)
c = lambda i: math_ops.less(i, 10)
b = lambda i: math_ops.add(i, 1)
r = While(c, b, [i])
```
"""
with ops.op_scope(loop_vars, name, "While") as name:
if not loop_vars:
raise ValueError("No loop variables provided")
if not callable(cond):
raise TypeError("cond must be callable.")
if not callable(body):
raise TypeError("body must be callable.")
context = WhileContext(parallel_iterations, back_prop, swap_memory, name)
context.Enter()
result = context.BuildLoop(cond, body, loop_vars)
context.Exit()
return result
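# A short sketch (not from the original source) of the TensorArray support
# described in the docstring above; assumes `tensor_array_ops` and `dtypes`
# are imported in this module, as they are used elsewhere in it:
#
#   ta = tensor_array_ops.TensorArray(dtype=dtypes.float32, size=10)
#   i0 = constant_op.constant(0)
#   def body(i, ta):
#     return [i + 1, ta.write(i, math_ops.cast(i, dtypes.float32))]
#   _, final_ta = While(lambda i, ta: i < 10, body, [i0, ta])
#   values = final_ta.pack()  # shape [10] float32 tensor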
def _AsTensorList(x, p):
"""Return x as a list of Tensors or IndexedSlices.
For entries of `x` that are Operations, this returns an Identity of `p`
with a dependency on the operation.
Args:
x: A Tensor/IndexedSlices/Operation or a list or tuple of them.
p: A Tensor to return for entries in `x` that are Operations.
Returns:
A list of Tensors or IndexedSlices.
"""
if not isinstance(x, (list, _basetuple)):
x = [x]
l = []
for v in x:
if isinstance(v, ops.Operation):
v = with_dependencies([v], p)
v = ops.convert_to_tensor_or_indexed_slices(v)
if isinstance(v, ops.Tensor):
l.append(array_ops.identity(v))
else:
l.append(ops.IndexedSlices(array_ops.identity(v.values),
array_ops.identity(v.indices)))
return l
def _CheckResults(a, b):
assert len(a) == len(b), (
"Values returned by a() and b() must have the same length.")
for x, y in zip(a, b):
assert x.dtype == y.dtype, (
"Values returned by a() [%s] and b() [%s] must have "
"the same type: %s, %s." %
(x.name, y.name, x.dtype.name, y.dtype.name))
def with_dependencies(dependencies, output_tensor, name=None):
"""Produces the content of `output_tensor` only after `dependencies`.
In some cases, a user may want the output of an operation to be
consumed externally only after some other dependencies have run
  first. This function returns `output_tensor`, but only after all
  operations in `dependencies` have run. Note that only the *returned*
  tensor carries this guarantee; `output_tensor` itself may still be
  evaluated before the `dependencies` have run.
See also `tuple` and `group`.
Args:
dependencies: A list of operations to run before this op finishes.
output_tensor: A `Tensor` or `IndexedSlices` that will be returned.
name: (Optional) A name for this operation.
Returns:
Same as `output_tensor`.
Raises:
TypeError: if `output_tensor` is not a `Tensor` or `IndexedSlices`.
"""
with ops.op_scope(dependencies + [output_tensor], name,
"control_dependency") as name:
with ops.colocate_with(output_tensor):
with ops.control_dependencies(dependencies):
output_tensor = ops.convert_to_tensor_or_indexed_slices(output_tensor)
if isinstance(output_tensor, ops.Tensor):
return _Identity(output_tensor, name=name)
else:
return ops.IndexedSlices(_Identity(output_tensor.values, name=name),
output_tensor.indices,
output_tensor.dense_shape)
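# A minimal usage sketch (illustrative only); `update_op` and `total` are
# hypothetical ops/tensors:
#
#   guarded_total = with_dependencies([update_op], total)
#   # Consumers of `guarded_total` see the value of `total`, but only after
#   # `update_op` has run; consumers of `total` itself get no such guarantee.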
def _GroupControlDeps(dev, deps, name=None):
with ops.control_dependencies(deps):
if dev is None:
return no_op(name=name)
else:
with ops.device(dev):
return no_op(name=name)
# TODO(touts): Accept "inputs" as a list.
def group(*inputs, **kwargs):
"""Create an op that groups multiple operations.
  When this op finishes, all ops in `inputs` have finished. This op has no
output.
See also `tuple` and `with_dependencies`.
Args:
*inputs: One or more tensors to group.
**kwargs: Optional parameters to pass when constructing the NodeDef.
name: A name for this operation (optional).
Returns:
An Operation that executes all its inputs.
Raises:
ValueError: If an unknown keyword argument is provided, or if there are
no inputs.
"""
name = kwargs.pop("name", None)
if kwargs:
raise ValueError("Unknown keyword arguments: " + ", ".join(kwargs.keys()))
if not inputs:
# TODO(touts): Would make sense to return a NoOp.
raise ValueError("No inputs provided")
with ops.op_scope(inputs, name, "group_deps") as name:
# Sorts *inputs according to their devices.
ops_on_device = {} # device -> operations specified on the device.
for inp in inputs:
dev = inp.device
if dev in ops_on_device:
ops_on_device[dev].append(inp)
else:
ops_on_device[dev] = [inp]
if len(ops_on_device) == 1:
# 1-level tree. The root node is the returned NoOp node.
(dev, deps), = ops_on_device.items()
return _GroupControlDeps(dev, deps, name=name)
# 2-level tree. The root node is the returned NoOp node.
# deps contains 1 NoOp node for each device.
deps = []
def device_key(dev):
"""A sort key that allows None to be compared to strings."""
return "" if dev is None else dev
for dev in sorted(six.iterkeys(ops_on_device), key=device_key):
deps.append(_GroupControlDeps(dev, ops_on_device[dev]))
with ops.control_dependencies(deps):
return no_op(name=name)
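# A minimal usage sketch (illustrative only); `init_a` and `init_b` are
# hypothetical ops, e.g. variable initializers:
#
#   init_all = group(init_a, init_b, name="init_all")
#   # Running `init_all` runs both inputs; the op itself produces no output.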
def tuple(tensors, name=None, control_inputs=None):
"""Group tensors together.
This creates a tuple of tensors with the same values as the `tensors`
argument, except that the value of each tensor is only returned after the
values of all tensors have been computed.
`control_inputs` contains additional ops that have to finish before this op
finishes, but whose outputs are not returned.
This can be used as a "join" mechanism for parallel computations: all the
argument tensors can be computed in parallel, but the values of any tensor
returned by `tuple` are only available after all the parallel computations
are done.
See also `group` and `with_dependencies`.
Args:
tensors: A list of `Tensor`s or `IndexedSlices`, some entries can be `None`.
name: (optional) A name to use as a `name_scope` for the operation.
control_inputs: List of additional ops to finish before returning.
Returns:
Same as `tensors`.
Raises:
ValueError: If `tensors` does not contain any `Tensor` or `IndexedSlices`.
TypeError: If `control_inputs` is not a list of `Operation` or `Tensor`
objects.
"""
with ops.op_scope(tensors, name, "tuple") as name:
gating_ops = [t.op for t in tensors if t]
if control_inputs:
for c in control_inputs:
if isinstance(c, ops.Tensor):
c = c.op
elif not isinstance(c, ops.Operation):
raise TypeError("Control input must be Operation or Tensor: %s" % c)
gating_ops.append(c)
    # Note: sort the ops deterministically (by id) so that the order in the
    # resulting pbtxt is stable.
gating_ops = sorted(set(gating_ops), key=lambda op: op._id) # Uniquify ops.
if not gating_ops:
raise ValueError("Must have at least one Tensor: %s" % tensors)
gate = group(*gating_ops)
tpl = []
for t in tensors:
if t:
tpl.append(with_dependencies([gate], t))
else:
tpl.append(None)
return tpl
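# A minimal usage sketch (illustrative only); `a` and `b` are hypothetical
# tensors computed by independent branches:
#
#   a_out, b_out = tuple([a, b])
#   # `a_out` and `b_out` carry the same values as `a` and `b`, but neither
#   # becomes available until both `a` and `b` have been computed.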
# TODO(yuanbyu, mrry): Handle stride to support sliding windows.
def foldl(fn, elems, initializer=None, name=None):
"""The foldl operator on the unpacked tensors of a tensor.
This foldl operator applies the function `fn` to a sequence of elements
from left to right. The elements are made of the tensors unpacked from
`elems`. If `initializer` is None, `elems` must contain at least one
element.
Args:
fn: The function to be performed.
elems: A tensor to be unpacked.
initializer: (optional) The initial value for the accumulator.
name: (optional) Name prefix for the returned tensors.
Returns:
A tensor resulting from applying `fn` consecutively on each
element/slice of `elems`, from left to right.
Raises:
TypeError: if `fn` is not callable.
Example:
```python
elems = [1, 2, 3, 4, 5, 6]
sum = foldl(lambda a, x: a + x, elems)
```
"""
with ops.op_scope([elems], name, "foldl") as name:
if not callable(fn):
raise TypeError("fn must be callable.")
# Convert elems to tensor array.
n = array_ops.shape(elems)[0]
elems_ta = tensor_array_ops.TensorArray(dtype=elems.dtype, size=n,
dynamic_size=False)
elems_ta = elems_ta.unpack(elems)
if initializer is None:
a = elems_ta.read(0)
i = constant_op.constant(1)
else:
a = ops.convert_to_tensor(initializer)
i = constant_op.constant(0)
def compute(i, a):
a = fn(a, elems_ta.read(i))
return [i + 1, a]
_, r_a = While(lambda i, a: i < n, compute, [i, a])
return r_a
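# A short sketch (not from the original docstring) of the `initializer`
# argument, which seeds the accumulator:
#
#   elems = constant_op.constant([1, 2, 3, 4, 5, 6])
#   total = foldl(lambda a, x: a + x, elems,
#                 initializer=constant_op.constant(100))
#   # total evaluates to 121: the fold starts at 100 and adds all six elements.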
def foldr(fn, elems, initializer=None, name=None):
"""The foldr operator operator on the unpacked tensors of a tensor.
This foldr operator applies the function `fn` to a sequence of elements
from right to left. The elements are made of the tensors unpacked from
`elems`. If `initializer` is None, `elems` must contain at least one
element.
Args:
fn: The function to be performed.
elems: A tensor that is unpacked into a sequence of tensors to apply `fn`.
initializer: (optional) The initial value for the accumulator.
name: (optional) Name prefix for the returned tensors.
Returns:
A tensor resulting from applying `fn` consecutively on each
element/slice of `elems`, from right to left.
Raises:
TypeError: if `fn` is not callable.
Example:
```python
elems = [1, 2, 3, 4, 5, 6]
sum = foldr(lambda a, x: a + x, elems)
```
"""
with ops.op_scope([elems], name, "foldr") as name:
if not callable(fn):
raise TypeError("fn must be callable.")
# Convert elems to tensor array.
n = array_ops.shape(elems)[0]
elems_ta = tensor_array_ops.TensorArray(dtype=elems.dtype, size=n,
dynamic_size=False)
elems_ta = elems_ta.unpack(elems)
if initializer is None:
i = n - 1
a = elems_ta.read(i)
else:
i = n
a = ops.convert_to_tensor(initializer)
def compute(i, a):
i -= 1
a = fn(a, elems_ta.read(i))
return [i, a]
_, r_a = While(lambda i, a: i > 0, compute, [i, a])
return r_a
def map_fn(fn, elems, dtype=None, name=None):
"""The map operator on the unpacked tensors of a tensor.
  This map operator applies the function `fn` to a sequence of elements
  from first to last. The elements are made of the tensors unpacked from
`elems`.
Args:
fn: The function to be performed.
elems: A tensor to be unpacked to apply `fn`.
dtype: (optional) The output type of `fn`.
name: (optional) Name prefix for the returned tensors.
Returns:
A tensor that packs the results of applying `fn` on each element
of `elems`.
Raises:
TypeError: if `fn` is not callable.
Example:
```python
elems = [1, 2, 3, 4, 5, 6]
squares = map_fn(lambda x: x * x, elems)
```
"""
with ops.op_scope([elems], name, "map") as name:
if not callable(fn):
raise TypeError("fn must be callable.")
dtype = dtype if dtype else elems.dtype
# Convert elems to tensor array.
n = array_ops.shape(elems)[0]
elems_ta = tensor_array_ops.TensorArray(dtype=elems.dtype, size=n,
dynamic_size=False)
elems_ta = elems_ta.unpack(elems)
i = constant_op.constant(0)
acc_ta = tensor_array_ops.TensorArray(dtype=dtype, size=n,
dynamic_size=False)
def compute(i, a):
a = a.write(i, fn(elems_ta.read(i)))
i = math_ops.add(i, 1)
return [i, a]
_, r_a = While(lambda i, a: math_ops.less(i, n), compute, [i, acc_ta])
return r_a.pack()
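# A short sketch (not from the original docstring) of the `dtype` argument,
# required when `fn` returns a different type than `elems`:
#
#   elems = constant_op.constant([1, 2, 3])
#   halves = map_fn(lambda x: math_ops.cast(x, dtypes.float32) / 2.0,
#                   elems, dtype=dtypes.float32)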
def case(pred_fn_pairs, default, exclusive=False, name="case"):
"""Create a case operation.
  The `pred_fn_pairs` parameter is a dict, or a list or tuple of N pairs.
Each pair contains a boolean scalar tensor and a python callable that
creates the tensors to be returned if the boolean evaluates to True. `default`
is a callable generating a list of tensors. All the callables in
`pred_fn_pairs` as well as `default` should return the same number and types
of tensors.
If `exclusive==True`, all predicates are evaluated, and a logging operation
with an error is returned if more than one of the predicates evaluates to
  True. If `exclusive==False`, execution stops at the first predicate which
evaluates to True, and the tensors generated by the corresponding function
are returned immediately. If none of the predicates evaluate to True, this
operation returns the tensors generated by `default`.
Example 1:
Pseudocode:
```
if (x < y) return 17;
else return 23;
```
Expressions:
```
f1 = lambda: tf.constant(17)
f2 = lambda: tf.constant(23)
r = case([(tf.less(x, y), f1)], default=f2)
```
Example 2:
Pseudocode:
```
if (x < y && x > z) raise OpError("Only one predicate may evaluate true");
if (x < y) return 17;
else if (x > z) return 23;
else return -1;
```
Expressions:
```
x = tf.constant(0)
y = tf.constant(1)
z = tf.constant(2)
def f1(): return tf.constant(17)
def f2(): return tf.constant(23)
def f3(): return tf.constant(-1)
r = case({tf.less(x, y): f1, tf.greater(x, z): f2},
default=f3, exclusive=True)
```
Args:
pred_fn_pairs: Dict or list of pairs of a boolean scalar tensor and a
callable which returns a list of tensors.
default: A callable that returns a list of tensors.
    exclusive: True iff at most one predicate is allowed to evaluate to True.
name: A name for this operation (optional).
Returns:
The tensors returned by the first pair whose predicate evaluated to True, or
those returned by `default` if none does.
Raises:
TypeError: If `pred_fn_pairs` is not a list/dictionary.
TypeError: If `pred_fn_pairs` is a list but does not contain 2-tuples.
TypeError: If `fns[i]` is not callable for any i, or `default` is not
callable.
"""
pfp = pred_fn_pairs # For readability
if not (isinstance(pfp, list) or isinstance(pfp, _basetuple)
or isinstance(pfp, dict)):
raise TypeError("fns must be a list, tuple, or dict")
if isinstance(pfp, dict):
pfp = pfp.items()
if not exclusive:
logging.warn("%s: Provided dictionary of predicate/fn pairs, but "
"exclusive=False. Order of conditional tests is "
"not guaranteed.", name)
for tup in pfp:
if not isinstance(tup, _basetuple) or len(tup) != 2:
raise TypeError("Each entry in pred_fn_pairs must be a 2-tuple")
pred, fn = tup
if pred.dtype != dtypes.bool:
raise TypeError("pred must be of type bool: %s", pred.name)
if not callable(fn):
raise TypeError("fn for pred %s must be callable." % pred.name)
if not callable(default):
raise TypeError("default must be callable.")
preds, fns = map(list, zip(*pfp))
with ops.op_scope([preds], name, "case"):
if not preds:
return default()
not_preds = []
for i, p in enumerate(preds):
with ops.name_scope("not_%d" % i):
not_preds.append(math_ops.logical_not(p))
and_not_preds = [constant_op.constant(True, name="and_not_true")]
for i, notp in enumerate(not_preds[:-1]):
with ops.name_scope("and_not_%d" % i):
and_not_preds.append(math_ops.logical_and(and_not_preds[-1], notp))
# preds = [p1, p2, p3]
# fns = [f1, f2, f3]
# not_preds = [~p1, ~p2, ~p3]
# case_preds = [p1 & True,
# p2 & ~p1,
# p3 & ~p1 & ~ p2]
case_preds = []
for i, (p, and_not_p_prev) in enumerate(zip(preds, and_not_preds)):
with ops.name_scope("case_%d" % i):
case_preds.append(math_ops.logical_and(p, and_not_p_prev))
# case_sequence = [cond(p3 & ..., f3, default),
# cond(p2 & ..., f2, lambda: case_sequence[0]),
# ...
# cond(p1 & True, f1, lambda: case_sequence[i-1])]
# and prev_case_seq will loop from case_sequence[0] to case_sequence[-1]
if exclusive:
# TODO(ebrevdo): Add Where() for DT_BOOL, replace with Size(Where(preds))
preds_c = array_ops.pack(preds, name="preds_c")
num_true_conditions = math_ops.reduce_sum(
math_ops.cast(preds_c, dtypes.int32), name="num_true_conds")
at_most_one_true_condition = math_ops.less(
num_true_conditions, constant_op.constant(2, name="two_true_conds"))
error_msg = [
("More than one condition evaluated as True but "
"exclusive=True. Conditions: (%s), Values:"
% ", ".join([p.name for p in preds])),
preds_c]
with ops.control_dependencies([
logging_ops.Assert(condition=at_most_one_true_condition,
data=error_msg, summarize=len(preds))]):
prev_case_seq = None
for i, (cp, fn) in enumerate(list(zip(case_preds, fns))[::-1]):
prev_case_seq = cond(
cp, fn,
default if i == 0 else lambda: prev_case_seq,
name="If_%d" % i)
else:
prev_case_seq = None
for i, (cp, fn) in enumerate(list(zip(case_preds, fns))[::-1]):
prev_case_seq = cond(
cp, fn,
default if i == 0 else lambda: prev_case_seq,
name="If_%d" % i)
return prev_case_seq
ops.RegisterShape("Enter")(common_shapes.unchanged_shape)
ops.RegisterShape("Exit")(common_shapes.unchanged_shape)
ops.RegisterShape("NextIteration")(common_shapes.unchanged_shape)
ops.RegisterShape("RefEnter")(common_shapes.unchanged_shape)
ops.RegisterShape("RefExit")(common_shapes.unchanged_shape)
ops.RegisterShape("RefNextIteration")(common_shapes.unchanged_shape)
ops.RegisterShape("ControlTrigger")(common_shapes.no_outputs)
ops.RegisterShape("NoOp")(common_shapes.no_outputs)
@ops.RegisterShape("LoopCond")
def _LoopCondShape(op):
"""Shape function for the LoopCond op."""
return [op.inputs[0].get_shape().merge_with(tensor_shape.scalar())]
@ops.RegisterShape("Merge")
def _MergeShape(op):
"""Shape function for the Merge op.
The Merge op takes many inputs of arbitrary shapes, and produces a
first output that is one of those inputs, and a second scalar
output.
If all input shapes are known and have the same rank, the output
shape must have that rank, otherwise the output shape is unknown.
  Each output dimension is specified only if that dimension is the same in
  all inputs.
Args:
op: A Merge Operation.
Returns:
A single-element list containing the Shape of the Merge op.
"""
output_shape = op.inputs[0].get_shape()
if output_shape.dims is None:
return [tensor_shape.unknown_shape(), tensor_shape.scalar()]
else:
for input_ in op.inputs[1:]:
input_shape = input_.get_shape()
if input_shape.dims is None or input_shape.ndims != output_shape.ndims:
return [tensor_shape.unknown_shape(), tensor_shape.scalar()]
else:
output_shape = tensor_shape.TensorShape(
[input_dim.value if input_dim.value == output_dim.value else None
for input_dim, output_dim in zip(input_shape.dims,
output_shape.dims)])
return [output_shape, tensor_shape.scalar()]
ops.RegisterShape("RefMerge")(_MergeShape)
@ops.RegisterShape("RefSelect")
def _RefSelectShape(op):
"""Shape function for the RefSelect op.
The RefSelect takes one scalar input and N inputs of arbitrary
shapes, and produces one output, which is one of those N inputs.
This function conservatively assumes that if any of the N inputs is
not fully defined, the output shape is unknown. If all of the N
inputs have the exact same known shape, the output must have that
shape.
Args:
op: A RefSelect Operation.
Returns:
A single-element list containing the Shape of the RefSelect op.
"""
unused_shape = op.inputs[0].get_shape().merge_with(tensor_shape.scalar())
first_input_shape = op.inputs[1].get_shape()
if first_input_shape.is_fully_defined():
for input_ in op.inputs[2:]:
input_shape = input_.get_shape()
if (not input_shape.is_fully_defined()
or not input_shape.is_compatible_with(first_input_shape)):
return [tensor_shape.unknown_shape()]
return [first_input_shape]
else:
return [tensor_shape.unknown_shape()]
@ops.RegisterShape("RefSwitch")
@ops.RegisterShape("Switch")
def _SwitchShape(op):
input_shape = op.inputs[0].get_shape()
unused_pred_shape = op.inputs[1].get_shape().merge_with(tensor_shape.scalar())
return [input_shape] * 2
|
4Quant/tensorflow
|
tensorflow/python/ops/control_flow_ops.py
|
Python
|
apache-2.0
| 76,917
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import cgi
import sqlite3, re, string, codecs, os
def cercaurbano(cNominativo):
c = sqlite3.connect('./data/catasto.db')
cur = c.cursor()
cSele = "select distinct (id_i.foglio || '-' || id_i.numero ||'-'|| id_i.subalterno), \
'<a href=\"http://nominatim.openstreetmap.org/search/' \
|| top.decodifica || ' ' || ind.indirizzo || ' ' || ltrim(ind.civico1, '0') || ',16011 Arenzano\" target=\"_blank\">W_osm </a>', \
'<a href=\"../osmlocation/dettaglio-mappa.htm?location=' \
|| top.decodifica || ' ' || ind.indirizzo || ' ' || ltrim(ind.civico1, '0') || ',16011 Arenzano\" target=\"_blank\"> L_osm </a>', \
id_i.foglio, id_i.numero, id_i.subalterno, id_i.progr, \
ui.categoria, ui.classe, ui.renditaEuro, (top.decodifica || ' ' || ind.indirizzo || ' ' || ind.civico1), \
giu.denominazione, per.cognome, per.nome, per.DataNascita \
from identificativi_immobiliari as id_i \
left join indirizzi as ind On id_i.idImmobile = ind.idImmobile \
left join titolarita as tit On id_i.idImmobile = tit.idImmobile \
left join persona_fisica as per On tit.idSoggetto = per.idSoggetto \
left join persona_giuridica as giu On tit.idSoggetto = giu.idSoggetto \
left join unita_immobiliari as ui on tit.idImmobile = ui.idImmobile \
left join cod_toponimo as top on ind.toponimo = top.codice \
where trim(per.cognome) || ' ' || trim(per.nome) like '%" + cNominativo + "%' or giu.denominazione like '%" + cNominativo + "%' group by id_i.foglio, id_i.numero, id_i.subalterno order by id_i.foglio, id_i.numero, id_i.subalterno, id_i.progr desc"
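    # NOTE: cSele is built by string concatenation, so cNominativo is
    # injectable. A safer equivalent sketch using sqlite3 placeholders
    # (illustrative; the select list is abbreviated here):
    #
    #   pattern = '%' + cNominativo + '%'
    #   cur.execute("select ... where trim(per.cognome) || ' ' || trim(per.nome) "
    #               "like ? or giu.denominazione like ? ...", (pattern, pattern))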
#print cSele
cur.execute(cSele)
retrows = cur.fetchall()
table = "<table>"
table += "<tr>"
table += "<th>fog-map-sub</th><th>nominatim</th><th>loc_via_norm</th>"
table += "<th>fog</th><th>map</th><th>sub</th><th>progr</th><th>cat</th>"
table += "<th>cla</th><<th>rend</th><th>Indirizzo</th><th>Cognome</th><th>Nome</th><th>data_nascita</th>"
table +="</tr>"
for row in retrows:
totcol=len(row)
table += "<tr>"
for col in range(0,totcol):
table += "<td>" + str(row[col]) + "</td>"
table += "</tr>"
table += "</table>"
print table
return ""
def main():
parametri = cgi.FieldStorage()
print "Content-Type: text/html" # HTML is following
print # blank line, end of headers
print '<html>'
print '<head>'
print '<style>'
print 'body {background-color: #ccff66;font-family: Arial, Verdana, sans-serif;font-size: 12px;color: #000000;}'
print 'table {background-color: #ccff66;font-family: Arial, Verdana, sans-serif;font-size: 14px;color: #000000;}'
print 'table {border-collapse: collapse;}'
print 'table, th, td { border: 1px solid gray; }'
print '</style>'
print '</head>'
print '<body>'
glofile='./data/catasto.db'
mess=''
if not os.path.exists(glofile):
mess+="Manca il file -- " + glofile + '<br>'
glofile='./data/catasto_cart_4326.sqlite'
if not os.path.exists(glofile):
mess+="Manca il file -- " + glofile
if len(mess)>0:
print mess + '<br>'
        print '<a href=https://github.com/marcobra/opencatamap/wiki/OpenCataMap>More details about the required data files</a>'
return
    if (len(parametri) < 1):
        print "usage:<br> http://127.0.0.1:8080/cgi-bin/genera_html_su_urbano.py?N=Dam"
        return
    print 'Search parameters -> '
for key in parametri.keys():
print "%s = %s" % (key, parametri[key].value)
cercaurbano(parametri["n"].value)
if __name__ == "__main__":
main()
|
marcobra/opencatamap
|
cgi-bin/genera_html_su_urbano.py
|
Python
|
gpl-3.0
| 3,614
|
from django.core.exceptions import ValidationError
from django.test import TestCase
from squad.core import models
class TestEmailTemplateTest(TestCase):
def setUp(self):
self.email_template = models.EmailTemplate()
self.email_template.name = 'fooTemplate'
self.email_template.plain_text = 'text template'
self.email_template.save()
def test_invalid_template_syntax(self):
emailTemplate = models.EmailTemplate.objects.get(name='fooTemplate')
emailTemplate.subject = 'This is a {{ template'
with self.assertRaises(ValidationError):
emailTemplate.full_clean()
def test_valid_template_syntax(self):
emailTemplate = models.EmailTemplate.objects.get(name='fooTemplate')
emailTemplate.subject = 'This is a {{ template }}'
self.assertEqual(emailTemplate.full_clean(), None)
|
Linaro/squad
|
test/core/test_emailtemplate.py
|
Python
|
agpl-3.0
| 878
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# OpenFisca -- A versatile microsimulation software
# By: OpenFisca Team <contact@openfisca.fr>
#
# Copyright (C) 2011, 2012, 2013, 2014, 2015 OpenFisca Team
# https://github.com/openfisca
#
# This file is part of OpenFisca.
#
# OpenFisca is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# OpenFisca is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import gc
import numpy
import logging
from openfisca_france_data import default_config_files_directory as config_files_directory
from openfisca_france_data.temporary import temporary_store_decorator
from openfisca_france_data.input_data_builders.build_openfisca_survey_data.utils import assert_dtype
from openfisca_france_data.input_data_builders.build_openfisca_survey_data.base import year_specific_by_generic_data_frame_name
from openfisca_survey_manager.survey_collections import SurveyCollection
log = logging.getLogger(__name__)
@temporary_store_decorator(config_files_directory = config_files_directory, file_name = "erfs")
def create_indivim_menagem(temporary_store = None, year = None):
"""
    Create the merged household (menages) and individual (individus) tables
"""
    # Prepare some useful merged tables
assert temporary_store is not None
assert year is not None
# load data
erfs_survey_collection = SurveyCollection.load(
collection = 'erfs', config_files_directory = config_files_directory)
year_specific_by_generic = year_specific_by_generic_data_frame_name(year)
survey = erfs_survey_collection.get_survey('erfs_{}'.format(year))
erfmen = survey.get_values(table = year_specific_by_generic["erf_menage"])
eecmen = survey.get_values(table = year_specific_by_generic["eec_menage"])
erfind = survey.get_values(table = year_specific_by_generic["erf_indivi"])
eecind = survey.get_values(table = year_specific_by_generic["eec_indivi"])
    # check consistency between the data sources
noappar_m = eecmen[~(eecmen.ident.isin(erfmen.ident.values))].copy()
    noappar_i = eecind[~(eecind.ident.isin(erfind.ident.values))].copy()
noappar_i = noappar_i.drop_duplicates(subset = 'ident', take_last = True)
# TODO: vérifier qu'il n'y a théoriquement pas de doublon
difference = set(noappar_i.ident).symmetric_difference(noappar_m.ident)
intersection = set(noappar_i.ident) & set(noappar_m.ident)
log.info("There are {} differences and {} intersections".format(len(difference), len(intersection)))
del noappar_i, noappar_m, difference, intersection
gc.collect()
# fusion enquete emploi et source fiscale
menagem = erfmen.merge(eecmen)
indivim = eecind.merge(erfind, on = ['noindiv', 'ident', 'noi'], how = "inner")
    # optimize the dtypes? Also checks that the variables exist in passing
# TODO: minimal dtype
# TODO: this should be done somewhere else
var_list = ([
'acteu',
'agepr',
'cohab',
'contra',
'encadr',
'forter',
'lien',
'mrec',
'naia',
'noicon',
'noimer',
'noiper',
'prosa',
'retrai',
'rstg',
'statut',
'stc',
'titc',
'txtppb',
])
for var in var_list:
assert numpy.issubdtype(indivim[var].dtype , numpy.integer), "Variable {} dtype is {} and should be an integer".format(
var, indivim[var].dtype)
    ########################
    # variable creation    #
    ########################
# print indivim
    # actrec: activity recoded as recommended by INSEE, p. 84 of the user guide
indivim["actrec"] = numpy.nan
    # Note -- Q: no category 6?! A: correct, INSEE's recoded variable has no 6 \
    # (see p. 84 of the methodology guide); the same nomenclature is adopted here
    # 3: fixed-term contract
indivim.actrec.loc[indivim.acteu == 1] = 3
    # 8 : homemaker or other inactive person
indivim.actrec.loc[indivim.acteu == 3] = 8
    # 1 : employed, non-salaried
    filter1 = (indivim.acteu == 1) & (indivim.stc.isin([1, 3]))  # employed non-salaried: self-employed or working
    indivim.actrec.loc[filter1] = 1  # for a family member
    # 2 : salaried, open-ended contract
filter2 = (indivim.acteu == 1) & (((indivim.stc == 2) & (indivim.contra == 1)) | (indivim.titc == 2))
indivim.actrec.loc[filter2] = 2
    # 4 : unemployed
filter4 = (indivim.acteu == 2) | ((indivim.acteu == 3) & (indivim.mrec == 1))
indivim.actrec.loc[filter4] = 4
    # 5 : pupil, student, or unpaid intern
filter5 = (indivim.acteu == 3) & ((indivim.forter == 2) | (indivim.rstg == 1))
indivim.actrec.loc[filter5] = 5
    # 7 : retired, early-retired, or withdrawn from business (unchecked)
filter7 = (indivim.acteu == 3) & ((indivim.retrai == 1) | (indivim.retrai == 2))
indivim.actrec.loc[filter7] = 7
    # 9 : probably children under 16 TODO: check that fact in database and questionnaire
indivim.actrec.loc[indivim.acteu == 0] = 9
indivim.actrec = indivim.actrec.astype("int8")
assert_dtype(indivim.actrec, "int8")
assert indivim.actrec.isin(range(1, 10)).all(), 'actrec values are outside the interval [1, 9]'
# TODO : compare the result with results provided by Insee
# tu99
if year == 2009:
erfind['tu99'] = None # TODO: why ?
    # Tenant status
menagem["locataire"] = menagem.so.isin([3, 4, 5])
assert_dtype(menagem.locataire, "bool")
transfert = indivim.loc[indivim.lpr == 1, ['ident', 'ddipl']].copy()
menagem = menagem.merge(transfert)
# Correction
def _manually_remove_errors():
'''
        This method exists because some oddities can make it through the checks
        throughout the procedure. It removes those individual errors so they do
        not compromise the rest of the process.
'''
if year == 2006:
indivim.lien[indivim.noindiv == 603018905] = 2
indivim.noimer[indivim.noindiv == 603018905] = 1
log.info("{}".format(indivim[indivim.noindiv == 603018905].to_string()))
_manually_remove_errors()
temporary_store['menagem_{}'.format(year)] = menagem
del eecmen, erfmen, menagem, transfert
gc.collect()
temporary_store['indivim_{}'.format(year)] = indivim
del erfind, eecind
@temporary_store_decorator(config_files_directory = config_files_directory, file_name = "erfs")
def create_enfants_a_naitre(temporary_store = None, year = None):
'''
'''
assert temporary_store is not None
assert year is not None
erfs_survey_collection = SurveyCollection.load(
collection = 'erfs', config_files_directory = config_files_directory)
survey = erfs_survey_collection.get_survey('erfs_{}'.format(year))
    # Unborn children (NN for 'nouveaux nés', newborns)
individual_vars = [
'acteu',
'agepr',
'cohab',
'contra',
'forter',
'ident',
'lien',
'lpr',
'mrec',
'naia',
'naim',
'noi',
'noicon',
'noimer',
'noindiv',
'noiper',
'retrai',
'rga',
'rstg',
'sexe',
'stc',
'titc',
]
year_specific_by_generic = year_specific_by_generic_data_frame_name(year)
eeccmp1 = survey.get_values(table = year_specific_by_generic["eec_cmp_1"], variables = individual_vars)
eeccmp2 = survey.get_values(table = year_specific_by_generic["eec_cmp_2"], variables = individual_vars)
eeccmp3 = survey.get_values(table = year_specific_by_generic["eec_cmp_3"], variables = individual_vars)
tmp = eeccmp1.merge(eeccmp2, how = "outer")
enfants_a_naitre = tmp.merge(eeccmp3, how = "outer")
    # optimize the dtypes? Also checks that the variables exist in passing
    # why not ints where possible
    # TODO: minimal dtype TODO: shouldn't be here
for var in individual_vars:
assert_dtype(enfants_a_naitre[var], 'float')
del eeccmp1, eeccmp2, eeccmp3, individual_vars, tmp
gc.collect()
    # variable creation
enfants_a_naitre['declar1'] = ''
enfants_a_naitre['noidec'] = 0
enfants_a_naitre['ztsai'] = 0
enfants_a_naitre['year'] = year
enfants_a_naitre.year = enfants_a_naitre.year.astype("float32") # TODO: should be an integer but NaN are present
enfants_a_naitre['agepf'] = enfants_a_naitre.year - enfants_a_naitre.naia
enfants_a_naitre.loc[enfants_a_naitre.naim >= 7,'agepf'] -= 1
enfants_a_naitre['actrec'] = 9
enfants_a_naitre['quelfic'] = 'ENF_NN'
enfants_a_naitre['persfip'] = ""
# TODO: deal with agepf
for series_name in ['actrec', 'noidec', 'ztsai']:
assert_dtype(enfants_a_naitre[series_name], "int")
# selection
enfants_a_naitre = enfants_a_naitre[
(
(enfants_a_naitre.naia == enfants_a_naitre.year) & (enfants_a_naitre.naim >= 10)
) | (
(enfants_a_naitre.naia == enfants_a_naitre.year + 1) & (enfants_a_naitre.naim <= 5)
)
].copy()
temporary_store["enfants_a_naitre_{}".format(year)] = enfants_a_naitre
if __name__ == '__main__':
log.info('Entering 01_pre_proc')
import sys
import time
logging.basicConfig(level = logging.INFO, stream = sys.stdout)
deb = time.clock()
year = 2009
create_indivim_menagem(year = year)
create_enfants_a_naitre(year = year)
log.info("etape 01 pre-processing terminee en {}".format(time.clock() - deb))
|
adrienpacifico/openfisca-france-data
|
openfisca_france_data/input_data_builders/build_openfisca_survey_data/step_01_pre_processing.py
|
Python
|
agpl-3.0
| 10,072
|
# $Id$
#
# pjsua Setup script for Visual Studio
#
# Copyright (C) 2003-2008 Benny Prijono <benny@prijono.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
from distutils.core import setup, Extension
import os
import sys
# Find version
pj_version=""
pj_version_major=""
pj_version_minor=""
pj_version_rev=""
pj_version_suffix=""
f = open('../../../version.mak', 'r')
for line in f:
if line.find("export PJ_VERSION_MAJOR") != -1:
tokens=line.split("=")
if len(tokens)>1:
pj_version_major= tokens[1].strip()
elif line.find("export PJ_VERSION_MINOR") != -1:
tokens=line.split("=")
if len(tokens)>1:
pj_version_minor= line.split("=")[1].strip()
elif line.find("export PJ_VERSION_REV") != -1:
tokens=line.split("=")
if len(tokens)>1:
pj_version_rev= line.split("=")[1].strip()
elif line.find("export PJ_VERSION_SUFFIX") != -1:
tokens=line.split("=")
if len(tokens)>1:
pj_version_suffix= line.split("=")[1].strip()
f.close()
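# The parser above assumes version.mak contains simple assignments, roughly
# of this form (illustrative; actual values vary per release, and the split
# on '=' also tolerates ':='):
#
#   export PJ_VERSION_MAJOR := 1
#   export PJ_VERSION_MINOR := 4
#   export PJ_VERSION_REV := 5
#   export PJ_VERSION_SUFFIX := -svn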
if not pj_version_major:
print 'Unable to get PJ_VERSION_MAJOR'
sys.exit(1)
pj_version = pj_version_major + "." + pj_version_minor
if pj_version_rev:
pj_version += "." + pj_version_rev
if pj_version_suffix:
pj_version += "-" + pj_version_suffix
#print 'PJ_VERSION = "'+ pj_version + '"'
# Check that extension has been built
if not os.access('../../lib/_pjsua.pyd', os.R_OK):
print 'Error: file "../../lib/_pjsua.pyd" does not exist!'
print ''
print 'Please build the extension with Visual Studio first'
print 'For more info, see http://trac.pjsip.org/repos/wiki/Python_SIP_Tutorial'
sys.exit(1)
setup(name="pjsua",
version=pj_version,
description='SIP User Agent Library based on PJSIP',
url='http://trac.pjsip.org/repos/wiki/Python_SIP_Tutorial',
data_files=[('lib/site-packages', ['../../lib/_pjsua.pyd'])],
py_modules=["pjsua"]
)
|
xhook/asterisk-v11
|
res/pjproject/pjsip-apps/src/python/setup-vc.py
|
Python
|
gpl-2.0
| 2,533
|
# Read the board drawing: 8 piece rows, each preceded by a separator row,
# plus one trailing separator; strip the padding character on each side of
# every square.
table = []
for i in range(8):
    input()
    table += [list(map(lambda x: x[1:-1], input().split('|')[1:-1]))]
input()
# The drawing starts at rank 8, so reverse to index rows by rank.
table.reverse()
pieces = {'K': [], 'Q': [], 'R': [], 'B': [], 'N': [], 'P': [], 'k': [], 'q': [], 'r': [], 'b': [], 'n': [], 'p': []}
for i in range(8):
for j in range(8):
if table[i][j] in pieces.keys():
pieces[table[i][j]] += [((lambda c: c.upper() if c.lower() != 'p' else '')(table[i][j]), chr(97+j), i + 1)]
# White pieces sort by ascending rank then file; black by descending rank
# then file.
for key, val in pieces.items():
    if key.isupper():
        val.sort(key=lambda x: (x[2], x[1]))
    else:
        val.sort(key=lambda x: (-x[2], x[1]))
print('White: ', end='')
print(','.join(list(map(lambda x: (x[0]+x[1]+str(x[2])), (pieces['K'] + pieces['Q'] + pieces['R'] + pieces['B'] + pieces['N'] + pieces['P'])))))
print('Black: ', end='')
print(','.join(list(map(lambda x: (x[0]+x[1]+str(x[2])), (pieces['k'] + pieces['q'] + pieces['r'] + pieces['b'] + pieces['n'] + pieces['p'])))))
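# Illustrative run, inferred from the parsing logic above (not verified
# against the judge): the input is a 17-line board drawing alternating
# "+---+---+..." separators with piece rows like "|.r.|:::|.b.|...",
# top row = rank 8. For the initial position the output would be:
#
#   White: Ke1,Qd1,Ra1,Rh1,Bc1,Bf1,Nb1,Ng1,a2,b2,c2,d2,e2,f2,g2,h2
#   Black: Ke8,Qd8,Ra8,Rh8,Bc8,Bf8,Nb8,Ng8,a7,b7,c7,d7,e7,f7,g7,h7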
|
JonSteinn/Kattis-Solutions
|
src/Help Me With The Game/Python 3/helpgame.py
|
Python
|
gpl-3.0
| 955
|
#/***********************************************************************
# * Licensed Materials - Property of IBM
# *
# * IBM SPSS Products: Statistics Common
# *
# * (C) Copyright IBM Corp. 1989, 2020
# *
# * US Government Users Restricted Rights - Use, duplication or disclosure
# * restricted by GSA ADP Schedule Contract with IBM Corp.
# ************************************************************************/
__author__ = 'IBM SPSS, JKP'
__version__ = '1.0.2'
version = __version__
# history
# 04-jun-2014 Original version
# 05-jun-2014 Adjust for different Datasets table structure before V22
helptext = """
Manage datasets.
STATS DATASET NAME=dsname ACTIVATE=existing dsname
WINDOW=ASIS* or FRONT
CLOSE = list of dsnames or ALL
KEEP = list of dsnames
DISPLAY = NO* or YES.
Examples.
STATS DATASET CLOSE=ALL KEEP=w.
This closes all datasets except the one named w.
STATS DATASET NAME=a ACTIVATE=b CLOSE=c d.
assigns the name a to the active dataset; then
activates dataset b, and finally closes
datasets c and d.
All keywords are optional, but if none are given, the command
will do nothing. * indicates the default choice for keyword
values.
All of the functionality of STATS DATASET maps to built-in
DATASET commands, but it provides some conveniences and can
handle the "almost all" case for closing datasets.
Note that closing a dataset does not necessarily remove
it from the session. The active dataset does not have
to have a name. However, an unnamed active dataset
would be removed from the session if another (named)
dataset is activated.
NAME specifies a dataset name to assign to the active dataset.
ACTIVATE specifies a dataset to be activated.
WINDOW=ASIS or FRONT specifies the window behavior for ACTIVATE.
CLOSE specifies datasets to be closed. It can be a list of
dataset names or ALL.
KEEP specifies datasets not to close whether or not
they are listed or implied in CLOSE. Specifying KEEP without
CLOSE means that all datasets except the KEEP list will be
closed.
Any names specified that do not exist are silently ignored,
but errors in NAME or ACTIVATE stop the command.
DISPLAY specifies whether or not to run a DATASET DISPLAY
command.
The steps are executed in the order that these keywords are
listed. That is, first NAME; then ACTIVATE/WINDOW;
then CLOSE/KEEP; finally DISPLAY.
/HELP displays this help and does nothing else."""
from extension import Template, Syntax, processcmd
import spss
import random
def dodataset(name=None, activate=None, window="asis", close=None,
keep="", display=False):
# debugging
# makes debug apply only to the current thread
#try:
#import wingdbstub
#if wingdbstub.debugger != None:
#import time
#wingdbstub.debugger.StopDebug()
#time.sleep(1)
#wingdbstub.debugger.StartDebug()
#import thread
#wingdbstub.debugger.SetDebugThreads({thread.get_ident(): 1}, default_policy=0)
## for V19 use
### ###SpssClient._heartBeat(False)
#except:
#pass
if name:
spss.Submit("DATASET NAME %s." % name)
if activate:
# stops with error if no such dataset
spss.Submit("DATASET ACTIVATE %s WINDOW=%s" % (activate, window))
# CLOSE processing
if keep and not close:
close = "all"
if close:
close = set([ds.lower() for ds in close])
keep = set([ds.lower() for ds in keep])
allds = getallds()
if "all" in close:
if len(close) > 1:
raise ValueError(_("""ALL cannot be used with other names in the close list"""))
close = allds
close = close.intersection(allds) # remove undefined names
for d in close - keep:
spss.Submit("DATASET CLOSE %s" % d)
if display:
spss.Submit("DATASET DISPLAY")
def getallds():
# return set of all datasets with names in lower case
randomname = "D" + str(random.uniform(.1,1))
spss.Submit("""oms /destination viewer=no xmlworkspace="%(randomname)s" format=oxml
/tag = "%(randomname)s".
dataset display.
omsend tag="%(randomname)s".""" % locals())
# take care to get only the first-column attributes
if spss.GetDefaultPlugInVersion() >= "spss220":
existingdsnames = set([s.lower() for s in spss.EvaluateXPath(randomname, "/outputTree",
"""//pivotTable[@subType="Datasets"]//dimension/category/dimension/category[1]/cell/@text""")])
else:
existingdsnames = set([s.lower() for s in spss.EvaluateXPath(randomname, "/outputTree",
"""//pivotTable[@subType="Datasets"]/dimension/category/cell/@text""")])
spss.DeleteXPathHandle(randomname)
# The unnamed dataset would be listed as (unnamed) or (translation of unnamed)
for item in existingdsnames:
if item.startswith("("):
existingdsnames.discard(item)
break
return(existingdsnames)
def Run(args):
"""Execute the STATS DATASET extension command"""
args = args[list(args.keys())[0]]
oobj = Syntax([
Template("NAME", subc="", ktype="varname", var="name"),
Template("ACTIVATE", subc="", ktype="varname", var="activate"),
Template("WINDOW", subc="", ktype="str", var="window",
vallist=["asis", "front"]),
Template("CLOSE", subc="", ktype="varname", var="close", islist=True),
Template("KEEP", subc="", ktype="varname", var="keep", islist=True),
Template("DISPLAY", subc="", ktype="bool", var="display"),
Template("HELP", subc="", ktype="bool")])
#enable localization
global _
try:
_("---")
except:
def _(msg):
return msg
# A HELP subcommand overrides all else
if "HELP" in args:
#print helptext
helper()
else:
processcmd(oobj, args, dodataset)
def helper():
"""open html help in default browser window
The location is computed from the current module name"""
import webbrowser, os.path
path = os.path.splitext(__file__)[0]
helpspec = "file://" + path + os.path.sep + \
"markdown.html"
# webbrowser.open seems not to work well
browser = webbrowser.get()
if not browser.open_new(helpspec):
print(("Help file not found:" + helpspec))
try: #override
from extension import helper
except:
pass
#class NonProcPivotTable(object):
#"""Accumulate an object that can be turned into a basic pivot table once a procedure state can be established"""
#def __init__(self, omssubtype, outlinetitle="", tabletitle="", caption="", rowdim="", coldim="", columnlabels=[],
#procname="Messages"):
#"""omssubtype is the OMS table subtype.
#caption is the table caption.
#tabletitle is the table title.
#columnlabels is a sequence of column labels.
#If columnlabels is empty, this is treated as a one-column table, and the rowlabels are used as the values with
#the label column hidden
#procname is the procedure name. It must not be translated."""
#attributesFromDict(locals())
#self.rowlabels = []
#self.columnvalues = []
#self.rowcount = 0
#def addrow(self, rowlabel=None, cvalues=None):
#"""Append a row labelled rowlabel to the table and set value(s) from cvalues.
#rowlabel is a label for the stub.
#cvalues is a sequence of values with the same number of values are there are columns in the table."""
#if cvalues is None:
#cvalues = []
#self.rowcount += 1
#if rowlabel is None:
#self.rowlabels.append(str(self.rowcount))
#else:
#self.rowlabels.append(rowlabel)
#self.columnvalues.extend(cvalues)
#def generate(self):
#"""Produce the table assuming that a procedure state is now in effect if it has any rows."""
#privateproc = False
#if self.rowcount > 0:
#try:
#table = spss.BasePivotTable(self.tabletitle, self.omssubtype)
#except:
#spss.StartProcedure(self.procname)
#privateproc = True
#table = spss.BasePivotTable(self.tabletitle, self.omssubtype)
#if self.caption:
#table.Caption(self.caption)
#if self.columnlabels != []:
#table.SimplePivotTable(self.rowdim, self.rowlabels, self.coldim, self.columnlabels, self.columnvalues)
#else:
#table.Append(spss.Dimension.Place.row,"rowdim",hideName=True,hideLabels=True)
#table.Append(spss.Dimension.Place.column,"coldim",hideName=True,hideLabels=True)
#colcat = spss.CellText.String("Message")
#for r in self.rowlabels:
#cellr = spss.CellText.String(r)
#table[(cellr, colcat)] = cellr
#if privateproc:
#spss.EndProcedure()
|
IBMPredictiveAnalytics/STATS_DATASET
|
src/STATS_DATASET.py
|
Python
|
apache-2.0
| 9,091
|
from google.appengine.ext.deferred import defer
from google.appengine.runtime import DeadlineExceededError
def _process_shard(model, instance_ids, callback):
for instance in model.objects.filter(pk__in=instance_ids):
callback(instance)
def _shard(model, query, callback, shard_size, queue, offset=0):
keys_queryset = model.objects.all()
keys_queryset.query = query
keys_queryset = keys_queryset.values_list("pk", flat=True)
# Keep iterating until we are done, or we only have 10 seconds to spare!
while True:
try:
ids = list(keys_queryset.all()[offset:offset+shard_size])
if not ids:
# We're done!
return
            # Fire off a deferred task to process this shard
            defer(_process_shard, model, ids, callback, _queue=queue)
# Increment the offset
offset += shard_size
except DeadlineExceededError:
# If we run out of time, then defer this function again, continuing from the offset
defer(
_shard,
model,
query,
callback,
shard_size,
queue,
offset=offset,
_queue=queue
)
def defer_iteration(queryset, callback, shard_size=500, _queue="default", _target=None):
"""
Shards background tasks to call 'callback' with each instance in queryset
- `queryset` - The queryset to iterate
- `callback` - A callable which accepts an instance as a parameter
    - `shard_size` - The number of instances to process per shard (default 500)
    - `_queue` - The name of the queue to run the shards on
    Note: your callback must be idempotent; shards may retry, and your callback
    may be called multiple times on the same instance. If you notice that your
    tasks are receiving DeadlineExceededErrors, you probably need to reduce the
    shard size. The shards run in parallel, not sequentially.
"""
# We immediately defer the _shard function so that we don't hold up execution
defer(_shard, queryset.model, queryset.query, callback, shard_size, _queue, _queue=_queue, _target=_target)
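# A minimal usage sketch (the Patient model and normalise callback are
# hypothetical, not part of this module): process every Patient row in
# background shards of 250 instances on a dedicated queue.
#
#     def normalise(patient):
#         patient.name = patient.name.strip().title()
#         patient.save()
#
#     defer_iteration(Patient.objects.all(), normalise, shard_size=250,
#                     _queue="maintenance")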
|
Ali-aqrabawi/ezclinic
|
lib/djangae/contrib/mappers/defer.py
|
Python
|
mit
| 2,243
|
class OutResourcesError(Exception):
""" Out of free resources """
pass
class NotFoundError(Exception):
    """ Resource not found among those allocated """
pass
|
AHAPX/resm
|
src/errors.py
|
Python
|
gpl-3.0
| 168
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from rml2txt import parseString, parseNode
""" This engine is the minimalistic renderer of RML documents into text files,
using spaces and newlines to format.
It was needed in some special applications where legal reports need to be
printed on special (dot-matrix) printers.
"""
|
vileopratama/vitech
|
src/openerp/report/render/rml2txt/__init__.py
|
Python
|
mit
| 395
|
# -*- coding: utf-8 -*-
"""This module contains some Sobol functions.
"""
__author__ = 'Fei Xie'
__all__ = []
def getconstant(name='', **kwarg):
"""Get constants from file by name."""
from . import __path__ as path
from numpy import load
from os.path import join
from os import listdir
path = path[0]
if not name.endswith(".npy"):
name+='.npy'
    if name not in listdir(path):
        raise ValueError("File {0} does not exist.".format(name))
temp = load(join(path, name))
return temp
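# Usage sketch (the constant name "sobol_directions" is hypothetical): load a
# .npy table that ships alongside this package.
#
#     directions = getconstant("sobol_directions")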
|
wzmao/fmath
|
fmath/Random/Constant.py
|
Python
|
gpl-2.0
| 530
|
from docker.api import APIClient
from docker.errors import APIError
from docker.types import SwarmSpec
from .resource import Model
class Swarm(Model):
"""
    The server's Swarm state. This is a singleton that must be reloaded to get
the current state of the Swarm.
"""
id_attribute = 'ID'
def __init__(self, *args, **kwargs):
super(Swarm, self).__init__(*args, **kwargs)
if self.client:
try:
self.reload()
except APIError as e:
# FIXME: https://github.com/docker/docker/issues/29192
if e.response.status_code not in (406, 503):
raise
@property
def version(self):
"""
The version number of the swarm. If this is not the same as the
server, the :py:meth:`update` function will not work and you will
need to call :py:meth:`reload` before calling it again.
"""
return self.attrs.get('Version').get('Index')
def init(self, advertise_addr=None, listen_addr='0.0.0.0:2377',
force_new_cluster=False, **kwargs):
"""
Initialize a new swarm on this Engine.
Args:
advertise_addr (str): Externally reachable address advertised to
other nodes. This can either be an address/port combination in
the form ``192.168.1.1:4567``, or an interface followed by a
port number, like ``eth0:4567``. If the port number is omitted,
the port number from the listen address is used.
If not specified, it will be automatically detected when
possible.
listen_addr (str): Listen address used for inter-manager
communication, as well as determining the networking interface
used for the VXLAN Tunnel Endpoint (VTEP). This can either be
an address/port combination in the form ``192.168.1.1:4567``,
or an interface followed by a port number, like ``eth0:4567``.
If the port number is omitted, the default swarm listening port
is used. Default: ``0.0.0.0:2377``
force_new_cluster (bool): Force creating a new Swarm, even if
already part of one. Default: False
            task_history_retention_limit (int): Maximum number of task
                history entries stored.
            snapshot_interval (int): Number of log entries between snapshots.
keep_old_snapshots (int): Number of snapshots to keep beyond the
current snapshot.
log_entries_for_slow_followers (int): Number of log entries to
keep around to sync up slow followers after a snapshot is
created.
heartbeat_tick (int): Amount of ticks (in seconds) between each
heartbeat.
election_tick (int): Amount of ticks (in seconds) needed without a
leader to trigger a new election.
dispatcher_heartbeat_period (int): The delay for an agent to send
a heartbeat to the dispatcher.
            node_cert_expiry (int): Automatic expiry for node certificates.
external_ca (dict): Configuration for forwarding signing requests
to an external certificate authority. Use
``docker.types.SwarmExternalCA``.
name (string): Swarm's name
Returns:
``True`` if the request went through.
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
Example:
>>> client.swarm.init(
advertise_addr='eth0', listen_addr='0.0.0.0:5000',
force_new_cluster=False, snapshot_interval=5000,
log_entries_for_slow_followers=1200
)
"""
init_kwargs = {
'advertise_addr': advertise_addr,
'listen_addr': listen_addr,
'force_new_cluster': force_new_cluster
}
init_kwargs['swarm_spec'] = SwarmSpec(**kwargs)
self.client.api.init_swarm(**init_kwargs)
self.reload()
def join(self, *args, **kwargs):
return self.client.api.join_swarm(*args, **kwargs)
join.__doc__ = APIClient.join_swarm.__doc__
def leave(self, *args, **kwargs):
return self.client.api.leave_swarm(*args, **kwargs)
leave.__doc__ = APIClient.leave_swarm.__doc__
def reload(self):
"""
Inspect the swarm on the server and store the response in
:py:attr:`attrs`.
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
"""
self.attrs = self.client.api.inspect_swarm()
def update(self, rotate_worker_token=False, rotate_manager_token=False,
**kwargs):
"""
Update the swarm's configuration.
It takes the same arguments as :py:meth:`init`, except
``advertise_addr``, ``listen_addr``, and ``force_new_cluster``. In
addition, it takes these arguments:
Args:
rotate_worker_token (bool): Rotate the worker join token. Default:
``False``.
rotate_manager_token (bool): Rotate the manager join token.
Default: ``False``.
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
"""
# this seems to have to be set
if kwargs.get('node_cert_expiry') is None:
kwargs['node_cert_expiry'] = 7776000000000000
return self.client.api.update_swarm(
version=self.version,
swarm_spec=SwarmSpec(**kwargs),
rotate_worker_token=rotate_worker_token,
rotate_manager_token=rotate_manager_token
)
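# Usage sketch (values are illustrative): rotate both join tokens while
# adjusting one SwarmSpec field; update() accepts the same keyword arguments
# as init() apart from the address and cluster ones.
#
#     client.swarm.update(rotate_worker_token=True, rotate_manager_token=True,
#                         task_history_retention_limit=10)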
|
shakamunyi/docker-py
|
docker/models/swarm.py
|
Python
|
apache-2.0
| 5,852
|
"""Long enough spam checker backend for Zinnia"""
from zinnia.settings import COMMENT_MIN_WORDS
def backend(comment, content_object, request):
"""
Backend checking if the comment posted is long enough to be public.
    Generally a comment with few words is useless.
    This will avoid comments like these:
- First !
- I don't like.
- Check http://spam-ads.com/
"""
return len(comment.comment.split()) < COMMENT_MIN_WORDS
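# Behaviour sketch (assuming zinnia's spam-checker contract, where a True
# return value means "treat as spam"): with COMMENT_MIN_WORDS = 4, a two-word
# comment such as "First !" is flagged, while a ten-word comment passes.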
|
pczhaoyun/obtainfo
|
zinnia/spam_checker/backends/long_enough.py
|
Python
|
apache-2.0
| 452
|
import os
import sys
import re
if __name__ == '__main__':
script, workingDir, jdkDir, outputPath, logPath = sys.argv
os.chdir(workingDir)
outputDir = os.path.dirname(outputPath)
if not os.path.exists(outputDir):
os.makedirs(outputDir)
oldPathEnv = os.environ['PATH']
os.environ['PATH'] = "%s%sbin%s%s" % (jdkDir, os.sep, os.pathsep, oldPathEnv)
    os.environ['JAVA_HOME'] = os.path.join(jdkDir, "jre")
cmd = "jar cvf \"%s\" * > \"%s\" 2>&1" % (outputPath, logPath)
exitcode = os.system(cmd)
with open(logPath, "rt") as logFile:
logContents = logFile.read()
if re.search("warning:|error:", logContents, re.MULTILINE):
print("%s" % logContents)
sys.exit(exitcode)
|
fifoforlifo/pynja
|
packages/pynja/scripts/jar-invoke.py
|
Python
|
apache-2.0
| 728
|
###############################################################################################
# $Id: ttest.py,v 1.1 2003/09/14 04:31:39 riq Exp $
###############################################################################################
import pygame
from pygame.locals import *
import twidget
from tbutton import TButton
from twidget_manager import TWidgetManager
from tdialog import *
import theme
class TTest(TDialog):
def __init__ (self):
"Creates the dialog."
# init superclass
TDialog.__init__ (self, TWidgetManager.sdl_surface)
def createWidgets (self):
"Creates all widgets for the dialog."
        # create the connect button
self.wm.register ( TButton ( 'Connect', theme.Theme.loadImage('icon_connect.png'), {twidget.MOUSEBUTTONUP : self.ok } ) )
def ok (self, trigger, event):
"""Callback triggered when the user clicks the 'Ok' button. Simply closes the dialog and
returns to the main dialog."""
# we're cancelling the dialog
self.state = ACCEPTED
return twidget.DONE
# Local Variables:
# mode: auto-fill
# fill-column: 100
# End:
|
JeroenDeDauw/teg
|
python/client/gui/ttest.py
|
Python
|
gpl-3.0
| 1,186
|
# THIS FILE IS PART OF THE CYLC WORKFLOW ENGINE.
# Copyright (C) NIWA & British Crown (Met Office) & Contributors.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import unittest
from cylc.flow.cycling.integer import (
IntegerSequence, IntegerPoint, IntegerInterval)
class TestIntegerSequence(unittest.TestCase):
"""Contains unit tests for the IntegerSequence class."""
def test_exclusions_simple(self):
"""Test the generation of points for integer sequences with exclusions.
"""
sequence = IntegerSequence('R/P1!3', 1, 5)
output = []
point = sequence.get_start_point()
while point:
output.append(point)
point = sequence.get_next_point(point)
self.assertEqual([int(out) for out in output], [1, 2, 4, 5])
def test_multiple_exclusions_simple(self):
"""Tests the multiple exclusion syntax for integer notation"""
sequence = IntegerSequence('R/P1!(2,3,7)', 1, 10)
output = []
point = sequence.get_start_point()
while point:
output.append(point)
point = sequence.get_next_point(point)
self.assertEqual([int(out) for out in output], [1, 4, 5, 6, 8, 9, 10])
def test_multiple_exclusions_integer_sequence(self):
"""Tests the multiple exclusion syntax for integer notation"""
sequence = IntegerSequence('P1 ! P2', 1, 10)
output = []
point = sequence.get_start_point()
while point:
output.append(point)
point = sequence.get_next_point(point)
self.assertEqual([int(out) for out in output], [2, 4, 6, 8, 10])
def test_multiple_exclusions_integer_sequence2(self):
"""Tests the multiple exclusion syntax for integer notation"""
sequence = IntegerSequence('P1 ! +P1/P2', 1, 10)
output = []
point = sequence.get_start_point()
while point:
output.append(point)
point = sequence.get_next_point(point)
self.assertEqual([int(out) for out in output], [1, 3, 5, 7, 9])
def test_multiple_exclusions_integer_sequence3(self):
"""Tests the multiple exclusion syntax for integer notation"""
sequence = IntegerSequence('P1 ! (P2, 6, 8) ', 1, 10)
output = []
point = sequence.get_start_point()
while point:
output.append(point)
point = sequence.get_next_point(point)
self.assertEqual([int(out) for out in output], [2, 4, 10])
def test_multiple_exclusions_integer_sequence_weird_valid_formatting(self):
"""Tests the multiple exclusion syntax for integer notation"""
sequence = IntegerSequence('P1 !(P2, 6,8) ', 1, 10)
output = []
point = sequence.get_start_point()
while point:
output.append(point)
point = sequence.get_next_point(point)
self.assertEqual([int(out) for out in output], [2, 4, 10])
def test_multiple_exclusions_integer_sequence_invalid_formatting(self):
"""Tests the multiple exclusion syntax for integer notation"""
sequence = 'P1 !(6,8), P2 '
self.assertRaises(Exception, IntegerSequence, sequence, 1, 10)
def test_multiple_exclusions_extensive(self):
"""Tests IntegerSequence methods for sequences with multi-exclusions"""
points = [IntegerPoint(i) for i in range(10)]
sequence = IntegerSequence('R/P1!(2,3,7)', 1, 10)
self.assertFalse(sequence.is_on_sequence(points[3]))
self.assertFalse(sequence.is_valid(points[3]))
self.assertEqual(sequence.get_prev_point(points[3]), points[1])
self.assertEqual(sequence.get_prev_point(points[4]), points[1])
self.assertEqual(sequence.get_nearest_prev_point(points[3]), points[1])
self.assertEqual(sequence.get_nearest_prev_point(points[4]), points[1])
self.assertEqual(sequence.get_next_point(points[3]), points[4])
self.assertEqual(sequence.get_next_point(points[2]), points[4])
self.assertEqual(sequence.get_next_point_on_sequence(
points[3]),
points[4])
self.assertEqual(sequence.get_next_point_on_sequence(
points[6]),
points[8])
sequence = IntegerSequence('R/P1!(1,3,4)', 1, 10)
self.assertEqual(sequence.get_first_point(points[1]), points[2])
self.assertEqual(sequence.get_first_point(points[0]), points[2])
self.assertEqual(sequence.get_start_point(), points[2])
sequence = IntegerSequence('R/P1!(8,9,10)', 1, 10)
self.assertEqual(sequence.get_stop_point(), points[7])
def test_exclusions_extensive(self):
"""Test IntegerSequence methods for sequences with exclusions."""
point_0 = IntegerPoint(0)
point_1 = IntegerPoint(1)
point_2 = IntegerPoint(2)
point_3 = IntegerPoint(3)
point_4 = IntegerPoint(4)
sequence = IntegerSequence('R/P1!3', 1, 5)
self.assertFalse(sequence.is_on_sequence(point_3))
self.assertFalse(sequence.is_valid(point_3))
self.assertEqual(sequence.get_prev_point(point_3), point_2)
self.assertEqual(sequence.get_prev_point(point_4), point_2)
self.assertEqual(sequence.get_nearest_prev_point(point_3), point_2)
self.assertEqual(sequence.get_nearest_prev_point(point_3), point_2)
self.assertEqual(sequence.get_next_point(point_3), point_4)
self.assertEqual(sequence.get_next_point(point_2), point_4)
self.assertEqual(sequence.get_next_point_on_sequence(point_3), point_4)
self.assertEqual(sequence.get_next_point_on_sequence(point_2), point_4)
sequence = IntegerSequence('R/P1!1', 1, 5)
self.assertEqual(sequence.get_first_point(point_1), point_2)
self.assertEqual(sequence.get_first_point(point_0), point_2)
self.assertEqual(sequence.get_start_point(), point_2)
sequence = IntegerSequence('R/P1!5', 1, 5)
self.assertEqual(sequence.get_stop_point(), point_4)
def test_simple(self):
"""Run some simple tests for integer cycling."""
sequence = IntegerSequence('R/1/P3', 1, 10)
start = sequence.p_start
stop = sequence.p_stop
# Test point generation forwards.
point = start
output = []
while point and stop and point <= stop:
output.append(point)
point = sequence.get_next_point(point)
self.assertEqual([int(out) for out in output], [1, 4, 7, 10])
# Test point generation backwards.
point = stop
output = []
while point and start and point >= start:
output.append(point)
point = sequence.get_prev_point(point)
self.assertEqual([int(out) for out in output], [10, 7, 4, 1])
# Test sequence comparison
sequence1 = IntegerSequence('R/1/P2', 1, 10)
sequence2 = IntegerSequence('R/1/P2', 1, 10)
self.assertEqual(sequence1, sequence2)
sequence2.set_offset(IntegerInterval('-P2'))
self.assertEqual(sequence1, sequence2)
sequence2.set_offset(IntegerInterval('-P1'))
self.assertNotEqual(sequence1, sequence2)
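# Syntax recap (as exercised by the tests above): "R/P1!3" excludes the single
# point 3, "R/P1!(2,3,7)" excludes several individual points, and "P1 ! P2"
# excludes a whole sub-sequence from the cycling sequence.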
|
cylc/cylc
|
tests/unit/cycling/test_integer.py
|
Python
|
gpl-3.0
| 7,781
|
# -*-coding:Utf-8 -*
# Copyright (c) 2010-2017 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Package contenant l'éditeur 'spedit'.
Si des redéfinitions de contexte-éditeur standard doivent être faites, elles
seront placées dans ce package.
Note importante : ce package contient la définition d'un éditeur, mais
celui-ci peut très bien être étendu par d'autres modules. Auquel cas,
les extensions n'apparaîtront pas ici.
"""
from primaires.interpreteur.editeur.choix import Choix
from primaires.interpreteur.editeur.description import Description
from primaires.interpreteur.editeur.entier import Entier
from primaires.interpreteur.editeur.flag import Flag
from primaires.interpreteur.editeur.presentation import Presentation
from primaires.interpreteur.editeur.selection import Selection
from primaires.interpreteur.editeur.uniligne import Uniligne
from primaires.scripting.editeurs.edt_script import EdtScript
from secondaires.magie.constantes import ELEMENTS
from .edt_difficulte import EdtDifficulte
from .supprimer import NSupprimer
class EdtSpedit(Presentation):
"""Classe définissant l'éditeur de sort 'spedit'.
"""
nom = "spedit"
def __init__(self, personnage, sort):
"""Constructeur de l'éditeur"""
if personnage:
instance_connexion = personnage.instance_connexion
else:
instance_connexion = None
Presentation.__init__(self, instance_connexion, sort)
if personnage and sort:
self.construire(sort)
def __getnewargs__(self):
return (None, None)
def construire(self, sort):
"""Construction de l'éditeur"""
# Nom
nom = self.ajouter_choix("nom", "n", Uniligne, sort, "nom")
nom.parent = self
nom.prompt = "Nom du sort (sans article) : "
nom.apercu = "{objet.nom}"
nom.aide_courte = \
"Entrez le |ent|nom|ff| du sort ou |cmd|/|ff| pour revenir " \
"à la fenêtre parente.\n\nNom actuel : |bc|{objet.nom}|ff|"
# Description
description = self.ajouter_choix("description", "d", Description, \
sort)
description.parent = self
description.apercu = "{objet.description.paragraphes_indentes}"
description.aide_courte = \
"| |tit|" + "Description du sort {}".format(sort.cle).ljust(76) + \
"|ff||\n" + self.opts.separateur
        # Tribute points
tribut = self.ajouter_choix("points de tribut", "tr", Entier, sort,
"points_tribut", 1)
tribut.parent = self
tribut.prompt = "Points de tribut nécessaire pour apprendre le sort : "
tribut.apercu = "{objet.points_tribut}"
tribut.aide_courte = \
"Entrez le |ent|nombre de points de tribut|ff| nécessaires "\
"pour apprendre le sort\nou |cmd|/|ff| pour revenir à la " \
"fenêtre parente.\n\nPoints de tribut actuels : " \
"|bc|{objet.points_tribut}|ff|"
        # Elements
elements = self.ajouter_choix("eléments", "e", Selection, sort,
"elements", ELEMENTS)
elements.parent = self
elements.apercu = "{objet.str_elements}"
elements.aide_courte = \
"Entrez un |ent|élément|ff| pour l'ajouter " \
"ou le retirer\nou |cmd|/|ff| " \
"pour revenir à la fenêtre parente.\n\n" \
"Éléments existants : |cmd|" + "|ff|, |cmd|".join(
ELEMENTS) + "\n" \
"Éléments actuels : |bc|{objet.str_elements}|ff|"
        # Spell type
types = ["destruction", "alteration", "invocation", "illusion"]
type = self.ajouter_choix("type de sort", "s", Choix, sort,
"type", types)
type.parent = self
type.prompt = "Type de sort : "
type.apercu = "{objet.type}"
type.aide_courte = \
"Entrez le |ent|type|ff| du sort ou |cmd|/|ff| " \
"pour revenir à la fenêtre parente.\nTypes disponibles : |cmd|" \
"{}|ff|.\n\nType actuel : |bc|{{objet.type}}|ff|".format(
"|ff|, |cmd|".join(types))
        # Target
types = ["aucune", "personnage", "objet", "salle"]
cible = self.ajouter_choix("type de cible", "c", Choix, sort,
"type_cible", types)
cible.parent = self
cible.prompt = "Type de cible : "
cible.apercu = "{objet.type_cible}"
cible.aide_courte = \
"Entrez le |ent|type de cible|ff| du sort ou |cmd|/|ff| " \
"pour revenir à la fenêtre parente.\nTypes disponibles : |cmd|" \
"{}|ff|.\n\nType actuel : |bc|{{objet.type_cible}}|ff|".format(
"|ff|, |cmd|".join(types))
# Stats
stats = self.ajouter_choix("stats", "st", Selection, sort,
"stats", ("agilite", "intelligence", "sensibilite"))
stats.parent = self
stats.apercu = "{objet.str_stats}"
stats.aide_courte = \
"Entrez une |ent|stat|ff| pour l'ajouter " \
"ou la retirer\nou |cmd|/|ff| " \
"pour revenir à la fenêtre parente.\n\n" \
"stats actuelles : |bc|{objet.str_stats}|ff|"
        # Difficulty
difficulte = self.ajouter_choix("difficulté", "i", Entier, sort,
"difficulte", 0, 100)
difficulte.parent = self
difficulte.prompt = "Difficulté d'apprentissage : "
difficulte.apercu = "{objet.difficulte}"
difficulte.aide_courte = \
"Paramétrez la |ent|difficulté|ff| d'apprentissage du sort " \
"entre |cmd|0|ff| et |cmd|100|ff| ou entrez\n|cmd|/|ff| pour " \
"revenir à la fenêtre parente. |cmd|100|ff| signifie que le sort " \
"ne peut pas\nêtre appris par la pratique.\n\n" \
"Difficulté actuelle : |bc|{objet.difficulte}|ff|"
        # Cost
cout = self.ajouter_choix("coût", "o", Entier, sort, "cout")
cout.parent = self
cout.prompt = "Coùt en mana : "
cout.apercu = "{objet.cout}"
cout.aide_courte = \
"Entrez la |ent|quantité|ff| d'énergie magique nécessaire pour " \
"lancer ce sort ou |cmd|/|ff| pour\nrevenir à la fenêtre " \
"parente.\n\n" \
"Coût : |bc|{objet.cout}|ff|"
        # Duration
duree = self.ajouter_choix("durée de concentration", "u", Entier, sort,
"duree", 1)
duree.parent = self
duree.prompt = "Durée de concentration : "
duree.apercu = "{objet.duree}"
duree.aide_courte = \
"Entrez la |ent|durée|ff| de concentration du sort, en " \
"secondes, ou |cmd|/|ff| pour revenir à\nla fenêtre parente. La " \
"durée diminue automatiquement quand la maîtrise du sort\n" \
"augmente ; la valeur entrée correspond au temps maximum.\n\n" \
"Durée actuelle : |bc|{objet.duree}|ff|"
        # Offensive
offensif = self.ajouter_choix("offensif", "of", Flag, sort,
"offensif")
offensif.parent = self
# Distance
distance = self.ajouter_choix("distance", "t", Flag, sort,
"distance")
distance.parent = self
# Script
scripts = self.ajouter_choix("scripts", "sc", EdtScript,
sort.script)
scripts.parent = self
        # Deletion
suppression = self.ajouter_choix("supprimer", "sup", NSupprimer, \
sort)
suppression.parent = self
suppression.aide_courte = "Souhaitez-vous réellement supprimer " \
"le sort {} ?".format(sort.nom)
suppression.action = "magie.supprimer_sort"
suppression.confirme = "Le sort {} a bien été supprimé.".format(
sort.nom)
|
vlegoff/tsunami
|
src/secondaires/magie/editeurs/spedit/__init__.py
|
Python
|
bsd-3-clause
| 9,326
|
"""Convert a MiSeq samplesheet to a valid HiSeq samplesheet
"""
import argparse
from scilifelab.illumina.miseq import MiSeqSampleSheet
from scilifelab.illumina.hiseq import HiSeqSampleSheet
def main(miseq_samplesheet, hiseq_samplesheet):
m_samplesheet = MiSeqSampleSheet(miseq_samplesheet)
h_samplesheet = HiSeqSampleSheet(m_samplesheet.to_hiseq())
if hiseq_samplesheet is None:
hiseq_samplesheet = miseq_samplesheet + ".out"
h_samplesheet.write(hiseq_samplesheet)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("miseq_samplesheet", \
help="Samplesheet in MiSeq Samplesheet format")
parser.add_argument("-o", "--out", \
default=None, \
help="Output Samplesheet in HiSeq format")
args = parser.parse_args()
main(args.miseq_samplesheet, args.out)
|
SciLifeLab/scilifelab
|
scripts/mi2hi_samplesheet.py
|
Python
|
mit
| 858
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2014, Dimitrios Tydeas Mengidis <tydeas.dr@gmail.com>
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: composer
author:
- "Dimitrios Tydeas Mengidis (@dmtrs)"
- "René Moser (@resmo)"
short_description: Dependency Manager for PHP
version_added: "1.6"
description:
    - Composer is a tool for dependency management in PHP. It allows you to declare the dependent libraries your project needs, and it will install them in your project for you.
options:
command:
version_added: "1.8"
description:
- Composer command like "install", "update" and so on
required: false
default: install
arguments:
version_added: "2.0"
description:
- Composer arguments like required package, version and so on
required: false
default: null
working_dir:
description:
- Directory of your project ( see --working-dir )
required: true
default: null
aliases: [ "working-dir" ]
prefer_source:
description:
- Forces installation from package sources when possible ( see --prefer-source )
required: false
default: "no"
choices: [ "yes", "no" ]
aliases: [ "prefer-source" ]
prefer_dist:
description:
- Forces installation from package dist even for dev versions ( see --prefer-dist )
required: false
default: "no"
choices: [ "yes", "no" ]
aliases: [ "prefer-dist" ]
no_dev:
description:
- Disables installation of require-dev packages ( see --no-dev )
required: false
default: "yes"
choices: [ "yes", "no" ]
aliases: [ "no-dev" ]
no_scripts:
description:
- Skips the execution of all scripts defined in composer.json ( see --no-scripts )
required: false
default: "no"
choices: [ "yes", "no" ]
aliases: [ "no-scripts" ]
no_plugins:
description:
- Disables all plugins ( see --no-plugins )
required: false
default: "no"
choices: [ "yes", "no" ]
aliases: [ "no-plugins" ]
optimize_autoloader:
description:
- Optimize autoloader during autoloader dump ( see --optimize-autoloader ). Convert PSR-0/4 autoloading to classmap to get a faster autoloader. This is recommended especially for production, but can take a bit of time to run so it is currently not done by default.
required: false
default: "yes"
choices: [ "yes", "no" ]
aliases: [ "optimize-autoloader" ]
ignore_platform_reqs:
version_added: "2.0"
description:
- Ignore php, hhvm, lib-* and ext-* requirements and force the installation even if the local machine does not fulfill these.
required: false
default: "no"
choices: [ "yes", "no" ]
aliases: [ "ignore-platform-reqs" ]
requirements:
- php
- composer installed in bin path (recommended /usr/local/bin)
notes:
- Default options that are always appended in each execution are --no-ansi, --no-interaction and --no-progress if available.
- We received reports about issues on macOS if composer was installed by Homebrew. Please use the official install method to avoid it.
'''
EXAMPLES = '''
# Downloads and installs all the libs and dependencies outlined in the /path/to/project/composer.lock
- composer:
command: install
working_dir: /path/to/project
- composer:
command: require
arguments: my/package
working_dir: /path/to/project
# Clone project and install with all dependencies
- composer:
command: create-project
arguments: package/package /path/to/project ~1.0
working_dir: /path/to/project
prefer_dist: yes
'''
import os
import re
try:
import json
except ImportError:
try:
import simplejson as json
except ImportError:
# Let snippet from module_utils/basic.py return a proper error in this case
pass
def parse_out(string):
    return re.sub(r"\s+", " ", string).strip()
def has_changed(string):
return "Nothing to install or update" not in string
def get_available_options(module, command='install'):
# get all available options from a composer command using composer help to json
rc, out, err = composer_command(module, "help %s --format=json" % command)
if rc != 0:
output = parse_out(err)
module.fail_json(msg=output)
command_help_json = json.loads(out)
return command_help_json['definition']['options']
def composer_command(module, command, arguments="", options=None):
    if options is None:
        options = []
    php_path = module.get_bin_path("php", True, ["/usr/local/bin"])
    composer_path = module.get_bin_path("composer", True, ["/usr/local/bin"])
    cmd = "%s %s %s %s %s" % (php_path, composer_path, command, " ".join(options), arguments)
    return module.run_command(cmd)
def main():
module = AnsibleModule(
argument_spec = dict(
command = dict(default="install", type="str", required=False),
arguments = dict(default="", type="str", required=False),
working_dir = dict(aliases=["working-dir"], required=True),
prefer_source = dict(default="no", type="bool", aliases=["prefer-source"]),
prefer_dist = dict(default="no", type="bool", aliases=["prefer-dist"]),
no_dev = dict(default="yes", type="bool", aliases=["no-dev"]),
no_scripts = dict(default="no", type="bool", aliases=["no-scripts"]),
no_plugins = dict(default="no", type="bool", aliases=["no-plugins"]),
optimize_autoloader = dict(default="yes", type="bool", aliases=["optimize-autoloader"]),
ignore_platform_reqs = dict(default="no", type="bool", aliases=["ignore-platform-reqs"]),
),
supports_check_mode=True
)
# Get composer command with fallback to default
command = module.params['command']
if re.search(r"\s", command):
module.fail_json(msg="Use the 'arguments' param for passing arguments with the 'command'")
arguments = module.params['arguments']
available_options = get_available_options(module=module, command=command)
options = []
# Default options
default_options = [
'no-ansi',
'no-interaction',
'no-progress',
]
for option in default_options:
if option in available_options:
option = "--%s" % option
options.append(option)
options.extend(['--working-dir', os.path.abspath(module.params['working_dir'])])
option_params = {
'prefer_source': 'prefer-source',
'prefer_dist': 'prefer-dist',
'no_dev': 'no-dev',
'no_scripts': 'no-scripts',
        'no_plugins': 'no-plugins',
'optimize_autoloader': 'optimize-autoloader',
'ignore_platform_reqs': 'ignore-platform-reqs',
}
for param, option in option_params.items():
if module.params.get(param) and option in available_options:
option = "--%s" % option
options.append(option)
if module.check_mode:
options.append('--dry-run')
rc, out, err = composer_command(module, command, arguments, options)
if rc != 0:
output = parse_out(err)
module.fail_json(msg=output, stdout=err)
else:
# Composer version > 1.0.0-alpha9 now use stderr for standard notification messages
output = parse_out(out + err)
module.exit_json(changed=has_changed(output), msg=output, stdout=out+err)
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
|
slank/ansible
|
lib/ansible/modules/packaging/language/composer.py
|
Python
|
gpl-3.0
| 8,639
|
"""
This block defines a Staff Graded Assignment. Students are shown a rubric
and invited to upload a file which is then graded by staff.
"""
import datetime
import hashlib
import json
import logging
import mimetypes
import os
import pkg_resources
import pytz
from functools import partial
from courseware.models import StudentModule
from django.core.exceptions import PermissionDenied
from django.core.files import File
from django.core.files.storage import default_storage
from django.conf import settings
from django.template import Context, Template
from student.models import user_by_anonymous_id
from submissions import api as submissions_api
from submissions.models import StudentItem as SubmissionsStudent
from webob.response import Response
from xblock.core import XBlock
from xblock.exceptions import JsonHandlerError
from xblock.fields import DateTime, Scope, String, Float, Integer
from xblock.fragment import Fragment
from xmodule.util.duedate import get_extended_due_date
log = logging.getLogger(__name__)
BLOCK_SIZE = 8 * 1024
def reify(meth):
"""
    Decorator which caches the method's return value on the instance so it is
    only computed once.
"""
def getter(inst):
"""
Set value to meth name in dict and returns value.
"""
value = meth(inst)
inst.__dict__[meth.__name__] = value
return value
return property(getter)
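# Usage sketch (the Report class and expensive_sum helper are hypothetical):
# reify behaves like @property, except the computed value is stored in the
# instance __dict__ under the method name, so later lookups bypass the
# descriptor entirely.
#
#     class Report(object):
#         @reify
#         def total(self):
#             return expensive_sum()  # runs once per instance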
class StaffGradedAssignmentXBlock(XBlock):
"""
This block defines a Staff Graded Assignment. Students are shown a rubric
and invited to upload a file which is then graded by staff.
"""
has_score = True
icon_class = 'problem'
STUDENT_FILEUPLOAD_MAX_SIZE = 4 * 1000 * 1000 # 4 MB
display_name = String(
default='Staff Graded Assignment', scope=Scope.settings,
help="This name appears in the horizontal navigation at the top of "
"the page."
)
weight = Float(
display_name="Problem Weight",
help=("Defines the number of points each problem is worth. "
"If the value is not set, the problem is worth the sum of the "
"option point values."),
values={"min": 0, "step": .1},
scope=Scope.settings
)
points = Integer(
display_name="Maximum score",
help=("Maximum grade score given to assignment by staff."),
default=100,
scope=Scope.settings
)
staff_score = Integer(
display_name="Score assigned by non-instructor staff",
help=("Score will need to be approved by instructor before being "
"published."),
default=None,
scope=Scope.settings
)
comment = String(
display_name="Instructor comment",
default='',
scope=Scope.user_state,
help="Feedback given to student by instructor."
)
annotated_sha1 = String(
display_name="Annotated SHA1",
scope=Scope.user_state,
default=None,
help=("sha1 of the annotated file uploaded by the instructor for "
"this assignment.")
)
annotated_filename = String(
display_name="Annotated file name",
scope=Scope.user_state,
default=None,
help="The name of the annotated file uploaded for this assignment."
)
annotated_mimetype = String(
display_name="Mime type of annotated file",
scope=Scope.user_state,
default=None,
help="The mimetype of the annotated file uploaded for this assignment."
)
annotated_timestamp = DateTime(
display_name="Timestamp",
scope=Scope.user_state,
default=None,
help="When the annotated file was uploaded")
def max_score(self):
"""
Return the maximum score possible.
"""
return self.points
@reify
def block_id(self):
"""
Return the usage_id of the block.
"""
return self.scope_ids.usage_id
def student_submission_id(self, submission_id=None):
# pylint: disable=no-member
"""
Returns dict required by the submissions app for creating and
retrieving submissions for a particular student.
"""
if submission_id is None:
submission_id = self.xmodule_runtime.anonymous_student_id
        assert submission_id != 'MOCK', \
            "Forgot to call 'personalize' in test."
return {
"student_id": submission_id,
"course_id": self.course_id,
"item_id": self.block_id,
"item_type": 'sga', # ???
}
def get_submission(self, submission_id=None):
"""
Get student's most recent submission.
"""
submissions = submissions_api.get_submissions(
self.student_submission_id(submission_id))
if submissions:
# If I understand docs correctly, most recent submission should
# be first
return submissions[0]
def get_score(self, submission_id=None):
"""
Return student's current score.
"""
score = submissions_api.get_score(
self.student_submission_id(submission_id)
)
if score:
return score['points_earned']
@reify
def score(self):
"""
Return score from submissions.
"""
return self.get_score()
def student_view(self, context=None):
# pylint: disable=no-member
"""
The primary view of the StaffGradedAssignmentXBlock, shown to students
when viewing courses.
"""
context = {
"student_state": json.dumps(self.student_state()),
"id": self.location.name.replace('.', '_'),
"max_file_size": getattr(
settings, "STUDENT_FILEUPLOAD_MAX_SIZE",
self.STUDENT_FILEUPLOAD_MAX_SIZE
)
}
if self.show_staff_grading_interface():
context['is_course_staff'] = True
self.update_staff_debug_context(context)
fragment = Fragment()
fragment.add_content(
render_template(
'templates/staff_graded_assignment/show.html',
context
)
)
fragment.add_css(_resource("static/css/edx_sga.css"))
fragment.add_javascript(_resource("static/js/src/edx_sga.js"))
fragment.add_javascript(_resource("static/js/src/jquery.tablesorter.min.js"))
fragment.initialize_js('StaffGradedAssignmentXBlock')
return fragment
def update_staff_debug_context(self, context):
# pylint: disable=no-member
"""
Add context info for the Staff Debug interface.
"""
published = self.start
context['is_released'] = published and published < _now()
context['location'] = self.location
context['category'] = type(self).__name__
context['fields'] = [
(name, field.read_from(self))
for name, field in self.fields.items()]
def student_state(self):
"""
Returns a JSON serializable representation of student's state for
rendering in client view.
"""
submission = self.get_submission()
if submission:
uploaded = {"filename": submission['answer']['filename']}
else:
uploaded = None
if self.annotated_sha1:
annotated = {"filename": self.annotated_filename}
else:
annotated = None
score = self.score
if score is not None:
graded = {'score': score, 'comment': self.comment}
else:
graded = None
return {
"display_name": self.display_name,
"uploaded": uploaded,
"annotated": annotated,
"graded": graded,
"max_score": self.max_score(),
"upload_allowed": self.upload_allowed(),
}
def staff_grading_data(self):
"""
Return student assignment information for display on the
grading screen.
"""
def get_student_data():
# pylint: disable=no-member
"""
Returns a dict of student assignment information along with
annotated file name, student id and module id, this
information will be used on grading screen
"""
# Submissions doesn't have API for this, just use model directly.
students = SubmissionsStudent.objects.filter(
course_id=self.course_id,
item_id=self.block_id)
for student in students:
submission = self.get_submission(student.student_id)
if not submission:
continue
user = user_by_anonymous_id(student.student_id)
if not user:
continue
module, created = StudentModule.objects.get_or_create(
course_id=self.course_id,
module_state_key=self.location,
student=user,
defaults={
'state': '{}',
'module_type': self.category,
})
if created:
log.info(
"Init for course:%s module:%s student:%s ",
module.course_id,
module.module_state_key,
module.student.username
)
state = json.loads(module.state)
score = self.get_score(student.student_id)
approved = score is not None
if score is None:
score = state.get('staff_score')
needs_approval = score is not None
else:
needs_approval = False
instructor = self.is_instructor()
yield {
'module_id': module.id,
'student_id': student.student_id,
'submission_id': submission['uuid'],
'username': module.student.username,
'fullname': module.student.profile.name,
'filename': submission['answer']["filename"],
'timestamp': submission['created_at'].strftime(
DateTime.DATETIME_FORMAT
),
'score': score,
'approved': approved,
'needs_approval': instructor and needs_approval,
'may_grade': instructor or not approved,
'annotated': state.get("annotated_filename"),
'comment': state.get("comment", ''),
}
return {
'assignments': list(get_student_data()),
'max_score': self.max_score(),
'display_name': self.display_name
}
def studio_view(self, context=None):
"""
Return fragment for editing block in studio.
"""
try:
cls = type(self)
def none_to_empty(data):
"""
Return empty string if data is None else return data.
"""
return data if data is not None else ''
edit_fields = (
(field, none_to_empty(getattr(self, field.name)), validator)
for field, validator in (
(cls.display_name, 'string'),
(cls.points, 'number'),
(cls.weight, 'number'))
)
context = {
'fields': edit_fields
}
fragment = Fragment()
fragment.add_content(
render_template(
'templates/staff_graded_assignment/edit.html',
context
)
)
fragment.add_javascript(_resource("static/js/src/studio.js"))
fragment.initialize_js('StaffGradedAssignmentXBlock')
return fragment
except: # pragma: NO COVER
log.error("Don't swallow my exceptions", exc_info=True)
raise
@XBlock.json_handler
def save_sga(self, data, suffix=''):
# pylint: disable=unused-argument
"""
Persist block data when updating settings in studio.
"""
self.display_name = data.get('display_name', self.display_name)
# Validate points before saving
points = data.get('points', self.points)
# Check that we are an int
try:
points = int(points)
except ValueError:
raise JsonHandlerError(400, 'Points must be an integer')
# Check that we are positive
if points < 0:
raise JsonHandlerError(400, 'Points must be a positive integer')
self.points = points
# Validate weight before saving
weight = data.get('weight', self.weight)
# Check that weight is a float.
if weight:
try:
weight = float(weight)
except ValueError:
raise JsonHandlerError(400, 'Weight must be a decimal number')
# Check that we are positive
if weight < 0:
raise JsonHandlerError(
400, 'Weight must be a positive decimal number'
)
self.weight = weight
@XBlock.handler
def upload_assignment(self, request, suffix=''):
# pylint: disable=unused-argument
"""
        Save a student's submission file.
"""
require(self.upload_allowed())
upload = request.params['assignment']
sha1 = _get_sha1(upload.file)
answer = {
"sha1": sha1,
"filename": upload.file.name,
"mimetype": mimetypes.guess_type(upload.file.name)[0],
}
student_id = self.student_submission_id()
submissions_api.create_submission(student_id, answer)
path = self._file_storage_path(sha1, upload.file.name)
if not default_storage.exists(path):
default_storage.save(path, File(upload.file))
return Response(json_body=self.student_state())
@XBlock.handler
def staff_upload_annotated(self, request, suffix=''):
# pylint: disable=unused-argument
"""
Save annotated assignment from staff.
"""
require(self.is_course_staff())
upload = request.params['annotated']
module = StudentModule.objects.get(pk=request.params['module_id'])
state = json.loads(module.state)
state['annotated_sha1'] = sha1 = _get_sha1(upload.file)
state['annotated_filename'] = filename = upload.file.name
state['annotated_mimetype'] = mimetypes.guess_type(upload.file.name)[0]
state['annotated_timestamp'] = _now().strftime(
DateTime.DATETIME_FORMAT
)
path = self._file_storage_path(sha1, filename)
if not default_storage.exists(path):
default_storage.save(path, File(upload.file))
module.state = json.dumps(state)
module.save()
log.info(
"staff_upload_annotated for course:%s module:%s student:%s ",
module.course_id,
module.module_state_key,
module.student.username
)
return Response(json_body=self.staff_grading_data())
@XBlock.handler
def download_assignment(self, request, suffix=''):
# pylint: disable=unused-argument
"""
Fetch student assignment from storage and return it.
"""
answer = self.get_submission()['answer']
path = self._file_storage_path(answer['sha1'], answer['filename'])
return self.download(path, answer['mimetype'], answer['filename'])
@XBlock.handler
def download_annotated(self, request, suffix=''):
# pylint: disable=unused-argument
"""
Fetch assignment with staff annotations from storage and return it.
"""
path = self._file_storage_path(
self.annotated_sha1,
self.annotated_filename,
)
return self.download(
path,
self.annotated_mimetype,
self.annotated_filename
)
@XBlock.handler
def staff_download(self, request, suffix=''):
# pylint: disable=unused-argument
"""
Return an assignment file requested by staff.
"""
require(self.is_course_staff())
submission = self.get_submission(request.params['student_id'])
answer = submission['answer']
path = self._file_storage_path(answer['sha1'], answer['filename'])
return self.download(
path,
answer['mimetype'],
answer['filename'],
require_staff=True
)
@XBlock.handler
def staff_download_annotated(self, request, suffix=''):
# pylint: disable=unused-argument
"""
Return annotated assignment file requested by staff.
"""
require(self.is_course_staff())
module = StudentModule.objects.get(pk=request.params['module_id'])
state = json.loads(module.state)
path = self._file_storage_path(
state['annotated_sha1'],
state['annotated_filename']
)
return self.download(
path,
state['annotated_mimetype'],
state['annotated_filename'],
require_staff=True
)
def download(self, path, mime_type, filename, require_staff=False):
"""
Return a file from storage and return in a Response.
"""
try:
file_descriptor = default_storage.open(path)
app_iter = iter(partial(file_descriptor.read, BLOCK_SIZE), '')
return Response(
app_iter=app_iter,
content_type=mime_type,
content_disposition="attachment; filename=" + filename.encode('utf-8'))
except IOError:
if require_staff:
return Response(
"Sorry, assignment {} cannot be found at"
" {}. Please contact {}".format(
filename.encode('utf-8'), path, settings.TECH_SUPPORT_EMAIL
),
status_code=404
)
return Response(
"Sorry, the file you uploaded, {}, cannot be"
" found. Please try uploading it again or contact"
" course staff".format(filename.encode('utf-8')),
status_code=404
)
@XBlock.handler
def get_staff_grading_data(self, request, suffix=''):
# pylint: disable=unused-argument
"""
        Return the HTML for the staff grading view.
"""
require(self.is_course_staff())
return Response(json_body=self.staff_grading_data())
@XBlock.handler
def enter_grade(self, request, suffix=''):
# pylint: disable=unused-argument
"""
Persist a score for a student given by staff.
"""
require(self.is_course_staff())
module = StudentModule.objects.get(pk=request.params['module_id'])
state = json.loads(module.state)
score = int(request.params['grade'])
if self.is_instructor():
uuid = request.params['submission_id']
submissions_api.set_score(uuid, score, self.max_score())
else:
state['staff_score'] = score
state['comment'] = request.params.get('comment', '')
module.state = json.dumps(state)
module.save()
log.info(
"enter_grade for course:%s module:%s student:%s",
module.course_id,
module.module_state_key,
module.student.username
)
return Response(json_body=self.staff_grading_data())
@XBlock.handler
def remove_grade(self, request, suffix=''):
# pylint: disable=unused-argument
"""
        Reset a student's score, as requested by staff.
"""
require(self.is_course_staff())
student_id = request.params['student_id']
submissions_api.reset_score(student_id, self.course_id, self.block_id)
module = StudentModule.objects.get(pk=request.params['module_id'])
state = json.loads(module.state)
state['staff_score'] = None
state['comment'] = ''
state['annotated_sha1'] = None
state['annotated_filename'] = None
state['annotated_mimetype'] = None
state['annotated_timestamp'] = None
module.state = json.dumps(state)
module.save()
log.info(
"remove_grade for course:%s module:%s student:%s",
module.course_id,
module.module_state_key,
module.student.username
)
return Response(json_body=self.staff_grading_data())
def is_course_staff(self):
# pylint: disable=no-member
"""
Check if user is course staff.
"""
return getattr(self.xmodule_runtime, 'user_is_staff', False)
def is_instructor(self):
# pylint: disable=no-member
"""
Check if user role is instructor.
"""
return self.xmodule_runtime.get_user_role() == 'instructor'
def show_staff_grading_interface(self):
"""
        Return whether the current user is staff and not in studio preview.
"""
in_studio_preview = self.scope_ids.user_id is None
return self.is_course_staff() and not in_studio_preview
def past_due(self):
"""
Return whether due date has passed.
"""
due = get_extended_due_date(self)
if due is not None:
return _now() > due
return False
def upload_allowed(self):
"""
Return whether student is allowed to submit an assignment.
"""
return not self.past_due() and self.score is None
def _file_storage_path(self, sha1, filename):
# pylint: disable=no-member
"""
Get file path of storage.
"""
path = (
'{loc.org}/{loc.course}/{loc.block_type}/{loc.block_id}'
'/{sha1}{ext}'.format(
loc=self.location,
sha1=sha1,
ext=os.path.splitext(filename)[1]
)
)
return path
def _get_sha1(file_descriptor):
"""
Get file hex digest (fingerprint).
"""
sha1 = hashlib.sha1()
for block in iter(partial(file_descriptor.read, BLOCK_SIZE), ''):
sha1.update(block)
file_descriptor.seek(0)
return sha1.hexdigest()
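# Note: hashing in BLOCK_SIZE chunks keeps memory use flat for large uploads,
# and the final seek(0) rewinds the descriptor so callers can still save the
# same file object to storage afterwards.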
def _resource(path): # pragma: NO COVER
"""
Handy helper for getting resources from our kit.
"""
data = pkg_resources.resource_string(__name__, path)
return data.decode("utf8")
def _now():
"""
Get current date and time.
"""
return datetime.datetime.utcnow().replace(tzinfo=pytz.utc)
def load_resource(resource_path): # pragma: NO COVER
"""
Gets the content of a resource
"""
resource_content = pkg_resources.resource_string(__name__, resource_path)
return unicode(resource_content)
def render_template(template_path, context=None): # pragma: NO COVER
"""
Evaluate a template by resource path, applying the provided context.
"""
if context is None:
context = {}
template_str = load_resource(template_path)
template = Template(template_str)
return template.render(Context(context))
def require(assertion):
"""
Raises PermissionDenied if assertion is not true.
"""
if not assertion:
raise PermissionDenied
|
RPI-OPENEDX/edx-sga
|
edx_sga/sga.py
|
Python
|
agpl-3.0
| 23,821
|
import os
from setuptools import setup, find_packages
with open(
os.path.join(os.path.dirname(__file__), 'README.md'),
encoding='utf-8'
) as fh:
long_description = fh.read()
setup(name='damn-simple-jsonrpc-server',
version='0.4.4.post1',
description='Damn simple, framework-agnostic JSON-RPC server',
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Web Environment",
"License :: OSI Approved :: BSD License",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Software Development :: Libraries :: Python Modules",
],
author='Marcin Nowak',
author_email='marcin.j.nowak@gmail.com',
url='https://github.com/marcinn/json-rpc-server',
keywords='web json rpc python server',
long_description=long_description,
long_description_content_type='text/markdown',
packages=find_packages('.'),
include_package_data=True,
zip_safe=False,
)
|
marcinn/json-rpc-server
|
setup.py
|
Python
|
bsd-2-clause
| 1,124
|
# Copyright (c) 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo.config import cfg
import stevedore
from neutron.api.v2 import attributes
from neutron.common import exceptions as exc
from neutron.extensions import multiprovidernet as mpnet
from neutron.extensions import portbindings
from neutron.extensions import providernet as provider
from neutron.i18n import _LE, _LI
from neutron.openstack.common import log
from neutron.plugins.ml2.common import exceptions as ml2_exc
from neutron.plugins.ml2 import db
from neutron.plugins.ml2 import driver_api as api
LOG = log.getLogger(__name__)
class TypeManager(stevedore.named.NamedExtensionManager):
"""Manage network segment types using drivers."""
def __init__(self):
# Mapping from type name to DriverManager
self.drivers = {}
LOG.info(_LI("Configured type driver names: %s"),
cfg.CONF.ml2.type_drivers)
super(TypeManager, self).__init__('neutron.ml2.type_drivers',
cfg.CONF.ml2.type_drivers,
invoke_on_load=True)
LOG.info(_LI("Loaded type driver names: %s"), self.names())
self._register_types()
self._check_tenant_network_types(cfg.CONF.ml2.tenant_network_types)
def _register_types(self):
for ext in self:
network_type = ext.obj.get_type()
if network_type in self.drivers:
LOG.error(_LE("Type driver '%(new_driver)s' ignored because"
" type driver '%(old_driver)s' is already"
" registered for type '%(type)s'"),
{'new_driver': ext.name,
'old_driver': self.drivers[network_type].name,
'type': network_type})
else:
self.drivers[network_type] = ext
LOG.info(_LI("Registered types: %s"), self.drivers.keys())
def _check_tenant_network_types(self, types):
self.tenant_network_types = []
for network_type in types:
if network_type in self.drivers:
self.tenant_network_types.append(network_type)
else:
LOG.error(_LE("No type driver for tenant network_type: %s. "
"Service terminated!"), network_type)
raise SystemExit(1)
LOG.info(_LI("Tenant network_types: %s"), self.tenant_network_types)
def _process_provider_segment(self, segment):
network_type = self._get_attribute(segment, provider.NETWORK_TYPE)
physical_network = self._get_attribute(segment,
provider.PHYSICAL_NETWORK)
segmentation_id = self._get_attribute(segment,
provider.SEGMENTATION_ID)
if attributes.is_attr_set(network_type):
segment = {api.NETWORK_TYPE: network_type,
api.PHYSICAL_NETWORK: physical_network,
api.SEGMENTATION_ID: segmentation_id}
self.validate_provider_segment(segment)
return segment
msg = _("network_type required")
raise exc.InvalidInput(error_message=msg)
def _process_provider_create(self, network):
if any(attributes.is_attr_set(network.get(f))
for f in (provider.NETWORK_TYPE, provider.PHYSICAL_NETWORK,
provider.SEGMENTATION_ID)):
# Verify that multiprovider and provider attributes are not set
# at the same time.
if attributes.is_attr_set(network.get(mpnet.SEGMENTS)):
raise mpnet.SegmentsSetInConjunctionWithProviders()
network_type = self._get_attribute(network, provider.NETWORK_TYPE)
physical_network = self._get_attribute(network,
provider.PHYSICAL_NETWORK)
segmentation_id = self._get_attribute(network,
provider.SEGMENTATION_ID)
segments = [{provider.NETWORK_TYPE: network_type,
provider.PHYSICAL_NETWORK: physical_network,
provider.SEGMENTATION_ID: segmentation_id}]
return [self._process_provider_segment(s) for s in segments]
elif attributes.is_attr_set(network.get(mpnet.SEGMENTS)):
segments = [self._process_provider_segment(s)
for s in network[mpnet.SEGMENTS]]
mpnet.check_duplicate_segments(
segments,
self.is_partial_segment)
return segments
def _get_attribute(self, attrs, key):
value = attrs.get(key)
if value is attributes.ATTR_NOT_SPECIFIED:
value = None
return value
def _extend_network_dict_provider(self, context, network):
id = network['id']
segments = db.get_network_segments(context.session, id)
if not segments:
LOG.error(_LE("Network %s has no segments"), id)
network[provider.NETWORK_TYPE] = None
network[provider.PHYSICAL_NETWORK] = None
network[provider.SEGMENTATION_ID] = None
elif len(segments) > 1:
network[mpnet.SEGMENTS] = [
{provider.NETWORK_TYPE: segment[api.NETWORK_TYPE],
provider.PHYSICAL_NETWORK: segment[api.PHYSICAL_NETWORK],
provider.SEGMENTATION_ID: segment[api.SEGMENTATION_ID]}
for segment in segments]
else:
segment = segments[0]
network[provider.NETWORK_TYPE] = segment[api.NETWORK_TYPE]
network[provider.PHYSICAL_NETWORK] = segment[api.PHYSICAL_NETWORK]
network[provider.SEGMENTATION_ID] = segment[api.SEGMENTATION_ID]
def initialize(self):
for network_type, driver in self.drivers.iteritems():
LOG.info(_LI("Initializing driver for type '%s'"), network_type)
driver.obj.initialize()
def create_network_segments(self, context, network, tenant_id):
"""Call type drivers to create network segments."""
segments = self._process_provider_create(network)
session = context.session
with session.begin(subtransactions=True):
network_id = network['id']
if segments:
for segment_index, segment in enumerate(segments):
segment = self.reserve_provider_segment(
session, segment)
db.add_network_segment(session, network_id,
segment, segment_index)
else:
segment = self.allocate_tenant_segment(session)
db.add_network_segment(session, network_id, segment)
def is_partial_segment(self, segment):
network_type = segment[api.NETWORK_TYPE]
driver = self.drivers.get(network_type)
if driver:
return driver.obj.is_partial_segment(segment)
else:
msg = _("network_type value '%s' not supported") % network_type
raise exc.InvalidInput(error_message=msg)
def validate_provider_segment(self, segment):
network_type = segment[api.NETWORK_TYPE]
driver = self.drivers.get(network_type)
if driver:
driver.obj.validate_provider_segment(segment)
else:
msg = _("network_type value '%s' not supported") % network_type
raise exc.InvalidInput(error_message=msg)
def reserve_provider_segment(self, session, segment):
network_type = segment.get(api.NETWORK_TYPE)
driver = self.drivers.get(network_type)
return driver.obj.reserve_provider_segment(session, segment)
def allocate_tenant_segment(self, session):
for network_type in self.tenant_network_types:
driver = self.drivers.get(network_type)
segment = driver.obj.allocate_tenant_segment(session)
if segment:
return segment
raise exc.NoNetworkAvailable()
def release_network_segments(self, session, network_id):
segments = db.get_network_segments(session, network_id,
filter_dynamic=None)
for segment in segments:
network_type = segment.get(api.NETWORK_TYPE)
driver = self.drivers.get(network_type)
if driver:
driver.obj.release_segment(session, segment)
else:
LOG.error(_LE("Failed to release segment '%s' because "
"network type is not supported."), segment)
def allocate_dynamic_segment(self, session, network_id, segment):
"""Allocate a dynamic segment using a partial or full segment dict."""
dynamic_segment = db.get_dynamic_segment(
session, network_id, segment.get(api.PHYSICAL_NETWORK),
segment.get(api.SEGMENTATION_ID))
if dynamic_segment:
return dynamic_segment
driver = self.drivers.get(segment.get(api.NETWORK_TYPE))
dynamic_segment = driver.obj.reserve_provider_segment(session, segment)
db.add_network_segment(session, network_id, dynamic_segment,
is_dynamic=True)
return dynamic_segment
def release_dynamic_segment(self, session, segment_id):
"""Delete a dynamic segment."""
segment = db.get_segment_by_id(session, segment_id)
if segment:
driver = self.drivers.get(segment.get(api.NETWORK_TYPE))
if driver:
driver.obj.release_segment(session, segment)
db.delete_network_segment(session, segment_id)
else:
LOG.error(_LE("Failed to release segment '%s' because "
"network type is not supported."), segment)
else:
LOG.debug("No segment found with id %(segment_id)s", segment_id)
class MechanismManager(stevedore.named.NamedExtensionManager):
"""Manage networking mechanisms using drivers."""
def __init__(self):
# Registered mechanism drivers, keyed by name.
self.mech_drivers = {}
# Ordered list of mechanism drivers, defining
# the order in which the drivers are called.
self.ordered_mech_drivers = []
LOG.info(_LI("Configured mechanism driver names: %s"),
cfg.CONF.ml2.mechanism_drivers)
super(MechanismManager, self).__init__('neutron.ml2.mechanism_drivers',
cfg.CONF.ml2.mechanism_drivers,
invoke_on_load=True,
name_order=True)
LOG.info(_LI("Loaded mechanism driver names: %s"), self.names())
self._register_mechanisms()
def _register_mechanisms(self):
"""Register all mechanism drivers.
This method should only be called once in the MechanismManager
constructor.
"""
for ext in self:
self.mech_drivers[ext.name] = ext
self.ordered_mech_drivers.append(ext)
LOG.info(_LI("Registered mechanism drivers: %s"),
[driver.name for driver in self.ordered_mech_drivers])
def initialize(self):
for driver in self.ordered_mech_drivers:
LOG.info(_LI("Initializing mechanism driver '%s'"), driver.name)
driver.obj.initialize()
def _call_on_drivers(self, method_name, context,
continue_on_failure=False):
"""Helper method for calling a method across all mechanism drivers.
:param method_name: name of the method to call
:param context: context parameter to pass to each method call
:param continue_on_failure: whether or not to continue to call
all mechanism drivers once one has raised an exception
:raises: neutron.plugins.ml2.common.MechanismDriverError
if any mechanism driver call fails.
"""
error = False
for driver in self.ordered_mech_drivers:
try:
getattr(driver.obj, method_name)(context)
except Exception:
LOG.exception(
_LE("Mechanism driver '%(name)s' failed in %(method)s"),
{'name': driver.name, 'method': method_name}
)
error = True
if not continue_on_failure:
break
if error:
raise ml2_exc.MechanismDriverError(
method=method_name
)
def create_network_precommit(self, context):
"""Notify all mechanism drivers during network creation.
:raises: neutron.plugins.ml2.common.MechanismDriverError
if any mechanism driver create_network_precommit call fails.
Called within the database transaction. If a mechanism driver
        raises an exception, then a MechanismDriverError is propagated
to the caller, triggering a rollback. There is no guarantee
that all mechanism drivers are called in this case.
"""
self._call_on_drivers("create_network_precommit", context)
def create_network_postcommit(self, context):
"""Notify all mechanism drivers after network creation.
:raises: neutron.plugins.ml2.common.MechanismDriverError
if any mechanism driver create_network_postcommit call fails.
Called after the database transaction. If a mechanism driver
raises an exception, then a MechanismDriverError is propagated
to the caller, where the network will be deleted, triggering
any required cleanup. There is no guarantee that all mechanism
drivers are called in this case.
"""
self._call_on_drivers("create_network_postcommit", context)
def update_network_precommit(self, context):
"""Notify all mechanism drivers during network update.
:raises: neutron.plugins.ml2.common.MechanismDriverError
if any mechanism driver update_network_precommit call fails.
Called within the database transaction. If a mechanism driver
        raises an exception, then a MechanismDriverError is propagated
to the caller, triggering a rollback. There is no guarantee
that all mechanism drivers are called in this case.
"""
self._call_on_drivers("update_network_precommit", context)
def update_network_postcommit(self, context):
"""Notify all mechanism drivers after network update.
:raises: neutron.plugins.ml2.common.MechanismDriverError
if any mechanism driver update_network_postcommit call fails.
Called after the database transaction. If any mechanism driver
raises an error, then the error is logged but we continue to
call every other mechanism driver. A MechanismDriverError is
then reraised at the end to notify the caller of a failure.
"""
self._call_on_drivers("update_network_postcommit", context,
continue_on_failure=True)
def delete_network_precommit(self, context):
"""Notify all mechanism drivers during network deletion.
:raises: neutron.plugins.ml2.common.MechanismDriverError
if any mechanism driver delete_network_precommit call fails.
Called within the database transaction. If a mechanism driver
        raises an exception, then a MechanismDriverError is propagated
to the caller, triggering a rollback. There is no guarantee
that all mechanism drivers are called in this case.
"""
self._call_on_drivers("delete_network_precommit", context)
def delete_network_postcommit(self, context):
"""Notify all mechanism drivers after network deletion.
:raises: neutron.plugins.ml2.common.MechanismDriverError
if any mechanism driver delete_network_postcommit call fails.
Called after the database transaction. If any mechanism driver
raises an error, then the error is logged but we continue to
call every other mechanism driver. A MechanismDriverError is
then reraised at the end to notify the caller of a failure. In
general we expect the caller to ignore the error, as the
network resource has already been deleted from the database
and it doesn't make sense to undo the action by recreating the
network.
"""
self._call_on_drivers("delete_network_postcommit", context,
continue_on_failure=True)
def create_subnet_precommit(self, context):
"""Notify all mechanism drivers during subnet creation.
:raises: neutron.plugins.ml2.common.MechanismDriverError
if any mechanism driver create_subnet_precommit call fails.
Called within the database transaction. If a mechanism driver
        raises an exception, then a MechanismDriverError is propagated
to the caller, triggering a rollback. There is no guarantee
that all mechanism drivers are called in this case.
"""
self._call_on_drivers("create_subnet_precommit", context)
def create_subnet_postcommit(self, context):
"""Notify all mechanism drivers after subnet creation.
:raises: neutron.plugins.ml2.common.MechanismDriverError
if any mechanism driver create_subnet_postcommit call fails.
Called after the database transaction. If a mechanism driver
raises an exception, then a MechanismDriverError is propagated
to the caller, where the subnet will be deleted, triggering
any required cleanup. There is no guarantee that all mechanism
drivers are called in this case.
"""
self._call_on_drivers("create_subnet_postcommit", context)
def update_subnet_precommit(self, context):
"""Notify all mechanism drivers during subnet update.
:raises: neutron.plugins.ml2.common.MechanismDriverError
if any mechanism driver update_subnet_precommit call fails.
Called within the database transaction. If a mechanism driver
        raises an exception, then a MechanismDriverError is propagated
to the caller, triggering a rollback. There is no guarantee
that all mechanism drivers are called in this case.
"""
self._call_on_drivers("update_subnet_precommit", context)
def update_subnet_postcommit(self, context):
"""Notify all mechanism drivers after subnet update.
:raises: neutron.plugins.ml2.common.MechanismDriverError
if any mechanism driver update_subnet_postcommit call fails.
Called after the database transaction. If any mechanism driver
raises an error, then the error is logged but we continue to
call every other mechanism driver. A MechanismDriverError is
then reraised at the end to notify the caller of a failure.
"""
self._call_on_drivers("update_subnet_postcommit", context,
continue_on_failure=True)
def delete_subnet_precommit(self, context):
"""Notify all mechanism drivers during subnet deletion.
:raises: neutron.plugins.ml2.common.MechanismDriverError
if any mechanism driver delete_subnet_precommit call fails.
Called within the database transaction. If a mechanism driver
        raises an exception, then a MechanismDriverError is propagated
to the caller, triggering a rollback. There is no guarantee
that all mechanism drivers are called in this case.
"""
self._call_on_drivers("delete_subnet_precommit", context)
def delete_subnet_postcommit(self, context):
"""Notify all mechanism drivers after subnet deletion.
:raises: neutron.plugins.ml2.common.MechanismDriverError
if any mechanism driver delete_subnet_postcommit call fails.
Called after the database transaction. If any mechanism driver
raises an error, then the error is logged but we continue to
call every other mechanism driver. A MechanismDriverError is
then reraised at the end to notify the caller of a failure. In
general we expect the caller to ignore the error, as the
subnet resource has already been deleted from the database
and it doesn't make sense to undo the action by recreating the
subnet.
"""
self._call_on_drivers("delete_subnet_postcommit", context,
continue_on_failure=True)
def create_port_precommit(self, context):
"""Notify all mechanism drivers during port creation.
:raises: neutron.plugins.ml2.common.MechanismDriverError
if any mechanism driver create_port_precommit call fails.
Called within the database transaction. If a mechanism driver
        raises an exception, then a MechanismDriverError is propagated
to the caller, triggering a rollback. There is no guarantee
that all mechanism drivers are called in this case.
"""
self._call_on_drivers("create_port_precommit", context)
def create_port_postcommit(self, context):
"""Notify all mechanism drivers of port creation.
:raises: neutron.plugins.ml2.common.MechanismDriverError
if any mechanism driver create_port_postcommit call fails.
Called after the database transaction. Errors raised by
mechanism drivers are left to propagate to the caller, where
the port will be deleted, triggering any required
cleanup. There is no guarantee that all mechanism drivers are
called in this case.
"""
self._call_on_drivers("create_port_postcommit", context)
def update_port_precommit(self, context):
"""Notify all mechanism drivers during port update.
:raises: neutron.plugins.ml2.common.MechanismDriverError
if any mechanism driver update_port_precommit call fails.
Called within the database transaction. If a mechanism driver
        raises an exception, then a MechanismDriverError is propagated
to the caller, triggering a rollback. There is no guarantee
that all mechanism drivers are called in this case.
"""
self._call_on_drivers("update_port_precommit", context)
def update_port_postcommit(self, context):
"""Notify all mechanism drivers after port update.
:raises: neutron.plugins.ml2.common.MechanismDriverError
if any mechanism driver update_port_postcommit call fails.
Called after the database transaction. If any mechanism driver
raises an error, then the error is logged but we continue to
call every other mechanism driver. A MechanismDriverError is
then reraised at the end to notify the caller of a failure.
"""
self._call_on_drivers("update_port_postcommit", context,
continue_on_failure=True)
def delete_port_precommit(self, context):
"""Notify all mechanism drivers during port deletion.
:raises: neutron.plugins.ml2.common.MechanismDriverError
if any mechanism driver delete_port_precommit call fails.
Called within the database transaction. If a mechanism driver
        raises an exception, then a MechanismDriverError is propagated
to the caller, triggering a rollback. There is no guarantee
that all mechanism drivers are called in this case.
"""
self._call_on_drivers("delete_port_precommit", context)
def delete_port_postcommit(self, context):
"""Notify all mechanism drivers after port deletion.
:raises: neutron.plugins.ml2.common.MechanismDriverError
if any mechanism driver delete_port_postcommit call fails.
Called after the database transaction. If any mechanism driver
raises an error, then the error is logged but we continue to
call every other mechanism driver. A MechanismDriverError is
then reraised at the end to notify the caller of a failure. In
general we expect the caller to ignore the error, as the
port resource has already been deleted from the database
and it doesn't make sense to undo the action by recreating the
port.
"""
self._call_on_drivers("delete_port_postcommit", context,
continue_on_failure=True)
def bind_port(self, context):
"""Attempt to bind a port using registered mechanism drivers.
:param context: PortContext instance describing the port
Called outside any transaction to attempt to establish a port
binding.
"""
binding = context._binding
LOG.debug("Attempting to bind port %(port)s on host %(host)s "
"for vnic_type %(vnic_type)s with profile %(profile)s",
{'port': context._port['id'],
'host': binding.host,
'vnic_type': binding.vnic_type,
'profile': binding.profile})
for driver in self.ordered_mech_drivers:
try:
driver.obj.bind_port(context)
if binding.segment:
binding.driver = driver.name
LOG.debug("Bound port: %(port)s, host: %(host)s, "
"vnic_type: %(vnic_type)s, "
"profile: %(profile)s, "
"driver: %(driver)s, vif_type: %(vif_type)s, "
"vif_details: %(vif_details)s, "
"segment: %(segment)s",
{'port': context._port['id'],
'host': binding.host,
'vnic_type': binding.vnic_type,
'profile': binding.profile,
'driver': binding.driver,
'vif_type': binding.vif_type,
'vif_details': binding.vif_details,
'segment': binding.segment})
return
except Exception:
LOG.exception(_LE("Mechanism driver %s failed in "
"bind_port"),
driver.name)
binding.vif_type = portbindings.VIF_TYPE_BINDING_FAILED
LOG.error(_LE("Failed to bind port %(port)s on host %(host)s"),
{'port': context._port['id'],
'host': binding.host})
class ExtensionManager(stevedore.named.NamedExtensionManager):
"""Manage extension drivers using drivers."""
def __init__(self):
# Ordered list of extension drivers, defining
# the order in which the drivers are called.
self.ordered_ext_drivers = []
LOG.info(_LI("Configured extension driver names: %s"),
cfg.CONF.ml2.extension_drivers)
super(ExtensionManager, self).__init__('neutron.ml2.extension_drivers',
cfg.CONF.ml2.extension_drivers,
invoke_on_load=True,
name_order=True)
LOG.info(_LI("Loaded extension driver names: %s"), self.names())
self._register_drivers()
def _register_drivers(self):
"""Register all extension drivers.
This method should only be called once in the ExtensionManager
constructor.
"""
for ext in self:
self.ordered_ext_drivers.append(ext)
LOG.info(_LI("Registered extension drivers: %s"),
[driver.name for driver in self.ordered_ext_drivers])
def initialize(self):
# Initialize each driver in the list.
for driver in self.ordered_ext_drivers:
LOG.info(_LI("Initializing extension driver '%s'"), driver.name)
driver.obj.initialize()
def extension_aliases(self):
exts = []
for driver in self.ordered_ext_drivers:
alias = driver.obj.extension_alias
exts.append(alias)
LOG.info(_LI("Got %(alias)s extension from driver '%(drv)s'"),
{'alias': alias, 'drv': driver.name})
return exts
def _call_on_ext_drivers(self, method_name, session, data, result):
"""Helper method for calling a method across all extension drivers."""
for driver in self.ordered_ext_drivers:
try:
getattr(driver.obj, method_name)(session, data, result)
except Exception:
LOG.exception(
_LE("Extension driver '%(name)s' failed in %(method)s"),
{'name': driver.name, 'method': method_name}
)
def process_create_network(self, session, data, result):
"""Notify all extension drivers during network creation."""
self._call_on_ext_drivers("process_create_network", session, data,
result)
def process_update_network(self, session, data, result):
"""Notify all extension drivers during network update."""
self._call_on_ext_drivers("process_update_network", session, data,
result)
def process_create_subnet(self, session, data, result):
"""Notify all extension drivers during subnet creation."""
self._call_on_ext_drivers("process_create_subnet", session, data,
result)
def process_update_subnet(self, session, data, result):
"""Notify all extension drivers during subnet update."""
self._call_on_ext_drivers("process_update_subnet", session, data,
result)
def process_create_port(self, session, data, result):
"""Notify all extension drivers during port creation."""
self._call_on_ext_drivers("process_create_port", session, data, result)
def process_update_port(self, session, data, result):
"""Notify all extension drivers during port update."""
self._call_on_ext_drivers("process_update_port", session, data, result)
def extend_network_dict(self, session, result):
"""Notify all extension drivers to extend network dictionary."""
for driver in self.ordered_ext_drivers:
driver.obj.extend_network_dict(session, result)
LOG.info(_LI("Extended network dict for driver '%(drv)s'"),
{'drv': driver.name})
def extend_subnet_dict(self, session, result):
"""Notify all extension drivers to extend subnet dictionary."""
for driver in self.ordered_ext_drivers:
driver.obj.extend_subnet_dict(session, result)
LOG.info(_LI("Extended subnet dict for driver '%(drv)s'"),
{'drv': driver.name})
def extend_port_dict(self, session, result):
"""Notify all extension drivers to extend port dictionary."""
for driver in self.ordered_ext_drivers:
driver.obj.extend_port_dict(session, result)
LOG.info(_LI("Extended port dict for driver '%(drv)s'"),
{'drv': driver.name})
|
blueboxgroup/neutron
|
neutron/plugins/ml2/managers.py
|
Python
|
apache-2.0
| 31,930
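TypeManager, MechanismManager and ExtensionManager above share one fan-out pattern: call a named method on every loaded driver, log failures, and either stop (precommit semantics) or keep going and re-raise at the end (postcommit semantics). A standalone sketch of that pattern with illustrative names, independent of stevedore and Neutron:

import logging

logger = logging.getLogger(__name__)

class DriverError(Exception):
    """Raised after at least one driver call has failed."""

def call_on_drivers(drivers, method_name, context, continue_on_failure=False):
    # Mirrors MechanismManager._call_on_drivers: invoke method_name on each
    # driver in order, record failures, optionally keep calling the rest,
    # then surface a single aggregate error to the caller.
    error = False
    for driver in drivers:
        try:
            getattr(driver, method_name)(context)
        except Exception:
            logger.exception("driver %r failed in %s", driver, method_name)
            error = True
            if not continue_on_failure:
                break
    if error:
        raise DriverError(method_name)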
|
# Copyright 2011 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
"""Tests for the `BugContextMenu`."""
__metaclass__ = type
from zope.component import getUtility
from lp.bugs.browser.bug import BugContextMenu
from lp.bugs.enums import BugNotificationLevel
from lp.services.features import get_relevant_feature_controller
from lp.services.webapp.interfaces import IOpenLaunchBag
from lp.services.webapp.servers import LaunchpadTestRequest
from lp.testing import (
feature_flags,
person_logged_in,
TestCaseWithFactory,
)
from lp.testing.layers import DatabaseFunctionalLayer
from lp.testing.views import create_initialized_view
class TestBugContextMenu(TestCaseWithFactory):
layer = DatabaseFunctionalLayer
def setUp(self):
super(TestBugContextMenu, self).setUp()
self.bug = self.factory.makeBug()
# We need to put the Bug and default BugTask into the LaunchBag
# because BugContextMenu relies on the LaunchBag to populate its
# context property
launchbag = getUtility(IOpenLaunchBag)
launchbag.add(self.bug)
launchbag.add(self.bug.default_bugtask)
self.context_menu = BugContextMenu(self.bug)
def test_text_for_muted_subscriptions(self):
# If a user has a mute on a bug it's recorded internally as a
# type of subscription. However, the subscription text of the
# BugContextMenu will still read 'Subscribe'.
person = self.factory.makePerson()
with feature_flags():
with person_logged_in(person):
self.bug.mute(person, person)
link = self.context_menu.subscription()
self.assertEqual('Subscribe', link.text)
def test_mute_subscription_link(self):
# The mute_subscription() method of BugContextMenu will return a
# Link whose text will alter depending on whether or not they
# have a mute on the bug.
person = self.factory.makePerson()
with feature_flags():
with person_logged_in(person):
# If the user hasn't muted the bug, the link text will
# reflect this.
link = self.context_menu.mute_subscription()
self.assertEqual("Mute bug mail", link.text)
# Once the user has muted the bug, the link text will
# change.
self.bug.mute(person, person)
link = self.context_menu.mute_subscription()
self.assertEqual("Unmute bug mail", link.text)
def test_mute_help_available(self):
# There is a help link available next to the mute/unmute button.
person = self.factory.makePerson()
with feature_flags():
with person_logged_in(person):
self.bug.subscribe(
person, person, level=BugNotificationLevel.METADATA)
self.bug.mute(person, person)
request = LaunchpadTestRequest()
request.features = get_relevant_feature_controller()
view = create_initialized_view(
self.bug, name="+portlet-subscription", request=request)
html = view.render()
self.assertTrue('class="sprite maybe action-icon mute-help"' in html)
|
abramhindle/UnnaturalCodeFork
|
python/testdata/launchpad/lib/lp/bugs/browser/tests/test_bug_context_menu.py
|
Python
|
agpl-3.0
| 3,360
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import reversion
from django.core.exceptions import ImproperlyConfigured
from django.core.paginator import InvalidPage, Paginator
from django.db.models.query import QuerySet
from django.http import Http404
from django.utils import six
from django.utils.translation import ugettext as _
from django.views.generic import DetailView, UpdateView
from reversion.models import Version
__all__ = (
'MultipleVersionObjectMixin',
'DetailVersionListView',
'UpdateVersionListView'
)
class MultipleVersionObjectMixin(object):
"""
    A mixin for views manipulating multiple django-reversion Versions of an object.
"""
version_allow_empty = True
version_queryset = None
version_model = Version
version_paginate_by = None
version_paginate_orphans = 0
version_context_object_name = None
version_paginator_class = Paginator
version_page_kwarg = 'versionpage'
version_ordering = '-revision__date_created'
version_object_list = None
def get_version_queryset(self):
"""
Return the list of version items for this view.
The return value must be an iterable and may be an instance of
`QuerySet` in which case `QuerySet` specific behavior will be enabled.
"""
if self.version_queryset is not None:
queryset = self.version_queryset
if isinstance(queryset, QuerySet):
queryset = queryset.all()
elif self.version_model is not None:
queryset = reversion.get_for_object(self.get_object())
else:
raise ImproperlyConfigured(
"%(cls)s is missing a Version QuerySet. Define "
"%(cls)s.version_model, %(cls)s.version_queryset, or override "
"%(cls)s.get_version_queryset()." % {
'cls': self.__class__.__name__
}
)
ordering = self.get_version_ordering()
if ordering:
if isinstance(ordering, six.string_types):
ordering = (ordering,)
queryset = queryset.order_by(*ordering)
return queryset
def get_version_ordering(self):
"""
Return the field or fields to use for ordering the version queryset.
"""
return self.version_ordering
def paginate_version_queryset(self, queryset, page_size):
"""
Paginate the version queryset, if needed.
"""
paginator = self.get_version_paginator(
queryset, page_size, orphans=self.get_version_paginate_orphans(),
allow_empty_first_page=self.get_version_allow_empty())
page_kwarg = self.version_page_kwarg
page = self.kwargs.get(page_kwarg) or self.request.GET.get(page_kwarg) or 1
try:
page_number = int(page)
except ValueError:
if page == 'last':
page_number = paginator.num_pages
else:
raise Http404(_("Page is not 'last', nor can it be converted to an int."))
try:
page = paginator.page(page_number)
return (paginator, page, page.object_list, page.has_other_pages())
except InvalidPage as e:
raise Http404(_('Invalid page (%(page_number)s): %(message)s') % {
'page_number': page_number,
'message': str(e)
})
def get_version_paginate_by(self, queryset):
"""
Get the number of version items to paginate by, or ``None`` for no pagination.
"""
return self.version_paginate_by
def get_version_paginator(self, queryset, per_page, orphans=0,
allow_empty_first_page=True, **kwargs):
"""
Return an instance of the version paginator for this view.
"""
return self.version_paginator_class(
queryset, per_page, orphans=orphans,
allow_empty_first_page=allow_empty_first_page, **kwargs)
def get_version_paginate_orphans(self):
"""
        Returns the maximum number of orphans to extend the last page by
        when paginating.
"""
return self.version_paginate_orphans
def get_version_allow_empty(self):
"""
Returns ``True`` if the view should display empty version lists, and ``False``
if a 404 should be raised instead.
"""
return self.version_allow_empty
def get_version_context_object_name(self):
"""
Get the name of the version item to be used in the context.
"""
if self.version_context_object_name:
return self.version_context_object_name
elif hasattr(self, 'model'):
return '%s_versions_list' % self.model._meta.model_name
else:
return None
def get_context_data(self, **kwargs):
"""
Get the context for this view.
"""
version_queryset = kwargs.pop('version_object_list', self.version_object_list)
version_page_size = self.get_version_paginate_by(version_queryset)
version_context_object_name = self.get_version_context_object_name()
if version_page_size:
version_paginator, version_page, version_queryset, version_is_paginated = self.paginate_version_queryset(
version_queryset, version_page_size)
context = {
'version_paginator': version_paginator,
'version_page_obj': version_page,
'version_is_paginated': version_is_paginated,
'object_versions_list': version_queryset
}
else:
context = {
'version_paginator': None,
'version_page_obj': None,
'version_is_paginated': False,
'object_versions_list': version_queryset
}
if version_context_object_name is not None:
context[version_context_object_name] = version_queryset
context.update(kwargs)
return super(MultipleVersionObjectMixin, self).get_context_data(**context)
def get(self, request, *args, **kwargs):
self.version_object_list = self.get_version_queryset()
version_allow_empty = self.get_version_allow_empty()
if not version_allow_empty:
# When pagination is enabled and object_versions_list is a queryset,
# it's better to do a cheap query than to load the unpaginated
# queryset in memory.
if (self.get_version_paginate_by(self.version_object_list) is not None and hasattr(self.version_object_list, 'exists')): # noqa
is_empty = not self.version_object_list.exists()
else:
is_empty = len(self.version_object_list) == 0
if is_empty:
raise Http404(
_("Empty Version list and '%(class_name)s.version_allow_empty' is False.")
% {'class_name': self.__class__.__name__})
        return super(MultipleVersionObjectMixin, self).get(request, *args, **kwargs)
class DetailVersionListView(MultipleVersionObjectMixin, DetailView):
"""
    Render a list of django-reversion Versions of an object, set by `self.model` or
`self.queryset`. `self.queryset` can actually be any iterable of items, not just a queryset.
"""
template_name_suffix = '_version_list'
class UpdateVersionListView(MultipleVersionObjectMixin, UpdateView):
"""
    Render a list of versions of an object, set by `self.model` or `self.queryset`.
`self.queryset` can actually be any iterable of items, not just a queryset.
"""
template_name_suffix = '_form_version_list'
|
luzfcb/django-reversion-extras
|
reversion_extras/views.py
|
Python
|
bsd-3-clause
| 7,718
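A hedged usage sketch for the concrete views above. `Article`, the app layout, and the URL pattern are hypothetical; the view pulls Versions via reversion.get_for_object(), exactly as the mixin's get_version_queryset() does:

# views.py -- paginated revision history for a single object
from reversion_extras.views import DetailVersionListView
from myapp.models import Article  # hypothetical model registered with reversion

class ArticleHistoryView(DetailVersionListView):
    model = Article
    version_paginate_by = 20  # navigate with ?versionpage=N
    # renders myapp/article_version_list.html via template_name_suffix

# urls.py
from django.conf.urls import url  # Django <2 style, matching this code's era

urlpatterns = [
    url(r'^articles/(?P<pk>\d+)/history/$', ArticleHistoryView.as_view(),
        name='article-history'),
]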
|
from django.db import models
from django.test import TestCase
from django.utils import six
from .. import utils
from ..views import IndexView
class UtilsTestModel(models.Model):
field1 = models.CharField(max_length=23)
field2 = models.CharField('second field', max_length=42)
def simple_method(self):
return 42
def was_published_recently(self):
return True
was_published_recently.boolean = True
was_published_recently.short_description = 'Published recently?'
class Meta:
verbose_name = "Utils Test Model"
verbose_name_plural = "Utils Test Models"
class UtilsTest(TestCase):
def setUp(self):
self.instance = UtilsTestModel()
def test_as_model_class(self):
self.assertEquals(
UtilsTestModel._meta,
utils.model_options(UtilsTestModel)
)
UtilsTestModel._meta.verbose_name = "Utils Test Model is singular"
UtilsTestModel._meta.verbose_name_plural = "Utils Test Model are " +\
" plural"
self.assertEquals(
UtilsTestModel._meta,
utils.model_options(UtilsTestModel)
)
UtilsTestModel._meta.verbose_name = "Utils Test Model"
UtilsTestModel._meta.verbose_name_plural = "Utils Test Models"
def test_as_model_instance(self):
self.assertEquals(
self.instance._meta,
utils.model_options(self.instance)
)
self.instance._meta.verbose_name = "Utils Test Model is singular"
self.instance._meta.verbose_name_plural = "Utils Test Model are " +\
" plural"
self.assertEquals(
self.instance._meta,
utils.model_options(self.instance)
)
self.instance._meta.verbose_name = "Utils Test Model"
self.instance._meta.verbose_name_plural = "Utils Test Models"
def test_admin2_urlname(self):
self.assertEquals(
"admin2:None_None_index",
utils.admin2_urlname(IndexView, "index")
)
def test_model_app_label_as_model_class(self):
self.assertEquals(
UtilsTestModel._meta.app_label,
utils.model_app_label(UtilsTestModel)
)
def test_model_app_label_as_model_instance(self):
self.assertEquals(
self.instance._meta.app_label,
utils.model_app_label(UtilsTestModel)
)
def test_model_verbose_name_as_model_class(self):
self.assertEquals(
UtilsTestModel._meta.verbose_name,
utils.model_verbose_name(UtilsTestModel)
)
def test_model_verbose_name_as_model_instance(self):
self.assertEquals(
self.instance._meta.verbose_name,
utils.model_verbose_name(self.instance)
)
def test_model_verbose_name_plural_as_model_class(self):
self.assertEquals(
UtilsTestModel._meta.verbose_name_plural,
utils.model_verbose_name_plural(UtilsTestModel)
)
def test_model_verbose_name_plural_as_model_instance(self):
self.assertEquals(
self.instance._meta.verbose_name_plural,
utils.model_verbose_name_plural(self.instance)
)
def test_model_field_verbose_name_autogenerated(self):
self.assertEquals(
'field1',
utils.model_field_verbose_name(self.instance, 'field1')
)
def test_model_field_verbose_name_overridden(self):
self.assertEquals(
'second field',
utils.model_field_verbose_name(self.instance, 'field2')
)
def test_model_method_verbose_name(self):
self.assertEquals(
'Published recently?',
utils.model_method_verbose_name(self.instance, 'was_published_recently')
)
def test_model_method_verbose_name_fallback(self):
self.assertEquals(
'simple_method',
utils.model_method_verbose_name(self.instance, 'simple_method')
)
def test_app_label_as_model_class(self):
self.assertEquals(
UtilsTestModel._meta.app_label,
utils.model_app_label(UtilsTestModel)
)
def test_app_label_as_model_instance(self):
self.assertEquals(
self.instance._meta.app_label,
utils.model_app_label(self.instance)
)
def test_get_attr_callable(self):
class Klass(object):
def hello(self):
return "hello"
self.assertEquals(
utils.get_attr(Klass(), "hello"),
"hello"
)
def test_get_attr_str(self):
class Klass(object):
def __str__(self):
return "str"
def __unicode__(self):
return "unicode"
if six.PY2:
self.assertEquals(
utils.get_attr(Klass(), "__str__"),
"unicode"
)
else:
self.assertEquals(
utils.get_attr(Klass(), "__str__"),
"str"
)
def test_get_attr(self):
class Klass(object):
attr = "value"
self.assertEquals(
utils.get_attr(Klass(), "attr"),
"value"
)
|
hnakamur/django-admin2
|
djadmin2/tests/test_utils.py
|
Python
|
bsd-3-clause
| 5,246
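The tests above pin down djadmin2's helper semantics: each utility accepts either a model class or an instance and goes through `_meta`. A minimal illustrative sketch of that contract (not djadmin2's actual implementation):

def model_options(model_or_instance):
    # Django exposes the options object on both classes and instances.
    return model_or_instance._meta

def model_verbose_name(model_or_instance):
    return model_options(model_or_instance).verbose_name

def model_verbose_name_plural(model_or_instance):
    return model_options(model_or_instance).verbose_name_plural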
|
# Auto generated configuration file
# using:
# Revision: 1.19
# Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v
# with command line options: TTbar_Tauola_13TeV_cfi.py --conditions auto:startup -n 1000 --eventcontent FEVTDEBUG --relval 9000,100 -s GEN,SIM --datatier GEN-SIM --no_exec
import FWCore.ParameterSet.Config as cms
process = cms.Process('SIM')
# import of standard configurations
process.load('Configuration.StandardSequences.Services_cff')
process.load('SimGeneral.HepPDTESSource.pythiapdt_cfi')
process.load('FWCore.MessageService.MessageLogger_cfi')
process.load('Configuration.EventContent.EventContent_cff')
process.load('SimGeneral.MixingModule.mixNoPU_cfi')
process.load('Configuration.StandardSequences.GeometryRecoDB_cff')
process.load('Configuration.Geometry.GeometrySimDB_cff')
process.load('Configuration.StandardSequences.MagneticField_38T_cff')
process.load('Configuration.StandardSequences.Generator_cff')
process.load('IOMC.EventVertexGenerators.VtxSmearedRealistic8TeVCollision_cfi')
process.load('GeneratorInterface.Core.genFilterSummary_cff')
process.load('Configuration.StandardSequences.SimIdeal_cff')
process.load('Configuration.StandardSequences.EndOfProcess_cff')
process.load('Configuration.StandardSequences.FrontierConditions_GlobalTag_cff')
process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(1000)
)
# Input source
process.source = cms.Source("EmptySource")
process.options = cms.untracked.PSet(
)
# Production Info
process.configurationMetadata = cms.untracked.PSet(
version = cms.untracked.string('$Revision: 1.19 $'),
annotation = cms.untracked.string('TTbar_Tauola_13TeV_cfi.py nevts:1000'),
name = cms.untracked.string('Applications')
)
# Output definition
process.FEVTDEBUGoutput = cms.OutputModule("PoolOutputModule",
splitLevel = cms.untracked.int32(0),
eventAutoFlushCompressedSize = cms.untracked.int32(5242880),
outputCommands = process.FEVTDEBUGEventContent.outputCommands,
fileName = cms.untracked.string('TTbar_Tauola_13TeV_cfi_py_GEN_SIM.root'),
dataset = cms.untracked.PSet(
filterName = cms.untracked.string(''),
dataTier = cms.untracked.string('GEN-SIM')
),
SelectEvents = cms.untracked.PSet(
SelectEvents = cms.vstring('generation_step')
)
)
# Additional output definition
# Other statements
process.genstepfilter.triggerConditions=cms.vstring("generation_step")
from Configuration.AlCa.GlobalTag import GlobalTag
process.GlobalTag = GlobalTag(process.GlobalTag, 'auto:startup', '')
process.generator = cms.EDFilter("Pythia6GeneratorFilter",
ExternalDecays = cms.PSet(
Tauola = cms.untracked.PSet(
UseTauolaPolarization = cms.bool(True),
InputCards = cms.PSet(
mdtau = cms.int32(0),
pjak2 = cms.int32(0),
pjak1 = cms.int32(0)
)
),
parameterSets = cms.vstring('Tauola')
),
pythiaPylistVerbosity = cms.untracked.int32(0),
filterEfficiency = cms.untracked.double(1.0),
pythiaHepMCVerbosity = cms.untracked.bool(False),
comEnergy = cms.double(13000.0),
maxEventsToPrint = cms.untracked.int32(0),
PythiaParameters = cms.PSet(
pythiaUESettings = cms.vstring('MSTU(21)=1 ! Check on possible errors during program execution',
'MSTJ(22)=2 ! Decay those unstable particles',
'PARJ(71)=10 . ! for which ctau 10 mm',
'MSTP(33)=0 ! no K factors in hard cross sections',
'MSTP(2)=1 ! which order running alphaS',
'MSTP(51)=10042 ! structure function chosen (external PDF CTEQ6L1)',
'MSTP(52)=2 ! work with LHAPDF',
'PARP(82)=1.921 ! pt cutoff for multiparton interactions',
'PARP(89)=1800. ! sqrts for which PARP82 is set',
'PARP(90)=0.227 ! Multiple interactions: rescaling power',
'MSTP(95)=6 ! CR (color reconnection parameters)',
'PARP(77)=1.016 ! CR',
'PARP(78)=0.538 ! CR',
'PARP(80)=0.1 ! Prob. colored parton from BBR',
'PARP(83)=0.356 ! Multiple interactions: matter distribution parameter',
'PARP(84)=0.651 ! Multiple interactions: matter distribution parameter',
'PARP(62)=1.025 ! ISR cutoff',
'MSTP(91)=1 ! Gaussian primordial kT',
'PARP(93)=10.0 ! primordial kT-max',
'MSTP(81)=21 ! multiple parton interactions 1 is Pythia default',
'MSTP(82)=4 ! Defines the multi-parton model'),
processParameters = cms.vstring('MSEL = 0 ! User defined processes',
'MSUB(81) = 1 ! qqbar to QQbar',
'MSUB(82) = 1 ! gg to QQbar',
'MSTP(7) = 6 ! flavour = top',
'PMAS(6,1) = 175. ! top quark mass'),
parameterSets = cms.vstring('pythiaUESettings',
'processParameters')
)
)
# Path and EndPath definitions
process.generation_step = cms.Path(process.pgen)
process.simulation_step = cms.Path(process.psim)
process.genfiltersummary_step = cms.EndPath(process.genFilterSummary)
process.endjob_step = cms.EndPath(process.endOfProcess)
process.FEVTDEBUGoutput_step = cms.EndPath(process.FEVTDEBUGoutput)
# Schedule definition
process.schedule = cms.Schedule(process.generation_step,process.genfiltersummary_step,process.simulation_step,process.endjob_step,process.FEVTDEBUGoutput_step)
# filter all path with the production filter sequence
for path in process.paths:
getattr(process,path)._seq = process.generator * getattr(process,path)._seq
|
rovere/productions
|
TTbar_Tauola_13TeV_cfi_py_GEN_SIM.py
|
Python
|
gpl-3.0
| 5,681
|
from GUIComponent import GUIComponent
from VariableText import VariableText
from os import statvfs
from enigma import eLabel
# TODO: Harddisk.py has similar functions, but only similar.
# fix this to use same code
class DiskInfo(VariableText, GUIComponent):
FREE = 0
USED = 1
SIZE = 2
def __init__(self, path, type, update = True):
GUIComponent.__init__(self)
VariableText.__init__(self)
self.type = type
self.path = path
if update:
self.update()
def update(self):
try:
stat = statvfs(self.path)
except OSError:
return -1
if self.type == self.FREE:
try:
percent = '(' + str((100 * stat.f_bavail) // stat.f_blocks) + '%)'
free = stat.f_bfree * stat.f_bsize
if free < 10000000:
free = _("%d Kb") % (free >> 10)
elif free < 10000000000:
free = _("%d Mb") % (free >> 20)
else:
free = _("%d Gb") % (free >> 30)
self.setText(" ".join((free, percent, _("free diskspace"))))
            except Exception:
# occurs when f_blocks is 0 or a similar error
self.setText("-?-")
GUI_WIDGET = eLabel
|
bally12345/enigma2
|
lib/python/Components/DiskInfo.py
|
Python
|
gpl-2.0
| 1,054
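A standalone sketch of the statvfs arithmetic DiskInfo.update() performs, outside the enigma2 GUI (function name and formatting are illustrative):

import os

def free_space_text(path):
    # f_bavail / f_blocks gives the free fraction; f_bfree * f_bsize gives
    # free bytes -- the same fields DiskInfo reads.
    try:
        st = os.statvfs(path)
        percent = (100 * st.f_bavail) // st.f_blocks
        free = st.f_bfree * st.f_bsize
    except (OSError, ZeroDivisionError):
        return "-?-"
    for unit, shift in (("GB", 30), ("MB", 20), ("KB", 10)):
        if free >> shift:
            return "%d %s (%d%%) free" % (free >> shift, unit, percent)
    return "%d B (%d%%) free" % (free, percent)

print(free_space_text("/"))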
|
# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from datetime import datetime, date
from dateutil.relativedelta import relativedelta
from flask import session
from MaKaC.webinterface import urlHandlers as UH
from MaKaC.webinterface.pages.base import WPNotDecorated
from MaKaC.webinterface.wcomponents import WTemplated
from indico.modules.rb.models.locations import Location
from indico.modules.rb.util import rb_is_admin
from indico.modules.rb.views import WPRoomBookingBase
from indico.modules.rb.views.calendar import RoomBookingCalendarWidget
from indico.util.i18n import _
from indico.web.flask.util import url_for
class WPRoomBookingMapOfRooms(WPRoomBookingBase):
sidemenu_option = 'map'
def __init__(self, rh, **params):
WPRoomBookingBase.__init__(self, rh)
self._rh = rh
self._params = params
def _getTitle(self):
return '{} - {}'.format(WPRoomBookingBase._getTitle(self), _('Map of rooms'))
def _getBody(self, params):
return WRoomBookingMapOfRooms(**self._params).getHTML(params)
class WRoomBookingMapOfRooms(WTemplated):
def __init__(self, **params):
WTemplated.__init__(self)
self._params = params if params else {}
def getVars(self):
wvars = WTemplated.getVars(self)
wvars['mapOfRoomsWidgetURL'] = UH.UHRoomBookingMapOfRoomsWidget.getURL(None, **self._params)
return wvars
class WPRoomBookingMapOfRoomsWidget(WPNotDecorated):
sidemenu_option = 'map'
def getCSSFiles(self):
return WPNotDecorated.getCSSFiles(self) + ['css/mapofrooms.css']
def getJSFiles(self):
return WPNotDecorated.getJSFiles(self) + self._includeJSPackage('RoomBooking')
def _getTitle(self):
return '{} - {}'.format(WPNotDecorated._getTitle(self), _('Map of rooms'))
def _getBody(self, params):
return WTemplated('RoomBookingMapOfRoomsWidget').getHTML(params)
class WPRoomBookingSearchRooms(WPRoomBookingBase):
sidemenu_option = 'search_rooms'
def _getTitle(self):
return '{} - {}'.format(WPRoomBookingBase._getTitle(self), _('Search for rooms'))
def _getBody(self, params):
params['startDT'] = datetime.combine(date.today(), Location.working_time_start)
params['endDT'] = datetime.combine(date.today(), Location.working_time_end)
params['startT'] = params['startDT'].strftime('%H:%M')
params['endT'] = params['endDT'].strftime('%H:%M')
return WTemplated('RoomBookingSearchRooms').getHTML(params)
class WPRoomBookingSearchRoomsResults(WPRoomBookingBase):
def __init__(self, rh, menu_item, **kwargs):
self.sidemenu_option = menu_item
WPRoomBookingBase.__init__(self, rh, **kwargs)
def _getTitle(self):
return '{} - {}'.format(WPRoomBookingBase._getTitle(self), _('Search results'))
def _getBody(self, params):
return WTemplated('RoomBookingSearchRoomsResults').getHTML(params)
class WPRoomBookingRoomDetails(WPRoomBookingBase):
endpoints = {
'room_book': 'rooms.room_book'
}
def _getTitle(self):
return '{} - {}'.format(WPRoomBookingBase._getTitle(self), _('Room Details'))
def _getBody(self, params):
params['endpoints'] = self.endpoints
calendar = RoomBookingCalendarWidget(params['occurrences'], params['start_dt'], params['end_dt'],
specific_room=params['room'])
params['calendar'] = calendar.render(show_navbar=False, can_navigate=False)
return WRoomBookingRoomDetails(self._rh, standalone=True).getHTML(params)
class WRoomBookingRoomDetails(WTemplated):
    DEFAULT_CALENDAR_RANGE = relativedelta(months=3)
def __init__(self, rh, standalone=False):
self._rh = rh
self._standalone = standalone
def getVars(self):
wvars = WTemplated.getVars(self)
wvars['standalone'] = self._standalone
room = wvars['room']
wvars['attrs'] = {attr.attribute.name: attr for attr in room.attributes
if not attr.attribute.is_hidden or rb_is_admin(session.user)}
wvars['owner_name'] = room.owner.full_name
wvars['bookable_hours'] = room.bookable_hours.all()
wvars['nonbookable_periods'] = room.nonbookable_periods.all()
# URLs
wvars['stats_url'] = UH.UHRoomBookingRoomStats.getURL(room)
wvars['delete_room_url'] = url_for('rooms_admin.delete_room', room)
wvars['modify_room_url'] = url_for('rooms_admin.modify_room', room)
if not self._standalone:
wvars['conference'] = self._rh._conf
wvars['show_on_map'] = room.map_url if room.map_url else url_for('rooms.roomBooking-mapOfRooms', room)
return wvars
class WPRoomBookingRoomStats(WPRoomBookingBase):
def _getBody(self, params):
params['period_options'] = [
('pastmonth', _('Past month')),
('thisyear', _('This year')),
('sinceever', _('Since ever'))
]
return WTemplated('RoomBookingRoomStats').getHTML(params)
|
belokop/indico_bare
|
indico/modules/rb/views/user/rooms.py
|
Python
|
gpl-3.0
| 5,749
|
# (c) Copyright IBM Corp. 2021
# (c) Copyright Instana Inc. 2018
from __future__ import absolute_import
from distutils.version import LooseVersion
import opentracing
import opentracing.ext.tags as ext
import wrapt
from ..log import logger
from ..util.traceutils import get_active_tracer
try:
import suds # noqa
if (LooseVersion(suds.version.__version__) <= LooseVersion('0.6')):
class_method = 'SoapClient.send'
else:
class_method = '_SoapClient.send'
@wrapt.patch_function_wrapper('suds.client', class_method)
def send_with_instana(wrapped, instance, args, kwargs):
active_tracer = get_active_tracer()
# If we're not tracing, just return
if active_tracer is None:
return wrapped(*args, **kwargs)
with active_tracer.start_active_span("soap", child_of=active_tracer.active_span) as scope:
try:
scope.span.set_tag('soap.action', instance.method.name)
scope.span.set_tag(ext.HTTP_URL, instance.method.location)
scope.span.set_tag(ext.HTTP_METHOD, 'POST')
active_tracer.inject(scope.span.context, opentracing.Format.HTTP_HEADERS, instance.options.headers)
rv = wrapped(*args, **kwargs)
except Exception as e:
scope.span.log_exception(e)
scope.span.set_tag(ext.HTTP_STATUS_CODE, 500)
raise
else:
scope.span.set_tag(ext.HTTP_STATUS_CODE, 200)
return rv
logger.debug("Instrumenting suds-jurko")
except ImportError:
pass
|
instana/python-sensor
|
instana/instrumentation/sudsjurko.py
|
Python
|
mit
| 1,613
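The instrumentation above hinges on wrapt's patch-at-import idiom. A self-contained sketch of the same idiom against a standard-library function instead of suds (purely illustrative):

import json
import wrapt

@wrapt.patch_function_wrapper('json', 'dumps')
def dumps_with_logging(wrapped, instance, args, kwargs):
    # Same (wrapped, instance, args, kwargs) signature as the suds patch:
    # do work around the original call, then delegate unchanged.
    print("json.dumps called with %d positional arg(s)" % len(args))
    return wrapped(*args, **kwargs)

print(json.dumps({"patched": True}))  # logs once, then prints the JSON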
|
from collections import deque
class Solution(object):
magic = 250
height = 0
length = 0
def updateMatrix(self, matrix):
"""
:type matrix: List[List[int]]
:rtype: List[List[int]]
"""
if len(matrix) == 0:
return matrix
self.height, self.length = len(matrix), len(matrix[0])
for i in range(self.height):
for j in range(self.length):
if matrix[i][j] != 0:
matrix[i][j] = self.magic
queue = deque([])
for i in range(self.height):
for j in range(self.length):
if matrix[i][j] == 0:
self.BFS(matrix, queue, i, j, 1)
while len(queue) != 0:
point = queue.popleft()
self.BFS(matrix, queue, point[0], point[1], matrix[point[0]][point[1]] + 1)
return matrix
def BFS(self, matrix, queue, i, j, weight):
if i > 0 and matrix[i - 1][j] == self.magic:
matrix[i - 1][j] = weight
queue.append([i - 1, j])
if j > 0 and matrix[i][j - 1] == self.magic:
matrix[i][j - 1] = weight
queue.append([i, j - 1])
if i + 1 < self.height and matrix[i + 1][j] == self.magic:
matrix[i + 1][j] = weight
queue.append([i + 1, j])
if j + 1 < self.length and matrix[i][j + 1] == self.magic:
matrix[i][j + 1] = weight
queue.append([i, j + 1])
|
liupangzi/codekata
|
leetcode/Algorithms/542.01Matrix/Solution.py
|
Python
|
mit
| 1,479
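A quick check of the BFS solution above on the standard LeetCode 542 example (assumes the Solution class above is in scope):

grid = [[0, 0, 0],
        [0, 1, 0],
        [1, 1, 1]]
# Every cell becomes its distance to the nearest 0; BFS starts from all
# zeros at once, so each cell is labeled on its first visit.
print(Solution().updateMatrix(grid))  # [[0, 0, 0], [0, 1, 0], [1, 2, 1]]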
|
'''
Created on 2015/10/28
@author: michael
'''
__title__ = 'NewsDog'
__version__ = '0.1'
__author__ = 'Michael Findlater'
__license__ = 'GNU V2'
__copyright__ = 'Copyright 2015 Michael Findlater'
from .newsdog import NewsDog
|
michaelfindlater/NewsDog
|
NewsDog/__init__.py
|
Python
|
gpl-2.0
| 227
|
# -*- coding:utf-8 -*-
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import sys
from color import Coloring
from command import InteractiveCommand
from git_command import GitCommand
class _ProjectList(Coloring):
def __init__(self, gc):
Coloring.__init__(self, gc, 'interactive')
self.prompt = self.printer('prompt', fg='blue', attr='bold')
self.header = self.printer('header', attr='bold')
self.help = self.printer('help', fg='red', attr='bold')
class Stage(InteractiveCommand):
common = True
helpSummary = "Stage file(s) for commit"
helpUsage = """
%prog -i [<project>...]
"""
helpDescription = """
The '%prog' command stages files to prepare the next commit.
"""
def _Options(self, p):
p.add_option('-i', '--interactive',
dest='interactive', action='store_true',
help='use interactive staging')
def Execute(self, opt, args):
if opt.interactive:
self._Interactive(opt, args)
else:
self.Usage()
def _Interactive(self, opt, args):
all_projects = [p for p in self.GetProjects(args) if p.IsDirty()]
if not all_projects:
print('no projects have uncommitted modifications', file=sys.stderr)
return
out = _ProjectList(self.manifest.manifestProject.config)
while True:
out.header(' %s', 'project')
out.nl()
for i in range(len(all_projects)):
project = all_projects[i]
out.write('%3d: %s', i + 1, project.relpath + '/')
out.nl()
out.nl()
out.write('%3d: (', 0)
out.prompt('q')
out.write('uit)')
out.nl()
out.prompt('project> ')
try:
a = sys.stdin.readline()
except KeyboardInterrupt:
out.nl()
break
if a == '':
out.nl()
break
a = a.strip()
if a.lower() in ('q', 'quit', 'exit'):
break
if not a:
continue
try:
a_index = int(a)
except ValueError:
a_index = None
if a_index is not None:
if a_index == 0:
break
if 0 < a_index and a_index <= len(all_projects):
_AddI(all_projects[a_index - 1])
continue
projects = [p for p in all_projects if a in [p.name, p.relpath]]
if len(projects) == 1:
_AddI(projects[0])
continue
print('Bye.')
def _AddI(project):
p = GitCommand(project, ['add', '--interactive'], bare=False)
p.Wait()
|
couchbasedeps/git-repo
|
subcmds/stage.py
|
Python
|
apache-2.0
| 3,034
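Outside repo's command framework, the _AddI helper reduces to running git's interactive staging UI in one project's working tree. A hedged stdlib-only equivalent:

import subprocess

def interactive_add(worktree_path):
    # Equivalent of _AddI: hand the terminal to `git add --interactive`
    # for the chosen project and block until the user exits.
    return subprocess.call(['git', 'add', '--interactive'], cwd=worktree_path)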
|
from __future__ import division, print_function, absolute_import
from decimal import Decimal
from numpy.testing import (TestCase, run_module_suite, assert_equal,
assert_almost_equal, assert_array_equal, assert_array_almost_equal,
assert_raises, assert_allclose, assert_, dec)
import scipy.signal as signal
from scipy.signal import (correlate, convolve, convolve2d, fftconvolve,
hilbert, hilbert2, lfilter, lfilter_zi, filtfilt, butter, tf2zpk,
invres, vectorstrength, signaltools)
from numpy import array, arange
import numpy as np
class _TestConvolve(TestCase):
def test_basic(self):
a = [3,4,5,6,5,4]
b = [1,2,3]
c = convolve(a,b)
assert_array_equal(c,array([3,10,22,28,32,32,23,12]))
def test_complex(self):
x = array([1+1j, 2+1j, 3+1j])
y = array([1+1j, 2+1j])
z = convolve(x, y)
assert_array_equal(z, array([2j, 2+6j, 5+8j, 5+5j]))
def test_zero_rank(self):
a = 1289
b = 4567
c = convolve(a,b)
assert_equal(c,a*b)
def test_single_element(self):
a = array([4967])
b = array([3920])
c = convolve(a,b)
assert_equal(c,a*b)
def test_2d_arrays(self):
a = [[1,2,3],[3,4,5]]
b = [[2,3,4],[4,5,6]]
c = convolve(a,b)
d = array([[2,7,16,17,12],
[10,30,62,58,38],
[12,31,58,49,30]])
assert_array_equal(c,d)
def test_valid_mode(self):
a = [1,2,3,6,5,3]
b = [2,3,4,5,3,4,2,2,1]
c = convolve(a,b,'valid')
assert_array_equal(c,array([70,78,73,65]))
class TestConvolve(_TestConvolve):
def test_valid_mode(self):
# 'valid' mode if b.size > a.size does not make sense with the new
# behavior
a = [1,2,3,6,5,3]
b = [2,3,4,5,3,4,2,2,1]
def _test():
convolve(a,b,'valid')
self.assertRaises(ValueError, _test)
def test_same_mode(self):
a = [1,2,3,3,1,2]
b = [1,4,3,4,5,6,7,4,3,2,1,1,3]
c = convolve(a,b,'same')
d = array([57,61,63,57,45,36])
assert_array_equal(c,d)
class _TestConvolve2d(TestCase):
def test_2d_arrays(self):
a = [[1,2,3],[3,4,5]]
b = [[2,3,4],[4,5,6]]
d = array([[2,7,16,17,12],
[10,30,62,58,38],
[12,31,58,49,30]])
e = convolve2d(a, b)
assert_array_equal(e, d)
def test_valid_mode(self):
e = [[2,3,4,5,6,7,8], [4,5,6,7,8,9,10]]
f = [[1,2,3], [3,4,5]]
g = convolve2d(e, f, 'valid')
h = array([[62,80,98,116,134]])
assert_array_equal(g, h)
def test_valid_mode_complx(self):
e = [[2,3,4,5,6,7,8], [4,5,6,7,8,9,10]]
f = np.array([[1,2,3], [3,4,5]], dtype=np.complex) + 1j
g = convolve2d(e, f, 'valid')
h = array([[62.+24.j, 80.+30.j, 98.+36.j, 116.+42.j, 134.+48.j]])
assert_array_almost_equal(g, h)
def test_fillvalue(self):
a = [[1,2,3],[3,4,5]]
b = [[2,3,4],[4,5,6]]
fillval = 1
c = convolve2d(a,b,'full','fill',fillval)
d = array([[24,26,31,34,32],
[28,40,62,64,52],
[32,46,67,62,48]])
assert_array_equal(c, d)
def test_wrap_boundary(self):
a = [[1,2,3],[3,4,5]]
b = [[2,3,4],[4,5,6]]
c = convolve2d(a,b,'full','wrap')
d = array([[80,80,74,80,80],
[68,68,62,68,68],
[80,80,74,80,80]])
assert_array_equal(c,d)
def test_sym_boundary(self):
a = [[1,2,3],[3,4,5]]
b = [[2,3,4],[4,5,6]]
c = convolve2d(a,b,'full','symm')
d = array([[34,30,44, 62, 66],
[52,48,62, 80, 84],
[82,78,92,110,114]])
assert_array_equal(c,d)
class TestConvolve2d(_TestConvolve2d):
def test_same_mode(self):
e = [[1,2,3],[3,4,5]]
f = [[2,3,4,5,6,7,8],[4,5,6,7,8,9,10]]
g = convolve2d(e,f,'same')
h = array([[22,28,34],
[80,98,116]])
assert_array_equal(g,h)
def test_valid_mode2(self):
# Test when in2.size > in1.size
e = [[1,2,3],[3,4,5]]
f = [[2,3,4,5,6,7,8],[4,5,6,7,8,9,10]]
def _test():
convolve2d(e,f,'valid')
self.assertRaises(ValueError, _test)
def test_consistency_convolve_funcs(self):
# Compare np.convolve, signal.convolve, signal.convolve2d
a = np.arange(5)
b = np.array([3.2, 1.4, 3])
for mode in ['full', 'valid', 'same']:
assert_almost_equal(np.convolve(a, b, mode=mode),
signal.convolve(a, b, mode=mode))
assert_almost_equal(np.squeeze(signal.convolve2d([a], [b],
mode=mode)),
signal.convolve(a, b, mode=mode))
class TestFFTConvolve(TestCase):
def test_real(self):
x = array([1,2,3])
assert_array_almost_equal(signal.fftconvolve(x,x), [1,4,10,12,9.])
def test_complex(self):
x = array([1+1j,2+2j,3+3j])
assert_array_almost_equal(signal.fftconvolve(x,x),
[0+2.0j, 0+8j, 0+20j, 0+24j, 0+18j])
def test_2d_real_same(self):
a = array([[1,2,3],[4,5,6]])
assert_array_almost_equal(signal.fftconvolve(a,a),
array([[1,4,10,12,9],
[8,26,56,54,36],
[16,40,73,60,36]]))
def test_2d_complex_same(self):
a = array([[1+2j,3+4j,5+6j],[2+1j,4+3j,6+5j]])
c = fftconvolve(a,a)
d = array([[-3+4j,-10+20j,-21+56j,-18+76j,-11+60j],
[10j,44j,118j,156j,122j],
[3+4j,10+20j,21+56j,18+76j,11+60j]])
assert_array_almost_equal(c,d)
def test_real_same_mode(self):
a = array([1,2,3])
b = array([3,3,5,6,8,7,9,0,1])
c = fftconvolve(a,b,'same')
d = array([35., 41., 47.])
assert_array_almost_equal(c,d)
def test_real_same_mode2(self):
a = array([3,3,5,6,8,7,9,0,1])
b = array([1,2,3])
c = fftconvolve(a,b,'same')
d = array([9.,20.,25.,35.,41.,47.,39.,28.,2.])
assert_array_almost_equal(c,d)
def test_real_valid_mode(self):
a = array([3,2,1])
b = array([3,3,5,6,8,7,9,0,1])
def _test():
fftconvolve(a,b,'valid')
self.assertRaises(ValueError, _test)
def test_real_valid_mode2(self):
a = array([3,3,5,6,8,7,9,0,1])
b = array([3,2,1])
c = fftconvolve(a,b,'valid')
d = array([24.,31.,41.,43.,49.,25.,12.])
assert_array_almost_equal(c,d)
def test_empty(self):
# Regression test for #1745: crashes with 0-length input.
assert_(fftconvolve([], []).size == 0)
assert_(fftconvolve([5, 6], []).size == 0)
assert_(fftconvolve([], [7]).size == 0)
def test_zero_rank(self):
a = array(4967)
b = array(3920)
c = fftconvolve(a,b)
assert_equal(c,a*b)
def test_single_element(self):
a = array([4967])
b = array([3920])
c = fftconvolve(a,b)
assert_equal(c,a*b)
def test_random_data(self):
np.random.seed(1234)
a = np.random.rand(1233) + 1j*np.random.rand(1233)
b = np.random.rand(1321) + 1j*np.random.rand(1321)
c = fftconvolve(a, b, 'full')
d = np.convolve(a, b, 'full')
assert_(np.allclose(c, d, rtol=1e-10))
@dec.slow
def test_many_sizes(self):
np.random.seed(1234)
def ns():
for j in range(1, 100):
yield j
for j in range(1000, 1500):
yield j
for k in range(50):
yield np.random.randint(1001, 10000)
for n in ns():
msg = 'n=%d' % (n,)
a = np.random.rand(n) + 1j*np.random.rand(n)
b = np.random.rand(n) + 1j*np.random.rand(n)
c = fftconvolve(a, b, 'full')
d = np.convolve(a, b, 'full')
assert_allclose(c, d, atol=1e-10, err_msg=msg)
def test_next_regular(self):
np.random.seed(1234)
def ns():
for j in range(1, 1000):
yield j
yield 2**5 * 3**5 * 4**5 + 1
for n in ns():
m = signaltools._next_regular(n)
msg = "n=%d, m=%d" % (n, m)
assert_(m >= n, msg)
# check regularity
k = m
for d in [2, 3, 5]:
while True:
a, b = divmod(k, d)
if b == 0:
k = a
else:
break
assert_equal(k, 1, err_msg=msg)
def test_next_regular_strict(self):
hams = {
1: 1, 2: 2, 3: 3, 4: 4, 5: 5, 6: 6, 7: 8, 8: 8, 14: 15, 15: 15,
16: 16, 17: 18, 1021: 1024, 1536: 1536, 51200000: 51200000,
510183360: 510183360, 510183360+1: 512000000, 511000000: 512000000,
854296875: 854296875, 854296875+1: 859963392,
196608000000: 196608000000, 196608000000+1: 196830000000,
8789062500000: 8789062500000, 8789062500000+1: 8796093022208,
206391214080000: 206391214080000, 206391214080000+1: 206624260800000,
470184984576000: 470184984576000, 470184984576000+1: 470715894135000,
7222041363087360: 7222041363087360,
7222041363087360+1: 7230196133913600,
# power of 5 5**23
11920928955078125: 11920928955078125,
11920928955078125-1: 11920928955078125,
# power of 3 3**34
16677181699666569: 16677181699666569,
16677181699666569-1: 16677181699666569,
# power of 2 2**54
18014398509481984: 18014398509481984,
18014398509481984-1: 18014398509481984,
# above this, int(ceil(n)) == int(ceil(n+1))
19200000000000000: 19200000000000000,
19200000000000000+1: 19221679687500000,
288230376151711744: 288230376151711744,
288230376151711744+1: 288325195312500000,
288325195312500000-1: 288325195312500000,
288325195312500000: 288325195312500000,
288325195312500000+1: 288555831593533440,
# power of 3 3**83
3990838394187339929534246675572349035227-1:
3990838394187339929534246675572349035227,
3990838394187339929534246675572349035227:
3990838394187339929534246675572349035227,
# power of 2 2**135
43556142965880123323311949751266331066368-1:
43556142965880123323311949751266331066368,
43556142965880123323311949751266331066368:
43556142965880123323311949751266331066368,
# power of 5 5**57
6938893903907228377647697925567626953125-1:
6938893903907228377647697925567626953125,
6938893903907228377647697925567626953125:
6938893903907228377647697925567626953125,
# http://www.drdobbs.com/228700538
# 2**96 * 3**1 * 5**13
290142196707511001929482240000000000000-1:
290142196707511001929482240000000000000,
290142196707511001929482240000000000000:
290142196707511001929482240000000000000,
290142196707511001929482240000000000000+1:
290237644800000000000000000000000000000,
# 2**36 * 3**69 * 5**7
4479571262811807241115438439905203543080960000000-1:
4479571262811807241115438439905203543080960000000,
4479571262811807241115438439905203543080960000000:
4479571262811807241115438439905203543080960000000,
4479571262811807241115438439905203543080960000000+1:
4480327901140333639941336854183943340032000000000,
# 2**37 * 3**44 * 5**42
30774090693237851027531250000000000000000000000000000000000000-1:
30774090693237851027531250000000000000000000000000000000000000,
30774090693237851027531250000000000000000000000000000000000000:
30774090693237851027531250000000000000000000000000000000000000,
30774090693237851027531250000000000000000000000000000000000000+1:
30778180617309082445871527002041377406962596539492679680000000,
}
for x, y in hams.items():
assert_equal(signaltools._next_regular(x), y)
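# For reference, a brute-force sketch of what signaltools._next_regular
# computes -- the smallest "regular" (5-smooth) number m >= n, i.e. the
# smallest m >= n whose only prime factors are 2, 3 and 5. The name and
# approach below are illustrative only, not scipy's implementation:
#
#   def next_regular_naive(n):
#       m = n
#       while True:
#           k = m
#           for d in (2, 3, 5):
#               while k % d == 0:
#                   k //= d
#           if k == 1:
#               return m
#           m += 1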
class TestMedFilt(TestCase):
def test_basic(self):
f = [[50, 50, 50, 50, 50, 92, 18, 27, 65, 46],
[50, 50, 50, 50, 50, 0, 72, 77, 68, 66],
[50, 50, 50, 50, 50, 46, 47, 19, 64, 77],
[50, 50, 50, 50, 50, 42, 15, 29, 95, 35],
[50, 50, 50, 50, 50, 46, 34, 9, 21, 66],
[70, 97, 28, 68, 78, 77, 61, 58, 71, 42],
[64, 53, 44, 29, 68, 32, 19, 68, 24, 84],
[3, 33, 53, 67, 1, 78, 74, 55, 12, 83],
[7, 11, 46, 70, 60, 47, 24, 43, 61, 26],
[32, 61, 88, 7, 39, 4, 92, 64, 45, 61]]
d = signal.medfilt(f, [7, 3])
e = signal.medfilt2d(np.array(f, np.float), [7, 3])
assert_array_equal(d, [[0, 50, 50, 50, 42, 15, 15, 18, 27, 0],
[0, 50, 50, 50, 50, 42, 19, 21, 29, 0],
[50, 50, 50, 50, 50, 47, 34, 34, 46, 35],
[50, 50, 50, 50, 50, 50, 42, 47, 64, 42],
[50, 50, 50, 50, 50, 50, 46, 55, 64, 35],
[33, 50, 50, 50, 50, 47, 46, 43, 55, 26],
[32, 50, 50, 50, 50, 47, 46, 45, 55, 26],
[7, 46, 50, 50, 47, 46, 46, 43, 45, 21],
[0, 32, 33, 39, 32, 32, 43, 43, 43, 0],
[0, 7, 11, 7, 4, 4, 19, 19, 24, 0]])
assert_array_equal(d, e)
def test_none(self):
# Ticket #1124. Ensure this does not segfault.
try:
signal.medfilt(None)
except:
pass
# Expand on this test to avoid a regression with possible contiguous
# numpy arrays that have odd strides. The stride value below gets
# us into wrong memory if used (but it does not need to be used)
dummy = np.arange(10, dtype=np.float64)
a = dummy[5:6]
a.strides = 16
assert_(signal.medfilt(a, 1) == 5.)
class TestWiener(TestCase):
def test_basic(self):
g = array([[5,6,4,3],[3,5,6,2],[2,3,5,6],[1,6,9,7]],'d')
h = array([[2.16374269,3.2222222222, 2.8888888889, 1.6666666667],
[2.666666667, 4.33333333333, 4.44444444444, 2.8888888888],
[2.222222222, 4.4444444444, 5.4444444444, 4.801066874837],
[1.33333333333, 3.92735042735, 6.0712560386, 5.0404040404]])
assert_array_almost_equal(signal.wiener(g), h, decimal=6)
assert_array_almost_equal(signal.wiener(g, mysize=3), h, decimal=6)
class TestCSpline1DEval(TestCase):
def test_basic(self):
y = array([1,2,3,4,3,2,1,2,3.0])
x = arange(len(y))
dx = x[1]-x[0]
cj = signal.cspline1d(y)
x2 = arange(len(y)*10.0)/10.0
y2 = signal.cspline1d_eval(cj, x2, dx=dx,x0=x[0])
# make sure interpolated values are on knot points
assert_array_almost_equal(y2[::10], y, decimal=5)
class TestOrderFilt(TestCase):
def test_basic(self):
assert_array_equal(signal.order_filter([1,2,3],[1,0,1],1),
[2,3,2])
class _TestLinearFilter(TestCase):
dt = None
def test_rank1(self):
x = np.linspace(0, 5, 6).astype(self.dt)
b = np.array([1, -1]).astype(self.dt)
a = np.array([0.5, -0.5]).astype(self.dt)
# Test simple IIR
y_r = np.array([0, 2, 4, 6, 8, 10.]).astype(self.dt)
assert_array_almost_equal(lfilter(b, a, x), y_r)
# Test simple FIR
b = np.array([1, 1]).astype(self.dt)
a = np.array([1]).astype(self.dt)
y_r = np.array([0, 1, 3, 5, 7, 9.]).astype(self.dt)
assert_array_almost_equal(lfilter(b, a, x), y_r)
# Test IIR with initial conditions
b = np.array([1, 1]).astype(self.dt)
a = np.array([1]).astype(self.dt)
zi = np.array([1]).astype(self.dt)
y_r = np.array([1, 1, 3, 5, 7, 9.]).astype(self.dt)
zf_r = np.array([5]).astype(self.dt)
y, zf = lfilter(b, a, x, zi=zi)
assert_array_almost_equal(y, y_r)
assert_array_almost_equal(zf, zf_r)
b = np.array([1, 1, 1]).astype(self.dt)
a = np.array([1]).astype(self.dt)
zi = np.array([1, 1]).astype(self.dt)
y_r = np.array([1, 2, 3, 6, 9, 12.]).astype(self.dt)
zf_r = np.array([9, 5]).astype(self.dt)
y, zf = lfilter(b, a, x, zi=zi)
assert_array_almost_equal(y, y_r)
assert_array_almost_equal(zf, zf_r)
def test_rank2(self):
shape = (4, 3)
x = np.linspace(0, np.prod(shape) - 1, np.prod(shape)).reshape(shape)
x = x.astype(self.dt)
b = np.array([1, -1]).astype(self.dt)
a = np.array([0.5, 0.5]).astype(self.dt)
y_r2_a0 = np.array([[0, 2, 4], [6, 4, 2], [0, 2, 4], [6,4,2]],
dtype=self.dt)
y_r2_a1 = np.array([[0, 2, 0], [6, -4, 6], [12, -10, 12],
[18, -16, 18]], dtype=self.dt)
y = lfilter(b, a, x, axis=0)
assert_array_almost_equal(y_r2_a0, y)
y = lfilter(b, a, x, axis=1)
assert_array_almost_equal(y_r2_a1, y)
def test_rank2_init_cond_a1(self):
# Test initial condition handling along axis 1
shape = (4, 3)
x = np.linspace(0, np.prod(shape) - 1, np.prod(shape)).reshape(shape)
x = x.astype(self.dt)
b = np.array([1, -1]).astype(self.dt)
a = np.array([0.5, 0.5]).astype(self.dt)
y_r2_a0_1 = np.array([[1, 1, 1], [7, -5, 7], [13, -11, 13],
[19, -17, 19]], dtype=self.dt)
zf_r = np.array([-5, -17, -29, -41])[:, np.newaxis].astype(self.dt)
y, zf = lfilter(b, a, x, axis=1, zi=np.ones((4, 1)))
assert_array_almost_equal(y_r2_a0_1, y)
assert_array_almost_equal(zf, zf_r)
def test_rank2_init_cond_a0(self):
# Test initial condition handling along axis 0
shape = (4, 3)
x = np.linspace(0, np.prod(shape) - 1, np.prod(shape)).reshape(shape)
x = x.astype(self.dt)
b = np.array([1, -1]).astype(self.dt)
a = np.array([0.5, 0.5]).astype(self.dt)
y_r2_a0_0 = np.array([[1, 3, 5], [5, 3, 1], [1, 3, 5], [5,3,1]],
dtype=self.dt)
zf_r = np.array([[-23, -23, -23]], dtype=self.dt)
y, zf = lfilter(b, a, x, axis=0, zi=np.ones((1, 3)))
assert_array_almost_equal(y_r2_a0_0, y)
assert_array_almost_equal(zf, zf_r)
def test_rank3(self):
shape = (4, 3, 2)
x = np.linspace(0, np.prod(shape) - 1, np.prod(shape)).reshape(shape)
b = np.array([1, -1]).astype(self.dt)
a = np.array([0.5, 0.5]).astype(self.dt)
# Test last axis
y = lfilter(b, a, x)
for i in range(x.shape[0]):
for j in range(x.shape[1]):
assert_array_almost_equal(y[i, j], lfilter(b, a, x[i, j]))
def test_empty_zi(self):
# Regression test for #880: empty array for zi crashes.
a = np.ones(1).astype(self.dt)
b = np.ones(1).astype(self.dt)
x = np.arange(5).astype(self.dt)
zi = np.ones(0).astype(self.dt)
y, zf = lfilter(b, a, x, zi=zi)
assert_array_almost_equal(y, x)
self.assertTrue(zf.dtype == self.dt)
self.assertTrue(zf.size == 0)
class TestLinearFilterFloat32(_TestLinearFilter):
dt = np.float32
class TestLinearFilterFloat64(_TestLinearFilter):
dt = np.float64
class TestLinearFilterFloatExtended(_TestLinearFilter):
dt = np.longdouble
class TestLinearFilterComplex64(_TestLinearFilter):
dt = np.complex64
class TestLinearFilterComplex128(_TestLinearFilter):
dt = np.complex128
class TestLinearFilterComplexExtended(_TestLinearFilter):
dt = np.longcomplex
class TestLinearFilterDecimal(_TestLinearFilter):
dt = np.dtype(Decimal)
class TestLinearFilterObject(_TestLinearFilter):
dt = np.object_
def test_lfilter_bad_object():
# lfilter: object arrays with non-numeric objects raise TypeError.
# Regression test for ticket #1452.
assert_raises(TypeError, lfilter, [1.0], [1.0], [1.0, None, 2.0])
assert_raises(TypeError, lfilter, [1.0], [None], [1.0, 2.0, 3.0])
assert_raises(TypeError, lfilter, [None], [1.0], [1.0, 2.0, 3.0])
class _TestCorrelateReal(TestCase):
dt = None
def _setup_rank1(self):
        # a.size should be greater than b.size for the tests
a = np.linspace(0, 3, 4).astype(self.dt)
b = np.linspace(1, 2, 2).astype(self.dt)
y_r = np.array([0, 2, 5, 8, 3]).astype(self.dt)
return a, b, y_r
def test_rank1_valid(self):
a, b, y_r = self._setup_rank1()
y = correlate(a, b, 'valid')
assert_array_almost_equal(y, y_r[1:4])
self.assertTrue(y.dtype == self.dt)
def test_rank1_same(self):
a, b, y_r = self._setup_rank1()
y = correlate(a, b, 'same')
assert_array_almost_equal(y, y_r[:-1])
self.assertTrue(y.dtype == self.dt)
def test_rank1_full(self):
a, b, y_r = self._setup_rank1()
y = correlate(a, b, 'full')
assert_array_almost_equal(y, y_r)
self.assertTrue(y.dtype == self.dt)
def _setup_rank3(self):
a = np.linspace(0, 39, 40).reshape((2, 4, 5), order='F').astype(self.dt)
b = np.linspace(0, 23, 24).reshape((2, 3, 4), order='F').astype(self.dt)
y_r = array([[[0., 184., 504., 912., 1360., 888., 472., 160.,],
[46., 432., 1062., 1840., 2672., 1698., 864., 266.,],
[134., 736., 1662., 2768., 3920., 2418., 1168., 314.,],
[260., 952., 1932., 3056., 4208., 2580., 1240., 332.,],
[202., 664., 1290., 1984., 2688., 1590., 712., 150.,],
[114., 344., 642., 960., 1280., 726., 296., 38.,]],
[[23., 400., 1035., 1832., 2696., 1737., 904., 293.,],
[134., 920., 2166., 3680., 5280., 3306., 1640., 474.,],
[325., 1544., 3369., 5512., 7720., 4683., 2192., 535.,],
[571., 1964., 3891., 6064., 8272., 4989., 2324., 565.,],
[434., 1360., 2586., 3920., 5264., 3054., 1312., 230.,],
[241., 700., 1281., 1888., 2496., 1383., 532., 39.,]],
[[22., 214., 528., 916., 1332., 846., 430., 132.,],
[86., 484., 1098., 1832., 2600., 1602., 772., 206.,],
[188., 802., 1698., 2732., 3788., 2256., 1018., 218.,],
[308., 1006., 1950., 2996., 4052., 2400., 1078., 230.,],
[230., 692., 1290., 1928., 2568., 1458., 596., 78.,],
[126., 354., 636., 924., 1212., 654., 234., 0.,]]],
dtype=self.dt)
return a, b, y_r
def test_rank3_valid(self):
a, b, y_r = self._setup_rank3()
y = correlate(a, b, "valid")
assert_array_almost_equal(y, y_r[1:2,2:4,3:5])
self.assertTrue(y.dtype == self.dt)
def test_rank3_same(self):
a, b, y_r = self._setup_rank3()
y = correlate(a, b, "same")
assert_array_almost_equal(y, y_r[0:-1,1:-1,1:-2])
self.assertTrue(y.dtype == self.dt)
def test_rank3_all(self):
a, b, y_r = self._setup_rank3()
y = correlate(a, b)
assert_array_almost_equal(y, y_r)
self.assertTrue(y.dtype == self.dt)
def _get_testcorrelate_class(datatype, base):
class TestCorrelateX(base):
dt = datatype
TestCorrelateX.__name__ = "TestCorrelate%s" % datatype.__name__.title()
return TestCorrelateX
for datatype in [np.ubyte, np.byte, np.ushort, np.short, np.uint, np.int,
                 np.longlong, np.ulonglong, np.float32, np.float64, np.longdouble,
Decimal]:
cls = _get_testcorrelate_class(datatype, _TestCorrelateReal)
globals()[cls.__name__] = cls
class _TestCorrelateComplex(TestCase):
# The numpy data type to use.
dt = None
# The decimal precision to be used for comparing results.
# This value will be passed as the 'decimal' keyword argument of
# assert_array_almost_equal().
decimal = None
def _setup_rank1(self, mode):
np.random.seed(9)
a = np.random.randn(10).astype(self.dt)
a += 1j * np.random.randn(10).astype(self.dt)
b = np.random.randn(8).astype(self.dt)
b += 1j * np.random.randn(8).astype(self.dt)
y_r = (correlate(a.real, b.real, mode=mode) +
correlate(a.imag, b.imag, mode=mode)).astype(self.dt)
y_r += 1j * (-correlate(a.real, b.imag, mode=mode) +
correlate(a.imag, b.real, mode=mode))
return a, b, y_r
def test_rank1_valid(self):
a, b, y_r = self._setup_rank1('valid')
y = correlate(a, b, 'valid')
assert_array_almost_equal(y, y_r, decimal=self.decimal)
self.assertTrue(y.dtype == self.dt)
def test_rank1_same(self):
a, b, y_r = self._setup_rank1('same')
y = correlate(a, b, 'same')
assert_array_almost_equal(y, y_r, decimal=self.decimal)
self.assertTrue(y.dtype == self.dt)
def test_rank1_full(self):
a, b, y_r = self._setup_rank1('full')
y = correlate(a, b, 'full')
assert_array_almost_equal(y, y_r, decimal=self.decimal)
self.assertTrue(y.dtype == self.dt)
def test_rank3(self):
a = np.random.randn(10, 8, 6).astype(self.dt)
a += 1j * np.random.randn(10, 8, 6).astype(self.dt)
b = np.random.randn(8, 6, 4).astype(self.dt)
b += 1j * np.random.randn(8, 6, 4).astype(self.dt)
y_r = (correlate(a.real, b.real)
+ correlate(a.imag, b.imag)).astype(self.dt)
y_r += 1j * (-correlate(a.real, b.imag) + correlate(a.imag, b.real))
y = correlate(a, b, 'full')
assert_array_almost_equal(y, y_r, decimal=self.decimal-1)
self.assertTrue(y.dtype == self.dt)
class TestCorrelate2d(TestCase):
def test_consistency_correlate_funcs(self):
# Compare np.correlate, signal.correlate, signal.correlate2d
a = np.arange(5)
b = np.array([3.2, 1.4, 3])
for mode in ['full', 'valid', 'same']:
assert_almost_equal(np.correlate(a, b, mode=mode),
signal.correlate(a, b, mode=mode))
assert_almost_equal(np.squeeze(signal.correlate2d([a], [b],
mode=mode)),
signal.correlate(a, b, mode=mode))
# Create three classes, one for each complex data type. The actual class
# name will be TestCorrelateComplex###, where ### is the number of bits.
for datatype in [np.csingle, np.cdouble, np.clongdouble]:
cls = _get_testcorrelate_class(datatype, _TestCorrelateComplex)
cls.decimal = int(2 * np.finfo(datatype).precision / 3)
globals()[cls.__name__] = cls
class TestLFilterZI(TestCase):
def test_basic(self):
a = np.array([1.0, -1.0, 0.5])
b = np.array([1.0, 0.0, 2.0])
zi_expected = np.array([5.0, -1.0])
zi = lfilter_zi(b, a)
assert_array_almost_equal(zi, zi_expected)
class TestFiltFilt(TestCase):
def test_basic(self):
out = signal.filtfilt([1, 2, 3], [1, 2, 3], np.arange(12))
assert_equal(out, arange(12))
def test_sine(self):
rate = 2000
t = np.linspace(0, 1.0, rate + 1)
# A signal with low frequency and a high frequency.
xlow = np.sin(5 * 2 * np.pi * t)
xhigh = np.sin(250 * 2 * np.pi * t)
x = xlow + xhigh
b, a = butter(8, 0.125)
z, p, k = tf2zpk(b, a)
# r is the magnitude of the largest pole.
r = np.abs(p).max()
eps = 1e-5
# n estimates the number of steps for the
# transient to decay by a factor of eps.
n = int(np.ceil(np.log(eps) / np.log(r)))
# High order lowpass filter...
y = filtfilt(b, a, x, padlen=n)
# Result should be just xlow.
err = np.abs(y - xlow).max()
assert_(err < 1e-4)
# A 2D case.
x2d = np.vstack([xlow, xlow + xhigh])
y2d = filtfilt(b, a, x2d, padlen=n, axis=1)
assert_equal(y2d.shape, x2d.shape)
err = np.abs(y2d - xlow).max()
assert_(err < 1e-4)
# Use the previous result to check the use of the axis keyword.
# (Regression test for ticket #1620)
y2dt = filtfilt(b, a, x2d.T, padlen=n, axis=0)
assert_equal(y2d, y2dt.T)
def test_axis(self):
# Test the 'axis' keyword on a 3D array.
x = np.arange(10.0 * 11.0 * 12.0).reshape(10, 11, 12)
b, a = butter(3, 0.125)
y0 = filtfilt(b, a, x, padlen=0, axis=0)
y1 = filtfilt(b, a, np.swapaxes(x, 0, 1), padlen=0, axis=1)
assert_array_equal(y0, np.swapaxes(y1, 0, 1))
y2 = filtfilt(b, a, np.swapaxes(x, 0, 2), padlen=0, axis=2)
assert_array_equal(y0, np.swapaxes(y2, 0, 2))
class TestDecimate(TestCase):
def test_basic(self):
x = np.arange(6)
assert_array_equal(signal.decimate(x, 2, n=1).round(), x[::2])
def test_shape(self):
# Regression test for ticket #1480.
z = np.zeros((10, 10))
d0 = signal.decimate(z, 2, axis=0)
assert_equal(d0.shape, (5, 10))
d1 = signal.decimate(z, 2, axis=1)
assert_equal(d1.shape, (10, 5))
class TestHilbert(object):
def test_bad_args(self):
x = np.array([1.0+0.0j])
assert_raises(ValueError, hilbert, x)
x = np.arange(8.0)
assert_raises(ValueError, hilbert, x, N=0)
def test_hilbert_theoretical(self):
#test cases by Ariel Rokem
decimal = 14
pi = np.pi
t = np.arange(0, 2*pi, pi/256)
a0 = np.sin(t)
a1 = np.cos(t)
a2 = np.sin(2*t)
a3 = np.cos(2*t)
a = np.vstack([a0,a1,a2,a3])
h = hilbert(a)
h_abs = np.abs(h)
h_angle = np.angle(h)
h_real = np.real(h)
#The real part should be equal to the original signals:
assert_almost_equal(h_real, a, decimal)
#The absolute value should be one everywhere, for this input:
assert_almost_equal(h_abs, np.ones(a.shape), decimal)
#For the 'slow' sine - the phase should go from -pi/2 to pi/2 in
#the first 256 bins:
assert_almost_equal(h_angle[0,:256], np.arange(-pi/2,pi/2,pi/256),
decimal)
#For the 'slow' cosine - the phase should go from 0 to pi in the
#same interval:
assert_almost_equal(h_angle[1,:256], np.arange(0,pi,pi/256), decimal)
#The 'fast' sine should make this phase transition in half the time:
assert_almost_equal(h_angle[2,:128], np.arange(-pi/2,pi/2,pi/128),
decimal)
#Ditto for the 'fast' cosine:
assert_almost_equal(h_angle[3,:128], np.arange(0,pi,pi/128), decimal)
#The imaginary part of hilbert(cos(t)) = sin(t) Wikipedia
assert_almost_equal(h[1].imag, a0, decimal)
def test_hilbert_axisN(self):
# tests for axis and N arguments
a = np.arange(18).reshape(3,6)
# test axis
aa = hilbert(a, axis=-1)
yield assert_equal, hilbert(a.T, axis=0), aa.T
# test 1d
yield assert_equal, hilbert(a[0]), aa[0]
# test N
aan = hilbert(a, N=20, axis=-1)
yield assert_equal, aan.shape, [3,20]
yield assert_equal, hilbert(a.T, N=20, axis=0).shape, [20,3]
#the next test is just a regression test,
#no idea whether numbers make sense
a0hilb = np.array([0.000000000000000e+00-1.72015830311905j,
1.000000000000000e+00-2.047794505137069j,
1.999999999999999e+00-2.244055555687583j,
3.000000000000000e+00-1.262750302935009j,
4.000000000000000e+00-1.066489252384493j,
5.000000000000000e+00+2.918022706971047j,
8.881784197001253e-17+3.845658908989067j,
-9.444121133484362e-17+0.985044202202061j,
-1.776356839400251e-16+1.332257797702019j,
-3.996802888650564e-16+0.501905089898885j,
1.332267629550188e-16+0.668696078880782j,
-1.192678053963799e-16+0.235487067862679j,
-1.776356839400251e-16+0.286439612812121j,
3.108624468950438e-16+0.031676888064907j,
1.332267629550188e-16-0.019275656884536j,
-2.360035624836702e-16-0.1652588660287j,
0.000000000000000e+00-0.332049855010597j,
3.552713678800501e-16-0.403810179797771j,
8.881784197001253e-17-0.751023775297729j,
9.444121133484362e-17-0.79252210110103j])
yield assert_almost_equal, aan[0], a0hilb, 14, 'N regression'
class TestHilbert2(object):
def test_bad_args(self):
# x must be real.
x = np.array([[1.0 + 0.0j]])
assert_raises(ValueError, hilbert2, x)
# x must be rank 2.
x = np.arange(24).reshape(2, 3, 4)
assert_raises(ValueError, hilbert2, x)
# Bad value for N.
x = np.arange(16).reshape(4, 4)
assert_raises(ValueError, hilbert2, x, N=0)
assert_raises(ValueError, hilbert2, x, N=(2,0))
assert_raises(ValueError, hilbert2, x, N=(2,))
class TestPartialFractionExpansion(TestCase):
def test_invres_distinct_roots(self):
# This test was inspired by github issue 2496.
r = [3/10, -1/6, -2/15]
p = [0, -2, -5]
k = []
a_expected = [1, 3]
b_expected = [1, 7, 10, 0]
a_observed, b_observed = invres(r, p, k)
assert_allclose(a_observed, a_expected)
assert_allclose(b_observed, b_expected)
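        # Sanity check (sketch): invres reassembles the rational function
        # from the expansion sum(r[i] / (s - p[i])) + k(s); here
        # (3/10)/s - (1/6)/(s+2) - (2/15)/(s+5) == (s + 3)/(s**3 + 7*s**2 + 10*s),
        # matching a_expected (numerator) and b_expected (denominator).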
rtypes = ('avg', 'mean', 'min', 'minimum', 'max', 'maximum')
# With the default tolerance, the rtype does not matter
# for this example.
for rtype in rtypes:
a_observed, b_observed = invres(r, p, k, rtype=rtype)
assert_allclose(a_observed, a_expected)
assert_allclose(b_observed, b_expected)
# With unrealistically large tolerances, repeated roots may be inferred
# and the rtype comes into play.
ridiculous_tolerance = 1e10
for rtype in rtypes:
a, b = invres(r, p, k, tol=ridiculous_tolerance, rtype=rtype)
def test_invres_repeated_roots(self):
r = [3/20, -7/36, -1/6, 2/45]
p = [0, -2, -2, -5]
k = []
a_expected = [1, 3]
b_expected = [1, 9, 24, 20, 0]
rtypes = ('avg', 'mean', 'min', 'minimum', 'max', 'maximum')
for rtype in rtypes:
a_observed, b_observed = invres(r, p, k, rtype=rtype)
assert_allclose(a_observed, a_expected)
assert_allclose(b_observed, b_expected)
def test_invres_bad_rtype(self):
r = [3/20, -7/36, -1/6, 2/45]
p = [0, -2, -2, -5]
k = []
assert_raises(ValueError, invres, r, p, k, rtype='median')
class TestVectorstrength(TestCase):
def test_single_1dperiod(self):
events = np.array([.5])
period = 5.
targ_strength = 1.
targ_phase = .1
strength, phase = vectorstrength(events, period)
assert_equal(strength.ndim, 0)
assert_equal(phase.ndim, 0)
assert_almost_equal(strength, targ_strength)
assert_almost_equal(phase, 2*np.pi*targ_phase)
def test_single_2dperiod(self):
events = np.array([.5])
period = [1, 2, 5.]
targ_strength = [1.]*3
targ_phase = np.array([.5, .25, .1])
strength, phase = vectorstrength(events, period)
assert_equal(strength.ndim, 1)
assert_equal(phase.ndim, 1)
assert_array_almost_equal(strength, targ_strength)
assert_almost_equal(phase, 2*np.pi*targ_phase)
def test_equal_1dperiod(self):
events = np.array([.25, .25, .25, .25, .25, .25])
period = 2
targ_strength = 1.
targ_phase = .125
strength, phase = vectorstrength(events, period)
assert_equal(strength.ndim, 0)
assert_equal(phase.ndim, 0)
assert_almost_equal(strength, targ_strength)
assert_almost_equal(phase, 2*np.pi*targ_phase)
def test_equal_2dperiod(self):
events = np.array([.25, .25, .25, .25, .25, .25])
period = [1, 2,]
targ_strength = [1.]*2
targ_phase = np.array([.25, .125])
strength, phase = vectorstrength(events, period)
assert_equal(strength.ndim, 1)
assert_equal(phase.ndim, 1)
assert_almost_equal(strength, targ_strength)
assert_almost_equal(phase, 2*np.pi*targ_phase)
def test_spaced_1dperiod(self):
events = np.array([.1, 1.1, 2.1, 4.1, 10.1])
period = 1
targ_strength = 1.
targ_phase = .1
strength, phase = vectorstrength(events, period)
assert_equal(strength.ndim, 0)
assert_equal(phase.ndim, 0)
assert_almost_equal(strength, targ_strength)
assert_almost_equal(phase, 2*np.pi*targ_phase)
def test_spaced_2dperiod(self):
events = np.array([.1, 1.1, 2.1, 4.1, 10.1])
period = [1, .5]
targ_strength = [1.]*2
targ_phase = np.array([.1, .2])
strength, phase = vectorstrength(events, period)
assert_equal(strength.ndim, 1)
assert_equal(phase.ndim, 1)
assert_almost_equal(strength, targ_strength)
assert_almost_equal(phase, 2*np.pi*targ_phase)
def test_partial_1dperiod(self):
events = np.array([.25, .5, .75])
period = 1
targ_strength = 1./3.
targ_phase = .5
strength, phase = vectorstrength(events, period)
assert_equal(strength.ndim, 0)
assert_equal(phase.ndim, 0)
assert_almost_equal(strength, targ_strength)
assert_almost_equal(phase, 2*np.pi*targ_phase)
def test_partial_2dperiod(self):
events = np.array([.25, .5, .75])
period = [1., 1., 1., 1.]
targ_strength = [1./3.]*4
targ_phase = np.array([.5, .5, .5, .5])
strength, phase = vectorstrength(events, period)
assert_equal(strength.ndim, 1)
assert_equal(phase.ndim, 1)
assert_almost_equal(strength, targ_strength)
assert_almost_equal(phase, 2*np.pi*targ_phase)
def test_opposite_1dperiod(self):
events = np.array([0, .25, .5, .75])
period = 1.
targ_strength = 0
strength, phase = vectorstrength(events, period)
assert_equal(strength.ndim, 0)
assert_equal(phase.ndim, 0)
assert_almost_equal(strength, targ_strength)
def test_opposite_2dperiod(self):
events = np.array([0, .25, .5, .75])
period = [1.]*10
targ_strength = [0.]*10
strength, phase = vectorstrength(events, period)
assert_equal(strength.ndim, 1)
assert_equal(phase.ndim, 1)
assert_almost_equal(strength, targ_strength)
def test_2d_events_ValueError(self):
events = np.array([[1, 2]])
period = 1.
assert_raises(ValueError, vectorstrength, events, period)
def test_2d_period_ValueError(self):
events = 1.
period = np.array([[1]])
assert_raises(ValueError, vectorstrength, events, period)
def test_zero_period_ValueError(self):
events = 1.
period = 0
assert_raises(ValueError, vectorstrength, events, period)
def test_negative_period_ValueError(self):
events = 1.
period = -1
assert_raises(ValueError, vectorstrength, events, period)
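# Background for the tests above (sketch): vectorstrength maps each event
# time t to a phase 2*pi*t/period and averages the unit phasors
# exp(1j*phase). The strength is the magnitude of that mean (1.0 for
# perfectly phase-locked events, ~0 for evenly spread ones) and the phase
# is its angle, which is why the expected phases are written as
# 2*np.pi*targ_phase.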
if __name__ == "__main__":
run_module_suite()
|
GbalsaC/bitnamiP
|
venv/lib/python2.7/site-packages/scipy/signal/tests/test_signaltools.py
|
Python
|
agpl-3.0
| 40,574
|
from setuptools import setup
setup(
name="master",
version="0.0.0",
packages=["master"],
install_requires=[
"buildbot==0.8.9",
"psycopg2"
]
)
|
pyfarm/pyfarm-build
|
master/setup.py
|
Python
|
apache-2.0
| 179
|
from __future__ import absolute_import
from django import forms
from django.contrib import messages
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _
from sentry import roles
from sentry.models import AuditLogEntryEvent, Organization
from sentry.web.frontend.base import OrganizationView
class OrganizationSettingsForm(forms.ModelForm):
    name = forms.CharField(help_text=_('The name of your organization. e.g. My Company'))
slug = forms.SlugField(
label=_('Short name'),
help_text=_('A unique ID used to identify this organization.'),
)
allow_joinleave = forms.BooleanField(
label=_('Open Membership'),
help_text=_('Allow organization members to freely join or leave any team.'),
required=False,
)
default_role = forms.ChoiceField(
label=_('Default Role'),
choices=roles.get_choices(),
help_text=_('The default role new members will receive.'),
)
enhanced_privacy = forms.BooleanField(
label=_('Enhanced Privacy'),
help_text=_('Enable enhanced privacy controls to limit personally identifiable information (PII) as well as source code in things like notifications.'),
required=False,
)
allow_shared_issues = forms.BooleanField(
label=_('Allow Shared Issues'),
help_text=_('Enable sharing of limited details on issues to anonymous users.'),
required=False,
)
require_scrub_data = forms.BooleanField(
label=_('Require Data Scrubber'),
help_text=_('Require server-side data scrubbing be enabled for all projects.'),
required=False
)
require_scrub_defaults = forms.BooleanField(
label=_('Require Using Default Scrubbers'),
help_text=_('Require the default scrubbers be applied to prevent things like passwords and credit cards from being stored for all projects.'),
required=False
)
sensitive_fields = forms.CharField(
label=_('Global additional sensitive fields'),
help_text=_('Additional field names to match against when scrubbing data for all projects. '
'Separate multiple entries with a newline.<br /><strong>Note: These fields will be used in addition to project specific fields.</strong>'),
widget=forms.Textarea(attrs={
'placeholder': mark_safe(_('e.g. email')),
'class': 'span8',
'rows': '3',
}),
required=False,
)
safe_fields = forms.CharField(
label=_('Global safe fields'),
help_text=_('Field names which data scrubbers should ignore. '
'Separate multiple entries with a newline.<br /><strong>Note: These fields will be used in addition to project specific fields.</strong>'),
widget=forms.Textarea(attrs={
'placeholder': mark_safe(_('e.g. email')),
'class': 'span8',
'rows': '3',
}),
required=False,
)
require_scrub_ip_address = forms.BooleanField(
label=_('Prevent Storing of IP Addresses'),
        help_text=_('Prevent IP addresses from being stored for new events on all projects.'),
required=False
)
early_adopter = forms.BooleanField(
label=_('Early Adopter'),
help_text=_('Opt-in to new features before they\'re released to the public.'),
required=False
)
class Meta:
fields = ('name', 'slug', 'default_role')
model = Organization
def __init__(self, has_delete, *args, **kwargs):
super(OrganizationSettingsForm, self).__init__(*args, **kwargs)
if not has_delete:
del self.fields['default_role']
def clean_sensitive_fields(self):
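        # Split the newline-separated field list, lowercasing and trimming
        # each entry, e.g. u'Password\n Token ' -> ['password', 'token'].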
value = self.cleaned_data.get('sensitive_fields')
if not value:
return
return filter(bool, (v.lower().strip() for v in value.split('\n')))
def clean_safe_fields(self):
value = self.cleaned_data.get('safe_fields')
if not value:
return
return filter(bool, (v.lower().strip() for v in value.split('\n')))
class OrganizationSettingsView(OrganizationView):
required_scope = 'org:write'
def get_form(self, request, organization):
has_delete = request.access.has_scope('org:delete')
return OrganizationSettingsForm(
has_delete=has_delete,
data=request.POST or None,
instance=organization,
initial={
'default_role': organization.default_role,
'allow_joinleave': bool(organization.flags.allow_joinleave),
'enhanced_privacy': bool(organization.flags.enhanced_privacy),
'allow_shared_issues': bool(not organization.flags.disable_shared_issues),
'require_scrub_data': bool(organization.get_option('sentry:require_scrub_data', False)),
'require_scrub_defaults': bool(organization.get_option('sentry:require_scrub_defaults', False)),
'sensitive_fields': '\n'.join(organization.get_option('sentry:sensitive_fields', None) or []),
'safe_fields': '\n'.join(organization.get_option('sentry:safe_fields', None) or []),
'require_scrub_ip_address': bool(organization.get_option('sentry:require_scrub_ip_address', False)),
'early_adopter': bool(organization.flags.early_adopter),
}
)
def handle(self, request, organization):
form = self.get_form(request, organization)
if form.is_valid():
organization = form.save(commit=False)
organization.flags.allow_joinleave = form.cleaned_data['allow_joinleave']
organization.flags.enhanced_privacy = form.cleaned_data['enhanced_privacy']
organization.flags.disable_shared_issues = not form.cleaned_data['allow_shared_issues']
organization.flags.early_adopter = form.cleaned_data['early_adopter']
organization.save()
for opt in (
'require_scrub_data',
'require_scrub_defaults',
'sensitive_fields',
'safe_fields',
'require_scrub_ip_address'):
value = form.cleaned_data.get(opt)
if value is None:
organization.delete_option('sentry:%s' % (opt,))
else:
organization.update_option('sentry:%s' % (opt,), value)
self.create_audit_entry(
request,
organization=organization,
target_object=organization.id,
event=AuditLogEntryEvent.ORG_EDIT,
data=organization.get_audit_log_data(),
)
messages.add_message(request, messages.SUCCESS,
_('Changes to your organization were saved.'))
return HttpResponseRedirect(reverse('sentry-organization-settings', args=[organization.slug]))
context = {
'form': form,
}
return self.respond('sentry/organization-settings.html', context)
|
alexm92/sentry
|
src/sentry/web/frontend/organization_settings.py
|
Python
|
bsd-3-clause
| 7,227
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-04-18 22:59
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('about', '0001_initial'),
]
operations = [
migrations.RemoveField(
model_name='entry',
name='notes',
),
]
|
Arlefreak/ApiArlefreak
|
about/migrations/0002_remove_entry_notes.py
|
Python
|
mit
| 377
|
"""
Checks that Pylint does not complain about foreign key sets on models
"""
# pylint: disable=missing-docstring, consider-using-f-string
from django.db import models
class SomeModel(models.Model):
name = models.CharField(max_length=20)
timestamp = models.DateTimeField()
class OtherModel(models.Model):
something = models.ForeignKey(SomeModel, on_delete=models.CASCADE)
elsething = models.OneToOneField(SomeModel, on_delete=models.CASCADE)
def something_doer(self):
part_a = f"{self.something.name} - {self.something.timestamp}"
part_b = self.elsething.name
return part_a, part_b
|
landscapeio/pylint-django
|
pylint_django/tests/input/func_noerror_foreign_key_attributes.py
|
Python
|
gpl-2.0
| 632
|
#
# Copyright 2012 New Dream Network, LLC (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
# Register options for the service
OPTS = [
cfg.IntOpt('port',
default=8777,
deprecated_name='metering_api_port',
deprecated_group='DEFAULT',
help='The port for the aodh API server.',
),
cfg.StrOpt('host',
default='0.0.0.0',
help='The listen IP for the aodh API server.',
),
]
CONF = cfg.CONF
opt_group = cfg.OptGroup(name='api',
title='Options for the aodh-api service')
CONF.register_group(opt_group)
CONF.register_opts(OPTS, opt_group)
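# Once registered, the options are reachable through the group on the
# global CONF object, e.g. (sketch, assuming the config files have been
# parsed):
#
#   from aodh.api import CONF
#   bind_address = (CONF.api.host, CONF.api.port)  # ('0.0.0.0', 8777) by default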
|
chungg/aodh
|
aodh/api/__init__.py
|
Python
|
apache-2.0
| 1,224
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, models, _
class ResCompany(models.Model):
_inherit = "res.company"
@api.model
def create(self, vals):
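        # Ensure the new company has a pricelist in its own currency,
        # creating one if needed, and register it as the company-level
        # default for res.partner's property_product_pricelist.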
new_company = super(ResCompany, self).create(vals)
ProductPricelist = self.env['product.pricelist']
pricelist = ProductPricelist.search([('currency_id', '=', new_company.currency_id.id), ('company_id', '=', False)], limit=1)
if not pricelist:
pricelist = ProductPricelist.create({
'name': new_company.name,
'currency_id': new_company.currency_id.id,
})
field_id = self.env['ir.model.fields'].search([('model', '=', 'res.partner'), ('name', '=', 'property_product_pricelist')])
self.env['ir.property'].create({
'name': 'property_product_pricelist',
'company_id': new_company.id,
'value_reference': 'product.pricelist,%s' % pricelist.id,
'fields_id': field_id.id
})
return new_company
|
ChawalitK/odoo
|
addons/product/res_company.py
|
Python
|
gpl-3.0
| 1,090
|
from gpiozero import LED
from time import sleep
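# Traffic-light sequence on GPIO pins 22/23/24 (BCM numbering, assumed to
# be wired to green/yellow/red LEDs): green 5 s -> yellow 2 s -> red 5 s,
# repeating forever.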
g_led = LED(22)
y_led = LED(23)
r_led = LED(24)
g_time = 5
y_time = 2
r_time = 5
while True:
g_led.on()
y_led.off()
r_led.off()
sleep(g_time)
g_led.off()
y_led.on()
r_led.off()
sleep(y_time)
g_led.off()
y_led.off()
r_led.on()
sleep(r_time)
|
rdonelli/futurelearn-raspberry
|
week2/blink_traffic_light.py
|
Python
|
gpl-2.0
| 339
|
from flask import current_app, request as current_request
from werkzeug.wrappers import BaseResponse
from . import renderers as _renderers, normalizers
from .renderers import RendererNotFound, UnrenderedResponse
from functools import wraps
from collections import defaultdict
import logging
import datetime
from types import NoneType, GeneratorType
class Pushrod(object):
"""
The main resolver class for Pushrod.
:param renderers: A tuple of renderers that are registered immediately (can also be strings, which are currently expanded to flask.ext.pushrod.renderers.%s_renderer)
"""
#: The query string argument checked for an explicit renderer (to override header-based content type negotiation).
#:
#: .. note::
#: This is set on the class level, not the instance level.
format_arg_name = "format"
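    # For example, a request for ``/users?format=json`` bypasses Accept
    # header negotiation and forces the ``json`` renderer.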
@property
def logger(self):
"""
Gets the logger to use, mainly for internal use.
The current resolution order looks as follows:
- :attr:`self.app <app>`
- :data:`flask.current_app`
- :mod:`logging`
"""
if self.app:
return self.app.logger
elif current_app:
return current_app.logger
else:
return logging
def __init__(self, app=None, renderers=('json', 'jinja2',), default_renderer='html'):
#: The renderers keyed by MIME type.
self.mime_type_renderers = {}
#: The renderers keyed by output format name (such as html).
self.named_renderers = {}
#: Hooks for overriding a class' normalizer, even if they explicitly define one.
#:
#: All items should be lists of callables. All values default to an empty list.
self.normalizer_overrides = defaultdict(lambda: [])
#: Hooks for providing a class with a fallback normalizer, which is called only if it doesn't define one. All items should be callables.
self.normalizers = {
basestring: normalizers.normalize_basestring,
list: normalizers.normalize_iterable,
tuple: normalizers.normalize_iterable,
GeneratorType: normalizers.normalize_iterable,
dict: normalizers.normalize_dict,
int: normalizers.normalize_int,
long: normalizers.normalize_int,
float: normalizers.normalize_float,
bool: normalizers.normalize_bool,
NoneType: normalizers.normalize_none,
datetime.datetime: normalizers.normalize_basestring,
datetime.date: normalizers.normalize_basestring,
datetime.time: normalizers.normalize_basestring,
}
#: The current app, only set from the constructor, not if using :meth:`init_app`.
self.app = app or None
for renderer in renderers:
if isinstance(renderer, basestring):
renderer = getattr(_renderers, '%s_renderer' % renderer)
self.register_renderer(renderer)
if isinstance(default_renderer, basestring):
if default_renderer in self.named_renderers:
default_renderer = self.named_renderers[default_renderer]
else:
self.logger.warning(u"Attempted to set the unrenderable format '%s' as the default format, disabling", default_renderer)
default_renderer = None
self.default_renderer = default_renderer
if app:
self.init_app(app)
def init_app(self, app):
"""
Registers the Pushrod resolver with the Flask app (can also be done by passing the app to the constructor).
"""
app.extensions['pushrod'] = self
def register_renderer(self, renderer, default=False):
"""
Registers a renderer with the Pushrod resolver (can also be done by passing the renderer to the constructor).
"""
if not (hasattr(renderer, '_is_pushrod_renderer') and renderer._is_pushrod_renderer):
raise TypeError(u'Got passed an invalid renderer')
for name in renderer.renderer_names:
self.named_renderers[name] = renderer
for mime_type in renderer.renderer_mime_types:
self.mime_type_renderers[mime_type] = renderer
def get_renderers_for_request(self, request=None):
"""
Inspects a Flask :class:`~flask.Request` for hints regarding what renderer to use.
This is found out by first looking in the query string argument named after :attr:`~format_arg_name` (``format`` by default), and then matching the contents of the Accept:-header. If nothing is found anyway, then :attr:`~default_renderer` is used.
.. note::
If the query string argument is specified but doesn't exist (or fails), then the request fails immediately, without trying the other methods.
:param request: The request to be inspected (defaults to :obj:`flask.request`)
:returns: List of matching renderers, in order of user preference
"""
if request is None:
request = current_request
if self.format_arg_name in request.args:
renderer_name = request.args[self.format_arg_name]
if renderer_name in self.named_renderers:
return [self.named_renderers[renderer_name]]
else:
return []
matching_renderers = [self.mime_type_renderers[mime_type]
for mime_type in request.accept_mimetypes.itervalues()
if mime_type in self.mime_type_renderers]
if self.default_renderer:
matching_renderers.append(self.default_renderer)
return matching_renderers
def render_response(self, response, renderer=None, renderer_kwargs=None):
"""
Renders an unrendered response (a bare value, a (response, status, headers)-:obj:`tuple`, or an :class:`~flask.ext.pushrod.renderers.UnrenderedResponse` object).
:throws RendererNotFound: If a usable renderer could not be found (explicit renderer argument points to an invalid render, or no acceptable mime types can be used as targets and there is no default renderer)
:param response: The response to render
        :param renderer: The renderer(s) to use (defaults to using :meth:`get_renderers_for_request`)
:param renderer_kwargs: Any extra arguments to pass to the renderer
.. note::
For convenience, a bare string (:obj:`unicode`, :obj:`str`, or any other :obj:`basestring` derivative), or a derivative of :class:`werkzeug.wrappers.BaseResponse` (such as :class:`flask.Response`) is passed through unchanged.
.. note::
A renderer may mark itself as unable to render a specific response by returning :obj:`None`, in which case the next possible renderer is attempted.
"""
if renderer:
if hasattr(renderer, "__iter__"):
renderers = renderer
else:
renderers = [renderer]
else:
renderers = self.get_renderers_for_request()
if renderer_kwargs is None:
renderer_kwargs = {}
if isinstance(response, tuple):
response, status, headers = response
else:
status = headers = None
if isinstance(response, (basestring, BaseResponse)):
return response
if not isinstance(response, UnrenderedResponse):
response = UnrenderedResponse(response, status, headers)
for renderer in renderers:
rendered = renderer(response, **renderer_kwargs)
if rendered is not NotImplemented:
return rendered
raise RendererNotFound()
def normalize(self, obj):
"""
Runs an object through the normalizer mechanism, with the goal of producing a value consisting only of "native types" (:obj:`unicode`, :obj:`int`, :obj:`long`, :obj:`float`, :obj:`dict`, :obj:`list`, etc).
The resolution order looks like this:
- Loop through :attr:`self.normalizer_overrides[type(obj)] <normalizer_overrides>` (taking parent classes into account), should be a callable taking (obj, pushrod), falls through on :obj:`NotImplemented`
- :attr:`self.normalizers[type(obj)] <normalizers>` (taking parent classes into account), should be a callable taking (obj, pushrod), falls through on :obj:`NotImplemented`
See :ref:`bundled-normalizers` for all default normalizers.
:param obj: The object to normalize.
"""
for cls in type(obj).__mro__:
for override in self.normalizer_overrides[cls]:
attempt = override(obj, self)
if attempt is not NotImplemented:
return attempt
attempt = normalizers.normalize_object(obj, self)
if attempt is not NotImplemented:
return attempt
for cls in type(obj).__mro__:
if cls in self.normalizers:
attempt = self.normalizers[cls](obj, self)
if attempt is not NotImplemented:
return attempt
return NotImplemented
def pushrod_view(**renderer_kwargs):
"""
Decorator that wraps view functions and renders their responses through :meth:`flask.ext.pushrod.Pushrod.render_response`.
.. note::
Views should only return :obj:`dicts <dict>` or a type that :meth:`normalizes <Pushrod.normalize>` down to :obj:`dicts <dict>`.
:param renderer_kwargs: Any extra arguments to pass to the renderer
"""
def decorator(f):
@wraps(f)
def wrapper(*view_args, **view_kwargs):
response = f(*view_args, **view_kwargs)
return current_app.extensions['pushrod'].render_response(response, renderer_kwargs=renderer_kwargs)
return wrapper
return decorator
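# Hypothetical usage sketch (the route, template name and returned fields
# are assumed for illustration, not part of this module):
#
#   @app.route('/user/<int:user_id>')
#   @pushrod_view(jinja_template='user.html')
#   def get_user(user_id):
#       return {'id': user_id}
#
# The decorated view returns a plain dict; Pushrod normalizes it and picks
# a renderer per request (e.g. JSON for ``?format=json``).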
|
teozkr/Flask-Pushrod
|
flask_pushrod/resolver.py
|
Python
|
mit
| 9,877
|
# Licensed under the MIT license
# http://opensource.org/licenses/mit-license.php
# Copyright 2006, Frank Scholz <coherence@beebits.net>
class RenderingControlClient:
def __init__(self, service):
self.service = service
self.namespace = service.get_type()
self.url = service.get_control_url()
self.service.subscribe()
self.service.client = self
#print "RenderingControlClient __init__", self.url
#def __del__(self):
# #print "RenderingControlClient deleted"
# pass
def remove(self):
self.service.remove()
self.service = None
self.namespace = None
self.url = None
del self
def subscribe_for_variable(self, var_name, callback, signal=False):
self.service.subscribe_for_variable(var_name, instance=0, callback=callback, signal=signal)
def list_presets(self, instance_id=0):
action = self.service.get_action('ListPresets')
return action.call(InstanceID=instance_id)
def select_presets(self, instance_id=0, preset_name=''):
action = self.service.get_action('SelectPresets')
return action.call(InstanceID=instance_id,
PresetName=preset_name)
def get_mute(self, instance_id=0, channel='Master'):
action = self.service.get_action('GetMute')
return action.call(InstanceID=instance_id,
Channel=channel)
def set_mute(self, instance_id=0, channel='Master', desired_mute=0):
action = self.service.get_action('SetMute')
return action.call(InstanceID=instance_id,
Channel=channel,
DesiredMute=desired_mute)
def get_volume(self, instance_id=0, channel='Master'):
action = self.service.get_action('GetVolume')
return action.call(InstanceID=instance_id,
Channel=channel)
def set_volume(self, instance_id=0, channel='Master', desired_volume=0):
action = self.service.get_action('SetVolume')
return action.call(InstanceID=instance_id,
Channel=channel,
DesiredVolume=desired_volume)
def get_volume_db(self, instance_id=0, channel='Master'):
action = self.service.get_action('GetVolumeDB')
return action.call(InstanceID=instance_id,
Channel=channel)
def set_volume_db(self, instance_id=0, channel='Master', desired_volume=0):
action = self.service.get_action('SetVolumeDB')
return action.call(InstanceID=instance_id,
Channel=channel,
DesiredVolume=desired_volume)
def get_volume_db_range(self, instance_id=0, channel='Master'):
action = self.service.get_action('GetVolumeDBRange')
return action.call(InstanceID=instance_id,
Channel=channel)
def get_loudness(self, instance_id=0, channel='Master'):
action = self.service.get_action('GetLoudness')
return action.call(InstanceID=instance_id,
Channel=channel)
def set_loudness(self, instance_id=0, channel='Master', desired_loudness=0):
action = self.service.get_action('SetLoudness')
return action.call(InstanceID=instance_id,
Channel=channel,
DesiredLoudness=desired_loudness)
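# Hypothetical usage sketch, assuming ``service`` is a RenderingControl
# service object obtained from Coherence's device discovery (Coherence is
# Twisted-based, so the action calls are expected to return Deferreds):
#
#   client = RenderingControlClient(service)
#   d = client.set_volume(desired_volume=40)
#   d.addCallback(lambda _: client.get_volume())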
|
furbrain/Coherence
|
coherence/upnp/services/clients/rendering_control_client.py
|
Python
|
mit
| 3,458
|
"""
Python 3 client library for the PayTrace Payment Gateway public API.
The PayTrace API is documented in a single PDF file available here:
https://paytrace.com/manuals/PayTraceAPIUserGuideXML.pdf (dated July, 2011)
Section references in doc strings below refer to this document.
"""
import sys
from datetime import datetime
from textwrap import TextWrapper
from urllib.parse import urlencode, quote_plus
import requests
#__all__ = ['parse_response', 'send_api_request']
POST_URL = 'https://paytrace.com/api/default.pay'
def parse_response(s):
"""
Parse a PayTrace response string into a dictionary.
See section 5.1.
"""
if not s.endswith('|'):
raise Exception('Unexpected response: %r' % s[:100])
try:
        api_response_dict = dict(pair.split('~') for pair in s[:-1].split('|'))
except:
raise Exception('Malformed response: %r' % s)
return api_response_dict
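# Example with a made-up response string in the documented '|'/'~' format:
#
#   parse_response('RESPONSE~101. Approved|TRANSACTIONID~123|')
#   -> {'RESPONSE': '101. Approved', 'TRANSACTIONID': '123'}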
def send_api_request(api_request, post_url=POST_URL):
"""
Send a PayTrace API request and get a response.
    api_request -- an instance of a PayTraceRequest subclass
See section 3.2.
Variable naming gets a little confusing here because both requests
and PayTrace have a notion of "requests" and "responses". For clarity,
you'll see
requests.post (an HTTP request)
requests.response (an HTTP response object)
    api_request (an instance of a PayTraceRequest subclass)
api_response_dict (a PayTrace response string parsed into a dictionary)
TEMPORARY:
Here are the responses.response object's attributes and methods:
response.config
response.content --> bytes
response.cookies
response.encoding
response.error
response.headers
response.history
response.iter_content(
response.iter_lines(
response.json
response.ok
response.raise_for_status(
response.raw --> requests.packages.urllib3.response.HTTPResponse
response.reason
response.request
response.status_code
response.text --> Unicode
response.url
"""
utc_timestamp = '%s+00:00' % datetime.utcnow()
try:
response = requests.post(
post_url,
data=str(api_request),
headers={'Content-Type': 'application/x-www-form-urlencoded'},
timeout=60,
)
except KeyboardInterrupt:
raise
except:
exc_class, exc_instance = sys.exc_info()[:2]
raise Exception(
'Error sending HTTP POST.',
{'exc_instance': exc_instance,
'api_request': repr(api_request),
'api_request_raw': str(api_request),
'utc_timestamp': utc_timestamp}
)
try:
api_response_dict = parse_response(response.text)
except KeyboardInterrupt:
raise
except:
exc_class, exc_instance = sys.exc_info()[:2]
raise Exception(
'Error parsing HTTP response.',
{'exc_instance': exc_instance,
'api_request': repr(api_request),
'api_request_raw': str(api_request),
'api_response': response.text[:100],
'utc_timestamp': utc_timestamp}
)
return api_response_dict
def uppercase_keys(d):
"""Change a dictionary in-place so that all keys are uppercase."""
    # Iterate over a snapshot of the keys: entries are added and removed
    # while looping, which would otherwise raise RuntimeError in Python 3.
    for key in list(d):
        KEY = key.upper()
        if key != KEY:
            d[KEY] = d[key]
            del d[key]
def set_credentials(username, password):
"""
To use the PayTrace API, you need to supply the user name and password for
a valid PayTrace account. For example, to use the PayTrace demo account,
run set_credentials('demo123', 'demo123').
"""
PayTraceRequest.UN = username
PayTraceRequest.PSWD = password
def set_test_mode():
"""
All transaction types (TranxType) of the ProcessTranx method can be
processed as test transactions by adding a TEST attribute. Transactions
processed with a TEST value of "Y" will be processed as test transactions
with standardized test responses. Test transactions will not place a hold
on the customer's credit card.
"""
PayTraceRequest._test_mode = True
#
# Metaclass that customizes each class's repr.
#
class MetaRepr(type):
"""
Provide a customized repr for classes defining their metaclass as this
class. To use, your class should implement a classmethod called
__classrepr__ that returns the repr you are after.
Just as the __repr__ method on a class generates the repr for instances
of that class, the __repr__ method on the class's type (its metaclass)
generates the repr for the class itself.
See http://www.aleax.it/Python/osc05_bla_dp.pdf, "A tiny custom metaclass"
(p. 21).
"""
def __repr__(cls):
if hasattr(cls, '__classrepr__'):
return cls.__classrepr__()
else:
return repr(cls)
#
# Data definition classes
#
class PayTraceRequest(metaclass=MetaRepr):
"""
PayTrace request abstract base class.
Provide constant data fields for subclasses and ensure required and
optional fields are correctly supplied by subclasses.
"""
UN = None
PSWD = None
TERMS = 'Y'
_required = NotImplemented
_optional = NotImplemented
_discretionary_data_allowed = NotImplemented
_test_mode = False
def __init__(self, **kwargs):
"""
Convert kwargs to uppercased instance attributes, assert all required
fields are supplied, and verify that optional fields are acceptable.
"""
assert self.UN and self.PSWD, (
'You first need to define UN and PSWD by running '
"set_credentials('username', 'password')"
)
# Normalize kwargs to uppercase.
uppercase_keys(kwargs)
# Add kwargs as uppercased instance attributes.
for key, value in kwargs.items():
setattr(self, key, str(value))
# TEST is a special case allowed for all ProcessTranx transactions.
if self.METHOD == 'ProcessTranx' and PayTraceRequest._test_mode:
# If test mode has been enabled by running set_test_mode(),
# inject TEST here. All ProcessTranx requests will be submitted
# as test transactions.
self.TEST = 'Y'
fields = set(self._fields)
required = set(self._required)
optional = set(self._optional)
name = self.__class__.__name__
# Overlapping required and optional fields check.
assert not required & optional, (
'{name}._required and {name}._optional must not overlap'
.format(**locals())
)
# If conditional fields are defined, add at least one set to required.
self._required = self._required[:]
if hasattr(self, '_conditional'):
for field, field_list in self._conditional.items():
if field in kwargs:
self._required += field_list
required.update(field_list)
break
else:
field_sets = '\n'.join(
' {0}'.format(field_list)
for field_list in self._conditional.values()
)
raise AssertionError(
'One of the following sets of fields is required:\n'
'{field_sets}'
.format(field_sets=field_sets)
)
# Missing fields check.
missing = ', '.join(required - fields)
if missing:
raise KeyError(
'{name} has missing fields: {missing}'.format(**locals())
)
# Extra fields check.
extra = ', '.join(sorted(fields - required - optional))
if extra:
if self._discretionary_data_allowed is True:
# Extra fields found but discretionary data is allowed.
sys.stderr.write(
'Note: Extra fields found (ok if discretionary data): %s\n'
% extra
)
else:
raise KeyError(
'{name} defines extra fields: {extra}'.format(**locals())
)
@classmethod
def __classrepr__(cls):
"""
Note: ipython3 doesn't seem to render class reprs correctly -- may be
a bug in the beta version I used. Looks fine in python3 and ipython2.
"""
def field_items(field_list):
return list((attr, getattr(cls, attr, '')) for attr in field_list)
def format_fields(field_list):
s = ', '.join(
'{field}={value!r}'.format(field=field.lower(), value=value)
for field, value in field_items(field_list)
if not value # show only fields without default values
)
return s + ',' if s else '# <none>'
textwrapper = TextWrapper(
initial_indent=' ' * 4,
subsequent_indent=' ' * 4,
)
l = []
l.append('\n{cls.__name__}('.format(cls=cls))
l.append(' # Required fields')
l.append(textwrapper.fill(format_fields(cls._required)))
if getattr(cls, '_conditional', None):
for label, fields in cls._conditional.items():
l.append('\n # Required if using ' + label)
l.append(textwrapper.fill(format_fields(fields)))
if cls._discretionary_data_allowed is True:
l.append(
'\n '
'# Customer-defined discretionary data may also be included.'
)
l.append('\n # Optional fields')
l.append(textwrapper.fill(format_fields(cls._optional)))
l.append(')\n')
return '\n'.join(l)
@property
def _fields(self):
return [s for s in dir(self) if not s.startswith('_')]
def __str__(self):
"""
Serialize into a PayTrace request string. For example,
PARMLIST=METHOD%7EProcessTranx%7CPSWD%7Edemo123%7CTERMS%7EY%7CTRANXID
%7E1539%7CTRANXTYPE%7EVoid%7CUN%7Edemo123%7C
See section 3.2.
"""
items = ((key, getattr(self, key)) for key in self._fields)
params = '|'.join('~'.join(item) for item in items) + '|'
request_string = urlencode(dict(PARMLIST=params))
return request_string
def __repr__(self):
"""
Output an interpretable repr. For example,
VoidRequest(**{'TRANXID': '1539', 'TRANXTYPE': 'Void',
'UN': 'demo123', 'PSWD': 'demo123', 'TERMS': 'Y',
'METHOD': 'ProcessTranx'})
"""
d = dict((key, getattr(self, key)) for key in self._fields)
return '{self.__class__.__name__}(**{d})'.format(self=self, d=d)
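# A minimal sketch (added for illustration, not part of the original module)
# of how the validation above behaves, using a hypothetical subclass. It
# assumes set_credentials() from earlier in this module populates UN and PSWD
# on PayTraceRequest.
def _demo_field_validation():
    """Demonstrate required/extra field checking on a made-up subclass."""
    set_credentials('demo123', 'demo123')
    class _Ping(PayTraceRequest):
        METHOD = 'Ping'
        _required = ['UN', 'PSWD', 'TERMS', 'METHOD', 'AMOUNT']
        _optional = ['TEST']
        _discretionary_data_allowed = False
    try:
        _Ping()  # AMOUNT not supplied
    except KeyError as exc:
        print(exc)  # _Ping has missing fields: AMOUNT
    try:
        _Ping(amount='1.00', bogus='x')  # BOGUS is neither required nor optional
    except KeyError as exc:
        print(exc)  # _Ping defines extra fields: BOGUS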
#
# Classes for processing transactions.
#
class Sale(PayTraceRequest):
"""
Processing a sale through the PayTrace API may be accomplished by
providing a new customer's swiped credit card information, a new customer's
key entered credit card information, or the customer ID of an existing
customer.
{field_details}
See section 4.1.1.
"""
METHOD = 'ProcessTranx'
TRANXTYPE = 'Sale'
_required = [
'UN', 'PSWD', 'TERMS', 'METHOD', 'TRANXTYPE', 'AMOUNT',
]
_conditional = {
'SWIPE': ['SWIPE'],
'CC': ['CC', 'EXPMNTH', 'EXPYR'],
'CUSTID': ['CUSTID']
}
_optional = [
'BNAME', 'BADDRESS', 'BADDRESS2', 'BCITY', 'BSTATE', 'BZIP',
'BCOUNTRY', 'SNAME', 'SADDRESS', 'SADDRESS2', 'SCITY', 'SCOUNTY',
'SSTATE', 'SZIP', 'SCOUNTRY', 'EMAIL', 'CSC', 'INVOICE', 'DESCRIPTION',
'TAX', 'CUSTREF', 'RETURNCLR', 'CUSTOMDBA', 'ENABLEPARTIALAUTH',
'TEST'
]
_discretionary_data_allowed = True
    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Additional encoding of the value is required when SWIPE is used;
        # see 3.3.1 SWIPE data definition for details. The attribute must be
        # updated after the base __init__ has set it (mutating kwargs at this
        # point would have no effect on the instance).
        if 'SWIPE' in kwargs:
            self.SWIPE = quote_plus(self.SWIPE.replace('|', '***'))
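# A hedged usage sketch (added for illustration): two of the three mutually
# exclusive ways a Sale may be keyed, mirroring the _conditional sets above.
# The card number is the demo value from the PayTrace API docs; the customer
# ID is hypothetical, and the swiped variant is omitted because track data
# cannot be faked meaningfully.
def _demo_sale_variants():
    set_credentials('demo123', 'demo123')
    key_entered = Sale(amount='1.00', cc='4012881888818888',
                       expmnth='01', expyr='15')
    by_customer_id = Sale(amount='1.00', custid='customer42')
    print(repr(key_entered))
    print(repr(by_customer_id))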
class Authorization(Sale):
"""
    Processing an authorization through the PayTrace API will request
    authorization for a specified amount. However, the approved funds
    will not be charged or funded until the transaction is captured
    and settled.
The required fields for processing an Authorization Request are the same
as processing a Sale Request.
See section 4.1.2.
"""
TRANXTYPE = 'Authorization'
class Refund(PayTraceRequest):
"""
Processing a refund through the PayTrace API may be accomplished by
providing a new customer's swiped credit card information, providing a new
customer's key entered credit card information, providing the customer ID
of an existing customer, or providing the transaction ID of the original
transaction that should be refunded.
See section 4.1.3.
"""
METHOD = 'ProcessTranx'
TRANXTYPE = 'Refund'
_required = [
'UN', 'PSWD', 'TERMS', 'METHOD', 'TRANXTYPE'
]
_conditional = {
'SWIPE': ['AMOUNT', 'SWIPE'],
'CC': ['AMOUNT', 'CC', 'EXPMNTH', 'EXPYR'],
'CUSTID': ['AMOUNT', 'CUSTID'],
'TRANXID': ['TRANXID']
}
_optional = [
'BNAME', 'BADDRESS', 'BADDRESS2', 'BCITY', 'BSTATE', 'BZIP',
'BCOUNTRY', 'SNAME', 'SADDRESS', 'SADDRESS2', 'SCITY', 'SCOUNTY',
'SSTATE', 'SZIP', 'SCOUNTRY', 'EMAIL', 'CSC', 'INVOICE', 'DESCRIPTION',
'TAX', 'CUSTREF', 'AMOUNT', 'TEST'
]
_discretionary_data_allowed = True
class Void(PayTraceRequest):
"""
Processing a void through the PayTrace API may only be accomplished by
providing the transaction ID of the unsettled transaction that should
be voided.
See section 4.1.4.
"""
METHOD = 'ProcessTranx'
TRANXTYPE = 'Void'
_required = [
'UN', 'PSWD', 'TERMS', 'METHOD', 'TRANXTYPE', 'TRANXID'
]
_optional = ['TEST']
class ForcedSale(PayTraceRequest):
"""
Processing a forced sale through the PayTrace API may be accomplished by
providing a new customer's swiped credit card information, providing a new
customer's key entered credit card information, or providing the customer
ID of an existing customer. A forced sale is a sale where the approval
code for the purchase amount has been obtained outside of the PayTrace
Payment Gateway or has been voided from the settlement record.
    ForcedSale has the same fields as Sale, with the addition of
    APPROVAL.
See section 4.1.5.
"""
METHOD = 'ProcessTranx'
TRANXTYPE = 'Force'
_required = [
'UN', 'PSWD', 'TERMS', 'METHOD', 'TRANXTYPE', 'AMOUNT', 'APPROVAL'
]
_conditional = {
'SWIPE': ['SWIPE'],
'CC': ['CC', 'EXPMNTH', 'EXPYR'],
'CUSTID': ['CUSTID']
}
    # Same optional fields as Sale.
    _optional = [
        'BNAME', 'BADDRESS', 'BADDRESS2', 'BCITY', 'BSTATE', 'BZIP',
        'BCOUNTRY', 'SNAME', 'SADDRESS', 'SADDRESS2', 'SCITY', 'SCOUNTY',
        'SSTATE', 'SZIP', 'SCOUNTRY', 'EMAIL', 'CSC', 'INVOICE', 'DESCRIPTION',
        'TAX', 'CUSTREF', 'RETURNCLR', 'CUSTOMDBA', 'ENABLEPARTIALAUTH',
        'TEST'
    ]
class Capture(PayTraceRequest):
"""
Capturing a transaction updates an approved authorization to a pending
settlement status that will initiate a transfer of funds. Processing a
capture through the PayTrace API may only be accomplished by providing
the transaction ID of the unsettled transaction that should be settled.
See section 4.1.6.
"""
METHOD = 'ProcessTranx'
TRANXTYPE = 'Capture'
_required = [
'UN', 'PSWD', 'TERMS', 'METHOD', 'TRANXTYPE', 'TRANXID'
]
_optional = ['TEST']
class CashAdvance(PayTraceRequest):
"""
Processing a Cash Advance transaction is similar to processing a Sale;
however, Cash Advances are special transactions that result in cash
disbursements to the card holder. Consequently, additional information is
    required to process Cash Advances. Cash Advances should always be swiped
    unless your card reader is unable to read the card's magnetic stripe.
Additionally, your PayTrace account must be specially configured to process
this type of transaction.
    Please note that Cash Advances may also be processed as forced
    transactions by setting the TranxType to FORCE and including a valid
    APPROVAL value; all other fields remain the same. Forced Cash Advance
    transactions should also be swiped unless your card reader is unable
    to read the card's magnetic stripe.
See section 4.1.7.
"""
METHOD = 'ProcessTranx'
TRANXTYPE = 'Sale'
_required = [
'UN', 'PSWD', 'TERMS', 'METHOD', 'TRANXTYPE', 'AMOUNT', 'SWIPE',
'CASHADVANCE', 'PHOTOID', 'IDEXP', 'LAST4', 'BNAME', 'BADDRESS',
'BADDRESS2', 'BCITY', 'BSTATE', 'BZIP'
]
    _optional = [
        'CC', 'EXPMNTH', 'EXPYR', 'TEST'
    ]
class StoreAndForward(Sale):
"""
    Processing a store & forward through the PayTrace API will request that
    the transaction be stored for future authorization of the specified
    amount.
    Please note that the authorization of the store & forward may be
    scheduled by providing a StrFwdDate value or manually via the Virtual
    Terminal.
    Note that swiped account numbers and *CSC* values are not stored.
See section 4.1.8.
"""
METHOD = 'ProcessTranx'
TRANXTYPE = 'Str/FWD'
#
# Classes for managing customer profiles.
#
class CreateCustomer(PayTraceRequest):
"""
Create a customer profile.
"""
METHOD = 'CreateCustomer'
_required = [
'UN', 'PSWD', 'TERMS', 'METHOD', 'CUSTID', 'BNAME', 'CC', 'EXPMNTH',
'EXPYR'
]
_optional = [
'BADDRESS', 'BADDRESS2', 'BCITY', 'BSTATE', 'BZIP', 'BCOUNTRY',
'SNAME', 'SADDRESS', 'SADDRESS2', 'SCITY', 'SCOUNTY', 'SSTATE', 'SZIP',
'SCOUNTRY', 'EMAIL', 'PHONE', 'FAX', 'CUSTPSWD', 'DDA', 'TR'
]
_discretionary_data_allowed = True
class UpdateCustomer(PayTraceRequest):
"""
Update an existing customer profile.
"""
METHOD = 'UpdateCustomer'
_required = [
'UN', 'PSWD', 'TERMS', 'METHOD', 'CUSTID'
]
_optional = [
'BADDRESS', 'BADDRESS2', 'BCITY', 'BSTATE', 'BZIP', 'BCOUNTRY',
'SNAME', 'SADDRESS', 'SADDRESS2', 'SCITY', 'SCOUNTY', 'SSTATE', 'SZIP',
'SCOUNTRY', 'EMAIL', 'PHONE', 'FAX', 'CC', 'EXPMNTH', 'EXPYR',
'CUSTPSWD', 'DDA', 'TR', 'NEWCUSTID'
]
_discretionary_data_allowed = True
class DeleteCustomer(PayTraceRequest):
"""
Delete an existing customer profile.
"""
METHOD = 'DeleteCustomer'
_required = [
'UN', 'PSWD', 'TERMS', 'METHOD', 'CUSTID'
]
_optional = []
#
# Emailing receipts
#
class EmailReceipt(PayTraceRequest):
"""
Email a transaction or check receipt.
"""
METHOD = 'EmailReceipt'
_required = ['UN', 'PSWD', 'TERMS', 'METHOD', 'EMAIL']
_conditional = {
'TRANXID': ['TRANXID'],
'CHECKID': ['CHECKID']
}
_optional = ['TRANXTYPE', 'CUSTID', 'USER', 'RETURNBIN', 'SEARCHTEXT']
def __init__(self, **kwargs):
super().__init__(**kwargs)
tranxtype = kwargs.get('TRANXTYPE')
if tranxtype:
assert tranxtype in ['SETTLED', 'PENDING', 'DECLINED']
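# Illustrative sketch (not part of the original module): emailing a receipt
# for a transaction by ID. The TRANXID value is the demo ID used in the
# __repr__ docstring above; the email address is a placeholder.
def _demo_email_receipt():
    set_credentials('demo123', 'demo123')
    receipt = EmailReceipt(email='buyer@example.com', tranxid='1539')
    print(str(receipt))  # the serialized PARMLIST request string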
#
# Exporting transaction information
#
class ExportTransaction(PayTraceRequest):
"""
Export transaction information.
See 4.4.
Response from PayTrace support that explains how to use ExportTransaction
to deal with "Service Unavailable" API responses.
"Web servers typically restart when large volumes of transactions
processing from our gateway, or when we release product updates into our
production environment. One work around that you can try that does not
involve manually logging into the site would be to perform an export
transaction request to our gateway if you happen to receive this response.
This essentially would be a "query" to our gateway that would check to
see if the transaction had really processed if you receive a
"service unavailable" response. This call to our API is outlined here --
http://help.paytrace.com/api-export-transaction-information.
You can use the "searchtext" parameter in your request to narrow down which
transaction you are looking for to see if it truly had been processed. If
it was processed, it will return the details of your transaction to parse
through and store -- and then you can move on to your next transaction. If
the transaction has not processed, no results will be returned letting you
know if the transaction truly did not process."
"""
METHOD = 'ExportTranx'
_required = ['UN', 'PSWD', 'TERMS', 'METHOD']
_conditional = {
'TRANXID': ['TRANXID'],
'SDATE': ['SDATE', 'EDATE']
}
_optional = ['TRANXTYPE', 'CUSTID', 'USER', 'RETURNBIN', 'SEARCHTEXT']
def __init__(self, **kwargs):
super().__init__(**kwargs)
# Validate TRANXTYPE value.
allowed_tranxtypes = [
'Sale', 'Authorization', 'Str/Fwd', 'Refund', 'Void', 'Capture',
'Force', 'SETTLED', 'PENDING', 'DECLINED'
]
tranxtype = kwargs.get('TRANXTYPE')
assert tranxtype is None or tranxtype in allowed_tranxtypes, (
'Invalid TRANXTYPE value: %r (allowed values: %s)'
% (tranxtype, allowed_tranxtypes)
)
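# Sketch of the recovery pattern described in the docstring above
# (illustrative only; the date format and values are assumptions). After a
# "Service Unavailable" response, query by invoice number via SEARCHTEXT to
# learn whether the transaction actually processed. send_api_request() is the
# helper used elsewhere in this module.
def _demo_export_lookup(invoice_number='8888'):
    set_credentials('demo123', 'demo123')
    export = ExportTransaction(sdate='05/01/2015', edate='05/31/2015',
                               searchtext=invoice_number)
    return send_api_request(export)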
class ExportBatch(PayTraceRequest):
"""
Export batch information.
See 4.12.
"""
METHOD = 'ExportBatch'
_required = ['UN', 'PSWD', 'TERMS', 'METHOD']
_optional = ['SDATE', 'BATCHNUMBER']
# TODO: Implement 4.5, 4.6, 4.7, 4.8, 4.9, 4.11, 4.13, 4.14.
class SettleTranxRequest(PayTraceRequest):
"""
4.10 Settling Transactions Through the PayTrace API
Transactions processed through merchant accounts that are set up on the
TSYS/Vital network or other terminal-based networks may initiate the
settlement of batches through the PayTrace API.
See section 4.10.
"""
METHOD = 'SETTLETRANX'
_required = [
'UN', 'PSWD', 'TERMS', 'METHOD'
]
_optional = []
def _test():
"""
Send Authorization and Void requests to the PayTrace demo account using
the demo credit card from the PayTrace API docs.
"""
import time
print("""
=== API usage example ===
>>> # 1. Set credentials for the PayTrace demo account.
>>> set_credentials('demo123', 'demo123')""")
time.sleep(2)
print("""
>>> # 2. Sending Authorization request to PayTrace demo account...
>>> authorization = Authorization(
... amount='1.00',
... cc='4012881888818888',
... expmnth='01',
... expyr='15',
... csc='999',
... baddress='123 Main St.',
... bzip='53719',
... invoice='8888',
... )
>>> response = send_api_request(authorization)""")
authorization = Authorization(
amount='1.00',
cc='4012881888818888',
expmnth='01',
expyr='15',
csc='999',
baddress='123 Main St.',
bzip='53719',
invoice='8888',
)
response = send_api_request(authorization)
print("""\
>>> response
{response}
""".format(**locals()))
print("""\
>>> # 3. Grab the transaction ID from the response.
>>> transactionid = response['TRANSACTIONID']
""")
time.sleep(2)
print("""\
>>> # 4. Sending Void request to cancel authorization...
>>> void = Void(tranxid=transactionid)
>>> response = send_api_request(void)""")
transactionid = response['TRANSACTIONID']
void = Void(tranxid=transactionid)
response = send_api_request(void)
print("""\
>>> response
{response}
=== end API usage example ===
""".format(**locals()))
input('Type <enter> to continue...')
print("""
# NOTE: To explore more of the API, run dir() to see what's in the current
# namespace. To see the required and optional fields for a particular
# request class, print its repr. For example,
>>> Sale # Note: if using ipython, you'll need to use repr(Sale) instead
Sale(
# Required fields
amount='',
# Required if using CC
cc='', expmnth='', expyr='',
# Required if using SWIPE
swipe='',
# Required if using CUSTID
custid='',
# Customer-defined discretionary data may also be included.
# Optional fields
bname='', baddress='', baddress2='', bcity='', bstate='', bzip='',
bcountry='', sname='', saddress='', saddress2='', scity='',
scounty='', sstate='', szip='', scountry='', email='', csc='',
invoice='', description='', tax='', custref='', returnclr='',
customdba='', enablepartialauth='', test='',
)
""".format(**locals()))
if __name__ == '__main__':
print("""
To explore the API, run 'python3 -i paytrace.py', then call the _test()
function. By default, credentials for the PayTrace demo account are in
effect.
>>> set_credentials('demo123', 'demo123')
>>> # now you call _test()...
""")
set_credentials('demo123', 'demo123')
|
jdnier/paytrace
|
paytrace.py
|
Python
|
mit
| 25,479
|
#!/usr/bin/env python3
# Copyright 2020 David Robillard <d@drobilla.net>
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THIS SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""
Write Sphinx markup from Doxygen XML.
Takes a path to a directory of XML generated by Doxygen, and emits a directory
with a reStructuredText file for every documented symbol.
"""
import argparse
import os
import sys
import textwrap
import xml.etree.ElementTree
__author__ = "David Robillard"
__date__ = "2020-11-18"
__email__ = "d@drobilla.net"
__license__ = "ISC"
__version__ = __date__.replace("-", ".")
def load_index(index_path):
"""
Load the index from XML.
:returns: A dictionary from ID to skeleton records with basic information
for every documented entity. Some records have an ``xml_filename`` key
with the filename of a definition file. These files will be loaded later
to flesh out the records in the index.
"""
root = xml.etree.ElementTree.parse(index_path).getroot()
index = {}
for compound in root:
compound_id = compound.get("refid")
compound_kind = compound.get("kind")
compound_name = compound.find("name").text
if compound_kind in ["dir", "file", "page"]:
continue
# Add record for compound (compounds appear only once in the index)
assert compound_id not in index
index[compound_id] = {
"kind": compound_kind,
"name": compound_name,
"xml_filename": compound_id + ".xml",
"children": [],
}
name_prefix = (
("%s::" % compound_name) if compound_kind == "namespace" else ""
)
for child in compound.findall("member"):
if child.get("refid") in index:
assert compound_kind == "group"
continue
# Everything has a kind and a name
child_record = {
"kind": child.get("kind"),
"name": name_prefix + child.find("name").text,
}
if child.get("kind") == "enum":
# Enums are not compounds, but we want to resolve the parent of
# their values so they are not written as top level documents
child_record["children"] = []
if child.get("kind") == "enumvalue":
# Remove namespace prefix
child_record["name"] = child.find("name").text
index[child.get("refid")] = child_record
return index
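# For orientation, a skeleton record produced above might look like this
# (the ID and name are illustrative):
#
#     index["group__pugl"] = {
#         "kind": "group",
#         "name": "pugl",
#         "xml_filename": "group__pugl.xml",
#         "children": [],
#     }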
def resolve_index(index, root):
"""
Walk a definition document and extend the index for linking.
This does two things: sets the "parent" and "children" fields of all
applicable records, and sets the "strong" field of enums so that the
correct Sphinx role can be used when referring to them.
"""
def add_child(index, parent_id, child_id):
parent = index[parent_id]
child = index[child_id]
if child["kind"] == "enumvalue":
assert parent["kind"] == "enum"
assert "parent" not in child or child["parent"] == parent_id
child["parent"] = parent_id
else:
if parent["kind"] in ["class", "struct", "union"]:
assert "parent" not in child or child["parent"] == parent_id
child["parent"] = parent_id
if child_id not in parent["children"]:
parent["children"] += [child_id]
compound = root.find("compounddef")
compound_kind = compound.get("kind")
if compound_kind == "group":
for subgroup in compound.findall("innergroup"):
add_child(index, compound.get("id"), subgroup.get("refid"))
for klass in compound.findall("innerclass"):
add_child(index, compound.get("id"), klass.get("refid"))
for section in compound.findall("sectiondef"):
if section.get("kind").startswith("private"):
for member in section.findall("memberdef"):
if member.get("id") in index:
del index[member.get("id")]
else:
for member in section.findall("memberdef"):
member_id = member.get("id")
add_child(index, compound.get("id"), member_id)
if member.get("kind") == "enum":
index[member_id]["strong"] = member.get("strong") == "yes"
for value in member.findall("enumvalue"):
add_child(index, member_id, value.get("id"))
def sphinx_role(record, lang):
"""
Return the Sphinx role used for a record.
This is used for the description directive like ".. c:function::", and
links like ":c:func:`foo`.
"""
kind = record["kind"]
if kind in ["class", "function", "namespace", "struct", "union"]:
return lang + ":" + kind
if kind == "define":
return "c:macro"
if kind == "enum":
return lang + (":enum-class" if record["strong"] else ":enum")
if kind == "typedef":
return lang + ":type"
if kind == "enumvalue":
return lang + ":enumerator"
if kind == "variable":
return lang + (":member" if "parent" in record else ":var")
raise RuntimeError("No known role for kind '%s'" % kind)
def child_identifier(lang, parent_name, child_name):
"""
Return the identifier for an enum value or struct member.
Sphinx, for some reason, uses a different syntax for this in C and C++.
"""
separator = "::" if lang == "cpp" else "."
return "%s%s%s" % (parent_name, separator, child_name)
def link_markup(index, lang, refid):
"""Return a Sphinx link for a Doxygen reference."""
record = index[refid]
kind, name = record["kind"], record["name"]
role = sphinx_role(record, lang)
if kind in ["class", "enum", "struct", "typedef", "union"]:
return ":%s:`%s`" % (role, name)
if kind == "function":
return ":%s:func:`%s`" % (lang, name)
if kind == "enumvalue":
parent_name = index[record["parent"]]["name"]
return ":%s:`%s`" % (role, child_identifier(lang, parent_name, name))
if kind == "variable":
if "parent" not in record:
return ":%s:var:`%s`" % (lang, name)
parent_name = index[record["parent"]]["name"]
return ":%s:`%s`" % (role, child_identifier(lang, parent_name, name))
raise RuntimeError("Unknown link target kind: %s" % kind)
def indent(markup, depth):
"""
Indent markup to a depth level.
Like textwrap.indent() but takes an integer and works in reST indentation
levels for clarity."
"""
return textwrap.indent(markup, " " * depth)
def heading(text, level):
"""
Return a ReST heading at a given level.
Follows the style in the Python documentation guide, see
<https://devguide.python.org/documenting/#sections>.
"""
assert 1 <= level <= 6
chars = ("#", "*", "=", "-", "^", '"')
    line = chars[level - 1] * len(text)
return "%s%s\n%s\n\n" % (line + "\n" if level < 3 else "", text, line)
def dox_to_rst(index, lang, node):
"""
Convert documentation commands (docCmdGroup) to Sphinx markup.
This is used to convert the content of descriptions in the documentation.
It recursively parses all children tags and raises a RuntimeError if any
unknown tag is encountered.
"""
def field_value(markup):
"""Return a value for a field as a single line or indented block."""
if "\n" in markup.strip():
return "\n" + indent(markup, 1)
return " " + markup.strip()
if node.tag == "lsquo":
return "‘"
if node.tag == "rsquo":
return "’"
if node.tag == "computeroutput":
assert len(node) == 0
return "``%s``" % node.text
if node.tag == "itemizedlist":
markup = ""
for item in node.findall("listitem"):
assert len(item) == 1
markup += "\n- %s" % dox_to_rst(index, lang, item[0])
return markup
if node.tag == "para":
markup = node.text if node.text is not None else ""
for child in node:
markup += dox_to_rst(index, lang, child)
markup += child.tail if child.tail is not None else ""
return markup.strip() + "\n\n"
if node.tag == "parameterlist":
markup = ""
for item in node.findall("parameteritem"):
name = item.find("parameternamelist/parametername")
description = item.find("parameterdescription")
assert len(description) == 1
markup += "\n\n:param %s:%s" % (
name.text,
field_value(dox_to_rst(index, lang, description[0])),
)
return markup + "\n"
if node.tag == "programlisting":
return "\n.. code-block:: %s\n\n%s" % (
lang,
indent(plain_text(node), 1),
)
if node.tag == "ref":
refid = node.get("refid")
if refid not in index:
sys.stderr.write("warning: Unresolved link: %s\n" % refid)
return node.text
assert len(node) == 0
assert len(link_markup(index, lang, refid)) > 0
return link_markup(index, lang, refid)
if node.tag == "simplesect":
assert len(node) == 1
if node.get("kind") == "return":
return "\n:returns:" + field_value(
dox_to_rst(index, lang, node[0])
)
if node.get("kind") == "see":
return dox_to_rst(index, lang, node[0])
raise RuntimeError("Unknown simplesect kind: %s" % node.get("kind"))
if node.tag == "ulink":
return "`%s <%s>`_" % (node.text, node.get("url"))
raise RuntimeError("Unknown documentation command: %s" % node.tag)
def description_markup(index, lang, node):
"""Return the markup for a brief or detailed description."""
assert node.tag == "briefdescription" or node.tag == "detaileddescription"
assert not (node.tag == "briefdescription" and len(node) > 1)
assert len(node.text.strip()) == 0
return "".join([dox_to_rst(index, lang, child) for child in node]).strip()
def set_descriptions(index, lang, definition, record):
"""Set a record's brief/detailed descriptions from the XML definition."""
for tag in ["briefdescription", "detaileddescription"]:
node = definition.find(tag)
if node is not None:
record[tag] = description_markup(index, lang, node)
def set_template_params(node, record):
"""Set a record's template_params from the XML definition."""
template_param_list = node.find("templateparamlist")
if template_param_list is not None:
params = []
for param in template_param_list.findall("param"):
if param.find("declname") is not None:
# Value parameter
type_text = plain_text(param.find("type"))
name_text = plain_text(param.find("declname"))
params += ["%s %s" % (type_text, name_text)]
else:
# Type parameter
params += ["%s" % (plain_text(param.find("type")))]
record["template_params"] = "%s" % ", ".join(params)
def plain_text(node):
"""
Return the plain text of a node with all tags ignored.
This is needed where Doxygen may include refs but Sphinx needs plain text
because it parses things itself to generate links.
"""
if node.tag == "sp":
markup = " "
elif node.text is not None:
markup = node.text
else:
markup = ""
for child in node:
markup += plain_text(child)
markup += child.tail if child.tail is not None else ""
return markup
def local_name(name):
"""Return a name with all namespace prefixes stripped."""
return name[name.rindex("::") + 2 :] if "::" in name else name
def read_definition_doc(index, lang, root):
"""Walk a definition document and update described records in the index."""
# Set descriptions for the compound itself
compound = root.find("compounddef")
compound_record = index[compound.get("id")]
set_descriptions(index, lang, compound, compound_record)
set_template_params(compound, compound_record)
if compound.find("title") is not None:
compound_record["title"] = compound.find("title").text.strip()
# Set documentation for all children
for section in compound.findall("sectiondef"):
if section.get("kind").startswith("private"):
continue
for member in section.findall("memberdef"):
kind = member.get("kind")
record = index[member.get("id")]
set_descriptions(index, lang, member, record)
set_template_params(member, record)
if compound.get("kind") in ["class", "struct", "union"]:
assert kind in ["function", "typedef", "variable"]
record["type"] = plain_text(member.find("type"))
if kind == "enum":
for value in member.findall("enumvalue"):
set_descriptions(
index, lang, value, index[value.get("id")]
)
elif kind == "function":
record["prototype"] = "%s %s%s" % (
plain_text(member.find("type")),
member.find("name").text,
member.find("argsstring").text,
)
elif kind == "typedef":
name = local_name(record["name"])
args_text = member.find("argsstring").text
target_text = plain_text(member.find("type"))
if args_text is not None: # Function pointer
assert target_text[-2:] == "(*" and args_text[0] == ")"
record["type"] = target_text + args_text
record["definition"] = target_text + name + args_text
else: # Normal named typedef
assert target_text is not None
record["type"] = target_text
if member.find("definition").text.startswith("using"):
record["definition"] = "%s = %s" % (
name,
target_text,
)
else:
record["definition"] = "%s %s" % (
target_text,
name,
)
elif kind == "variable":
record["definition"] = member.find("definition").text
def declaration_string(record):
"""
Return the string that describes a declaration.
This is what follows the directive, and is in C/C++ syntax, except without
keywords like "typedef" and "using" as expected by Sphinx. For example,
"struct ThingImpl Thing" or "void run(int value)".
"""
kind = record["kind"]
result = ""
if "template_params" in record:
result = "template <%s> " % record["template_params"]
if kind == "function":
result += record["prototype"]
elif kind == "typedef":
result += record["definition"]
elif kind == "variable":
if "parent" in record:
result += "%s %s" % (record["type"], local_name(record["name"]))
else:
result += record["definition"]
elif "type" in record:
result += "%s %s" % (record["type"], local_name(record["name"]))
else:
result += local_name(record["name"])
return result
def document_markup(index, lang, record):
"""Return the complete document that describes some documented entity."""
kind = record["kind"]
role = sphinx_role(record, lang)
name = record["name"]
markup = ""
if name != local_name(name):
markup += ".. cpp:namespace:: %s\n\n" % name[0 : name.rindex("::")]
# Write top-level directive
markup += ".. %s:: %s\n" % (role, declaration_string(record))
# Write main description blurb
markup += "\n" + indent(record["briefdescription"] + "\n", 1)
if len(record["detaileddescription"]) > 0:
markup += "\n" + indent(record["detaileddescription"], 1) + "\n"
assert (
kind in ["class", "enum", "namespace", "struct", "union"]
or "children" not in record
)
    # Sphinx C++ namespaces work by setting a scope; they have no content
child_indent = 0 if kind == "namespace" else 1
# Write inline children if applicable
markup += "\n" if "children" in record else ""
for child_id in record.get("children", []):
child_record = index[child_id]
child_role = sphinx_role(child_record, lang)
child_header = ".. %s:: %s\n\n" % (
child_role,
declaration_string(child_record),
)
markup += "\n"
markup += indent(child_header, child_indent)
markup += indent(child_record["briefdescription"], child_indent + 1)
markup += indent(child_record["detaileddescription"], child_indent + 1)
return markup
def symbol_filename(name):
"""Adapt the name of a symbol to be suitable for use as a filename."""
return name.replace("::", "__")
def emit_groups(index, lang, output_dir, force):
"""Write a description file for every group documented in the index."""
for record in index.values():
if record["kind"] != "group":
continue
name = record["name"]
filename = os.path.join(output_dir, "%s.rst" % name)
if not force and os.path.exists(filename):
raise FileExistsError("File already exists: '%s'" % filename)
with open(filename, "w") as rst:
rst.write(heading(record["title"], 1))
# Get all child group and symbol names
child_groups = {}
child_symbols = {}
for child_id in record["children"]:
child = index[child_id]
if child["kind"] == "group":
child_groups[child["name"]] = child
else:
child_symbols[child["name"]] = child
# Emit description (document body)
if len(record["briefdescription"]) > 0:
rst.write(record["briefdescription"] + "\n\n")
if len(record["detaileddescription"]) > 0:
rst.write(record["detaileddescription"] + "\n\n")
if len(child_groups) > 0:
# Emit TOC for child groups
rst.write(".. toctree::\n\n")
for name, group in child_groups.items():
rst.write(indent(group["name"], 1) + "\n")
# Emit symbols in sorted order
            for name, symbol in sorted(child_symbols.items()):
rst.write("\n")
rst.write(document_markup(index, lang, symbol))
rst.write("\n")
def run(index_xml_path, output_dir, language, force):
"""Write a directory of Sphinx files from a Doxygen XML directory."""
# Build skeleton index from index.xml
xml_dir = os.path.dirname(index_xml_path)
index = load_index(index_xml_path)
# Load all definition documents
definition_docs = []
for record in index.values():
if "xml_filename" in record:
xml_path = os.path.join(xml_dir, record["xml_filename"])
definition_docs += [xml.etree.ElementTree.parse(xml_path)]
# Do an initial pass of the definition documents to resolve the index
for root in definition_docs:
resolve_index(index, root)
# Finally read the documentation from definition documents
for root in definition_docs:
read_definition_doc(index, language, root)
# Create output directory
    os.makedirs(output_dir, exist_ok=True)
# Emit output files
emit_groups(index, language, output_dir, force)
if __name__ == "__main__":
ap = argparse.ArgumentParser(
usage="%(prog)s [OPTION]... XML_DIR OUTPUT_DIR",
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter,
)
ap.add_argument(
"-f",
"--force",
action="store_true",
help="overwrite files",
)
ap.add_argument(
"-l",
"--language",
default="c",
choices=["c", "cpp"],
help="language domain for output",
)
ap.add_argument("index_xml_path", help="path index.xml from Doxygen")
ap.add_argument("output_dir", help="output directory")
run(**vars(ap.parse_args(sys.argv[1:])))
|
OpenMusicKontrollers/midi_matrix.lv2
|
subprojects/nk_pugl/pugl/scripts/dox_to_sphinx.py
|
Python
|
artistic-2.0
| 21,230
|
"""
WSGI config for detectme project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
from os.path import abspath, dirname
from sys import path
SITE_ROOT = dirname(dirname(abspath(__file__)))
path.append(SITE_ROOT)
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "jajaja.settings"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "detectme.settings.production")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
_application = get_wsgi_application()
# Import OS environment variables to the underlying application
# reference: http://ericplumb.com/blog/passing-apache-environment-variables-to-django-via-mod_wsgi.html
env_variables_to_pass = ['SECRET_KEY', 'EMAIL_HOST', 'DB_NAME',
'DB_USER', 'DB_PASSWORD', 'DB_HOST']
def application(environ, start_response):
# pass the WSGI environment variables on through to os.environ
for var in env_variables_to_pass:
os.environ[var] = environ.get(var, '')
return _application(environ, start_response)
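# For example (hypothetical Apache configuration, per the reference above),
# the variables can be set in the server config:
#
#     SetEnv SECRET_KEY changeme
#     SetEnv DB_NAME detectme
#
# mod_wsgi then exposes them in `environ`, and the wrapper above copies them
# into os.environ before Django handles the request.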
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
|
mingot/detectme_server
|
detectme/detectme/wsgi.py
|
Python
|
mit
| 2,103
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for CreateQuestion
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-dataqna
# [START dataqna_generated_dataqna_v1alpha_QuestionService_CreateQuestion_async]
from google.cloud import dataqna_v1alpha
async def sample_create_question():
# Create a client
client = dataqna_v1alpha.QuestionServiceAsyncClient()
# Initialize request argument(s)
question = dataqna_v1alpha.Question()
question.scopes = ['scopes_value_1', 'scopes_value_2']
question.query = "query_value"
request = dataqna_v1alpha.CreateQuestionRequest(
parent="parent_value",
question=question,
)
# Make the request
response = await client.create_question(request=request)
# Handle the response
print(response)
# [END dataqna_generated_dataqna_v1alpha_QuestionService_CreateQuestion_async]
|
googleapis/python-data-qna
|
samples/generated_samples/dataqna_generated_dataqna_v1alpha_question_service_create_question_async.py
|
Python
|
apache-2.0
| 1,680
|
c = get_config()
c.IPythonWidget.execute_on_complete_input = False
c.FrontendWidget.lexer_class = 'pygments.lexers.FSharpLexer'
|
npmurphy/IfSharp
|
ipython-profile/ipython_qtconsole_config.py
|
Python
|
bsd-3-clause
| 128
|
from __future__ import absolute_import
from typing import Any
from argparse import ArgumentParser
from django.core.management.base import BaseCommand
from django.conf import settings
class Command(BaseCommand):
help = """Send some stats to statsd."""
def add_arguments(self, parser):
# type: (ArgumentParser) -> None
parser.add_argument('operation', metavar='<operation>', type=str,
choices=['incr', 'decr', 'timing', 'timer', 'gauge'],
help="incr|decr|timing|timer|gauge")
parser.add_argument('name', metavar='<name>', type=str)
parser.add_argument('val', metavar='<val>', type=str)
def handle(self, *args, **options):
# type: (*Any, **str) -> None
operation = options['operation']
name = options['name']
val = options['val']
if settings.STATSD_HOST != '':
from statsd import statsd
func = getattr(statsd, operation)
func(name, val)
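# Example invocation (illustrative metric name and value), with STATSD_HOST
# configured in settings:
#
#     python manage.py send_stats incr page_views 1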
|
sonali0901/zulip
|
zerver/management/commands/send_stats.py
|
Python
|
apache-2.0
| 1,017
|
import inspect
#public symbols
__all__ = ["Factory"]
class Factory(object):
"""Base class for objects that know how to create other objects
based on a type argument and several optional arguments (version,
server id, and resource description).
"""
def __init__(self):
pass
def create(self, typname, version=None, server=None,
res_desc=None, **ctor_args):
"""Return an object of type *typname* (or a proxy to it if it resides
in another process) using the specified package version, server
location, and resource description. Returns None if this factory is
unable to create the specified type.
"""
raise NotImplementedError('create')
def get_available_types(self, groups=None):
"""Return a set of tuples of the form (typename, metadata_dict), one
for each available plugin type in the given entry point groups.
If groups is *None,* return the set for all openmdao entry point groups.
"""
raise NotImplementedError('get_available_types')
def get_signature(self, typname, version=None):
"""Return constructor argument signature for *typname,* using the
specified package version. The return value is a dictionary:
args: list
List of 1 or 2-element lists. The first element is the argument
name; the second element is the default value.
varargs: string
The name of the '*' argument.
kwargs: string
The name of the '**' argument.
"""
raise NotImplementedError('get_signature')
@staticmethod
def form_signature(cls):
"""Return constructor signature for class `cls`."""
argspec = inspect.getargspec(cls.__init__)
arglist = argspec.args[1:] # Drop self.
non_default = len(arglist)
if argspec.defaults is not None:
non_default -= len(argspec.defaults)
args = [[arg] for arg in arglist[:non_default]]
if argspec.defaults is not None:
defstrs = [repr(default) for default in argspec.defaults]
args.extend([arg, default]
for arg, default in zip(arglist[non_default:], defstrs))
return dict(args=args,
varargs=argspec.varargs or '',
kwargs=argspec.keywords or '')
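# For example (illustrative class), given
#
#     class Thing(object):
#         def __init__(self, size, color='red', *parts, **meta):
#             pass
#
# Factory.form_signature(Thing) returns
#
#     {'args': [['size'], ['color', "'red'"]],
#      'varargs': 'parts', 'kwargs': 'meta'}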
|
HyperloopTeam/FullOpenMDAO
|
lib/python2.7/site-packages/openmdao.main-0.13.0-py2.7.egg/openmdao/main/factory.py
|
Python
|
gpl-2.0
| 2,382
|
'''
- login and get token
- process 2FA if 2FA is setup for this account
- Get list of child accounts for a parent user
- if the user is a regular customer then get a list of child accounts for this user
- if the user is a partner_admin then get a list of child accounts for the first user from the list of users this partner admin has access to
'''
import requests
import json
get_token_url = "https://api.canopy.cloud:443/api/v1/sessions/"
validate_otp_url = "https://api.canopy.cloud:443/api/v1/sessions/otp/validate.json" #calling the production server for OTP authentication
get_partner_users_url = "https://api.canopy.cloud:443/api/v1/admin/users.json"
get_children_url = "https://api.canopy.cloud:443/api/v1/child_accounts.json"
# Replace the username and password below with your own.
username = 'userxxx'
password = 'passxxx'
# Enter the OTP code below if 2FA is enabled for the account.
otp_code = '123456'
#first call for a fresh token
payload = "user%5Busername%5D=" + username + "&user%5Bpassword%5D=" + password
headers = {
'accept': "application/json",
'content-type':"application/x-www-form-urlencoded"
}
response = requests.request("POST", get_token_url, data=payload, headers=headers)
print json.dumps(response.json(), indent=4, sort_keys = True)
token = response.json()['token']
login_flow = response.json()['login_flow']
#in case 2FA is enabled use the OTP code to get the second level of authentication
if login_flow == '2fa_verification':
headers['Authorization'] = token
payload = 'otp_code=' + otp_code
response = requests.request("POST", validate_otp_url, data=payload, headers=headers)
print json.dumps(response.json(), indent=4, sort_keys = True) #print response.text
token = response.json()['token']
login_role = response.json()['role']
switch_user_id = response.json()['id']
if login_role == 'Partneradmin':
#print "============== partner's users ==========="
headers = {
'authorization': token,
'content-type': "application/x-www-form-urlencoded; charset=UTF-8"
}
partner_users = []
response = requests.request("GET", get_partner_users_url, headers=headers)
for parent_user in response.json()['users']:
partner_users.append(parent_user['id'])
#print partner_users
    #take the first user in the list as the switch_user_id
switch_user_id = partner_users[0]
#in case the user is a partner_admin then switch_user_id is any one of the users it has access to (here we take the first one from the list)
#in case the user is a regular customer then the switch_user_id = user_id for this customer
#Headers for get request to get children
headers = {
'authorization': token,
'username': username,
'content-type': "application/x-www-form-urlencoded; charset=UTF-8",
'x-app-switch-user': str(switch_user_id)
}
response = requests.request("GET", get_children_url, headers=headers)
print json.dumps(response.json(), indent=4, sort_keys = True)
|
Mesitis/community
|
sample-code/Python/05 Child Accounts/get_children.py
|
Python
|
mit
| 2,968
|
"""
WiFi Positioning System
Wrappers around the SkyHook and Google Locations APIs to resolve
wireless routers' MAC addresses (BSSID) to physical locations.
"""
try:
from json import dumps, loads
except:
from simplejson import dumps, loads
from urllib2 import Request, urlopen
from urllib import urlencode
class Skyhook:
"""Not yet ready for production, use the GoogleLocation class instead."""
def __init__(self, username='api', realm='shodan'):
self.username = username
self.realm = realm
self.url = 'https://api.skyhookwireless.com/wps2/location'
def locate(self, mac):
# Remove the ':'
mac = mac.replace(':', '')
print mac
data = """<?xml version='1.0'?>
<LocationRQ xmlns='http://skyhookwireless.com/wps/2005' version='2.6' street-address-lookup='full'>
<authentication version='2.0'>
<simple>
<username>%s</username>
<realm>%s</realm>
</simple>
</authentication>
<access-point>
<mac>%s</mac>
<signal-strength>-50</signal-strength>
</access-point>
</LocationRQ>""" % (self.username, self.realm, mac)
request = Request(
url=self.url,
data=data,
headers={'Content-type': 'text/xml'})
response = urlopen(request)
result = response.read()
return result
class GoogleLocation:
def __init__(self):
self.url = 'http://www.google.com/loc/json'
def locate(self, mac):
data = {
'version': '1.1.0',
'request_address': True,
'wifi_towers': [{
'mac_address': mac,
'ssid': 'g',
'signal_strength': -72
}]
}
response = urlopen(self.url, dumps(data))
data = response.read()
return loads(data)
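# Usage sketch (illustrative MAC address; the Google endpoint above may no
# longer be publicly available):
#
# >>> GoogleLocation().locate('00:11:22:33:44:55')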
|
cnHackintosh/theHarvester
|
discovery/shodan/wps.py
|
Python
|
gpl-2.0
| 1,917
|
from odoo import models, fields, api
class ResPartner(models.Model):
_inherit = 'res.partner'
is_service_provider = fields.Boolean(string='Is Service Provider', default=False)
|
thinkwelltwd/care_center
|
service_partner/models/res_partner.py
|
Python
|
lgpl-3.0
| 188
|
import os
from ernest.utils import truthiness
# Whether or not we're in DEBUG mode. DEBUG mode is good for
# development and BAD BAD BAD for production.
DEBUG = truthiness(os.environ.get('DEBUG', True))
# ------------------------------------------------
# Required things to set
# ------------------------------------------------
# Set the SECRET_KEY in your settings_local.py file.
# e.g. SECRET_KEY = 'ou812'
# Set the Bugzilla url for logging in via http post.
BUGZILLA_LOGIN_URL = 'https://bugzilla.mozilla.org/index.cgi'
# Set the Bugzilla API url.
BUGZILLA_API_URL = 'https://bugzilla.mozilla.org/bzapi/'
# This will fail if there's no SECRET_KEY in the environment.
# Either provide it there or add it to settings_local.py
SECRET_KEY = os.environ.get('SECRET_KEY')
# ------------------------------------------------
# Database
# ------------------------------------------------
# This is the url to the database. If you're using Heroku, then it'll
# populate this with DATABASE_URL from the environment.
#
# if you don't like what that's doing, you can set it explicitly
# here.
#
# Looks at SQLALCHEMY_DATABASE_URI then defaults to sqlite.
# For a sqlite file-based database, use sqlite:///path/to/db
# SQLALCHEMY_DATABASE_URI = os.environ.get(
# 'SQLALCHEMY_DATABASE_URI',
# 'sqlite:///{0}/ernest_app.db'.format(
# os.path.join(os.path.dirname(__file__), '..')
# )
# )
# This imports settings_local.py thus everything in that file
# overrides what's in this file.
try:
from ernest.settings_local import * # noqa
except ImportError:
pass
|
willkg/ernest
|
ernest/settings.py
|
Python
|
mpl-2.0
| 1,587
|
# coding=utf-8
# Copyright 2020 Google LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
googleinterns/cabby
|
cabby/model/text/s2cellid_prediction/__init__.py
|
Python
|
apache-2.0
| 587
|
# Copyright (C)
#
# Author :
from GIC.Channels.GenericChannel import *
class ChannelTest (GenericChannel):
# mandatory fields to work on LibreGeoSocial search engine
MANDATORY_FIELDS = ["latitude", "longitude", "radius", "category"]
CATEGORIES = [{"id" : "0", "name" : "all", "desc" : "All supported categories "},
{"id" : "1", "name" : "category1", "desc" : "Category for..."},
]
def __init__ (self):
self.options = {}
def get_categories(self):
return self.CATEGORIES
def get_info(self):
return "Channel description"
def set_options(self, options):
"""
Fill self.options with the received dictionary
regarding mandatory and optional fields of your channel
"""
return True, ""
def process (self):
"""
Make the search and return the nodes
"""
|
kgblll/libresoft-gymkhana
|
libs/ChannelTemplate.py
|
Python
|
gpl-2.0
| 822
|
#!/usr/bin/env python
# Encoding: utf-8
import re
import subprocess
def get_last_version_from_tags():
versions = subprocess.check_output(["git", "tag"])
versions = versions.split('\n')
version_regex = re.compile(r'(\d+)\.(\d+)\.(\d+)')
versions = [map(int, v.split('.')) for v in versions if version_regex.match(v)]
versions.sort(reverse=True)
return versions[0]
def generate_version():
if 'nothing to commit' not in subprocess.check_output(["git", "status"]):
print 'Error: You must commit current changes first'
exit()
last_version = get_last_version_from_tags()
with open('setup.py', 'r') as setup_py_file:
setup_py = setup_py_file.read()
new_version = last_version[:]
new_version[2] += 1
last_version = '.'.join(map(str, last_version))
new_version = '.'.join(map(str, new_version))
print 'upgrading from %s to %s' % (last_version, new_version)
version_line_re = re.compile(r'''(__version__ =)(\s*['"]\d+\.\d+\.\d+["'])''', flags=re.M)
with open('setup.py', 'w') as setup_file:
setup_file.write(version_line_re.sub('\\1 "%s"' % new_version, setup_py))
subprocess.check_output(["git", 'commit', '-m', '"version %s"' % new_version, '-a'])
subprocess.check_output(["git", 'tag', '%s' % new_version])
print
print 'Version %s created.' % new_version
push = raw_input('Do you want to push it to origin? YES/no: ')
if (push or 'YES').lower() == 'yes':
deploy = raw_input('Do you want to deploy it to pypi? YES/no: ')
subprocess.check_output(["git", 'push'])
subprocess.check_output(["git", 'push', '--tags'])
if (deploy or 'YES').lower() == 'yes':
subprocess.check_output(["python", 'setup.py', 'sdist', 'upload'])
else:
print 'Push it to origin with "git push --tags"'
if __name__ == '__main__':
generate_version()
|
dmugtasimov/django_audit_trail
|
bump_version.py
|
Python
|
apache-2.0
| 1,908
|
from __future__ import absolute_import
from collections import Callable
__all__ = ['get_or_create', 'RememberingSet']
class RememberingSet(set):
def __init__(self, *args, **kwargs):
super(RememberingSet, self).__init__(*args, **kwargs)
self._memory = set()
def add(self, value):
if value not in self._memory and value not in self:
super(RememberingSet, self).add(value)
self._memory.add(value)
def clear_memory(self):
self._memory.clear()
NOT_SET = object()
def get_or_create(dct, key, value_or_callable, *default_args, **default_kwargs):
value = dct.get(key, NOT_SET)
if value is not NOT_SET:
return value
value = value_or_callable
if isinstance(value, Callable):
value = value(*default_args, **default_kwargs)
dct[key] = value
return value
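# Usage sketch (illustrative values):
#
# >>> cache = {}
# >>> get_or_create(cache, 'answer', lambda: 42)
# 42
# >>> get_or_create(cache, 'answer', lambda: 99)  # cached, not recomputed
# 42
#
# >>> seen = RememberingSet()
# >>> seen.add(1); seen.remove(1); seen.add(1)
# >>> 1 in seen  # re-adding a remembered value is ignored
# False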
|
katakumpo/nicedjango
|
nicedjango/utils/py/collections.py
|
Python
|
mit
| 861
|
# GenCumulativeSkyMtx
#
# Ladybug: A Plugin for Environmental Analysis (GPL) started by Mostapha Sadeghipour Roudsari
#
# This file is part of Ladybug.
#
# Copyright (c) 2013-2015, Mostapha Sadeghipour Roudsari <Sadeghipour@gmail.com>
# Ladybug is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published
# by the Free Software Foundation; either version 3 of the License,
# or (at your option) any later version.
#
# Ladybug is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ladybug; If not, see <http://www.gnu.org/licenses/>.
#
# @license GPL-3.0+ <http://spdx.org/licenses/GPL-3.0+>
"""
This component uses Radiance's gendaymtx function to calculate the sky's radiation for each hour of the year. This is a necessary pre-step before doing radiation analysis with Rhino geometry or generating a radiation rose.
The first time you use this component, you will need to be connected to the internet so that the component can download the "gendaymtx.exe" function to your system.
Gendaymtx is written by Ian Ashdown and Greg Ward. For more information, check the Radiance manual at:
http://www.radiance-online.org/learning/documentation/manual-pages/pdfs/gendaymtx.pdf
-
Provided by Ladybug 0.0.61
Args:
_epwFile: The output of the Ladybug Open EPW component or the file path location of the epw weather file on your system.
_skyDensity_: Set to 0 to generate a Tregenza sky, which will divide up the sky dome with a coarse density of 145 sky patches. Set to 1 to generate a Reinhart sky, which will divide up the sky dome using a very fine density of 580 sky patches. Note that, while the Reinhart sky is more accurate, it will result in considerably longer calculation times. Accordingly, the default is set to 0 for a Tregenza sky.
workingDir_: An optional working directory in your system where the sky will be generated. Default is set to C:\Ladybug or C:\Users\yourUserName\AppData\Roaming\Ladybug. The latter is used if you cannot write to the C:\ drive of your computer. Any valid file path location can be connected.
useOldRes_: Set this to "True" if you have already run this component previously and you want to use the already-generated data for this weather file.
_runIt: Set to "True" to run the component and generate a sky matrix.
Returns:
readMe!: ...
cumulativeSkyMtx: The result of the gendaymtx function. Use the selectSkyMtx component to select a desired sky matrix from this output for use in a radiation study, radition rose, or sky dome visualization.
"""
ghenv.Component.Name = "Ladybug_GenCumulativeSkyMtx"
ghenv.Component.NickName = 'genCumulativeSkyMtx'
ghenv.Component.Message = 'VER 0.0.61\nNOV_05_2015'
ghenv.Component.Category = "Ladybug"
ghenv.Component.SubCategory = "2 | VisualizeWeatherData"
#compatibleLBVersion = VER 0.0.59\nFEB_01_2015
try: ghenv.Component.AdditionalHelpFromDocStrings = "2"
except: pass
import os
import scriptcontext as sc
from clr import AddReference
AddReference('Grasshopper')
import Grasshopper.Kernel as gh
from itertools import izip
import shutil
def date2Hour(month, day, hour):
# fix the end day
numOfDays = [0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334]
# dd = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
JD = numOfDays[int(month)-1] + int(day)
return (JD - 1) * 24 + hour
def hour2Date(hour):
monthList = ['JAN', 'FEB', 'MAR', 'APR', 'MAY', 'JUN', 'JUL', 'AUG', 'SEP', 'OCT', 'NOV', 'DEC']
numOfDays = [0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334, 365]
numOfHours = [24 * numOfDay for numOfDay in numOfDays]
for h in range(len(numOfHours)-1):
if hour <= numOfHours[h+1]: month = h + 1; break
if hour == 0: day = 1
elif (hour)%24 == 0: day = int((hour - numOfHours[h]) / 24)
else: day = int((hour - numOfHours[h]) / 24) + 1
time = hour%24 + 0.5
return str(day), str(month), str(time)
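# e.g. hour2Date(0) returns ('1', '1', '0.5'), i.e. January 1st at 00:30.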
def getRadiationValues(epw_file, analysisPeriod, weaFile):
# start hour and end hour
stHour = 0
endHour = 8760
epwfile = open(epw_file,"r")
for lineCount, line in enumerate(epwfile):
hour = lineCount - 8
if int(stHour) <= hour <= int(endHour):
dirRad = (line.split(',')[14])
difRad = (line.split(',')[15])
day, month, time = hour2Date(hour)
weaFile.write(month + " " + day + " " + time + " " + dirRad + " " + difRad + "\n")
epwfile.close()
return weaFile
def weaHeader(epwFileAddress, lb_preparation):
locName, lat, long, timeZone, elev, dataStr = lb_preparation.epwLocation(epwFileAddress)
#print locName, lat, long, timeZone, elev
return "place " + locName + "\n" + \
"latitude " + lat + "\n" + \
"longitude " + `-float(long)` + "\n" + \
"time_zone " + `-float(timeZone) * 15` + "\n" + \
"site_elevation " + elev + "\n" + \
"weather_data_file_units 1\n"
def epw2wea(weatherFile, analysisPeriod, lb_preparation):
outputFile = weatherFile.replace(".epw", ".wea")
header = weaHeader(weatherFile, lb_preparation)
weaFile = open(outputFile, 'w')
weaFile.write(header)
weaFile = getRadiationValues(weatherFile, analysisPeriod, weaFile)
weaFile.close()
return outputFile
def main(epwFile, skyType, workingDir, useOldRes):
# import the classes
if sc.sticky.has_key('ladybug_release'):
try:
if not sc.sticky['ladybug_release'].isCompatible(ghenv.Component): return -1
except:
warning = "You need a newer version of Ladybug to use this compoent." + \
"Use updateLadybug component to update userObjects.\n" + \
"If you have already updated userObjects drag Ladybug_Ladybug component " + \
"into canvas and try again."
w = gh.GH_RuntimeMessageLevel.Warning
ghenv.Component.AddRuntimeMessage(w, warning)
return -1
lb_preparation = sc.sticky["ladybug_Preparation"]()
# make working directory
if workingDir: workingDir = lb_preparation.removeBlankLight(workingDir)
workingDir = lb_preparation.makeWorkingDir(workingDir)
# make sure the directory has been created
if workingDir == -1: return -2
workingDrive = workingDir[0:1]
# GenCumulativeSky
gendaymtxFile = os.path.join(workingDir, 'gendaymtx.exe')
if not os.path.isfile(gendaymtxFile):
# let's see if we can grab it from radiance folder
if os.path.isfile("c:/radiance/bin/gendaymtx.exe"):
# just copy this file
shutil.copyfile("c:/radiance/bin/gendaymtx.exe", gendaymtxFile)
else:
# download the file
lb_preparation.downloadGendaymtx(workingDir)
#check if the file is there
if not os.path.isfile(gendaymtxFile) or os.path.getsize(gendaymtxFile)< 15000 : return -3
## check for epw file to be connected
if epwFile != None and epwFile[-3:] == 'epw':
if not os.path.isfile(epwFile):
print "Can't find epw file at " + epwFile
w = gh.GH_RuntimeMessageLevel.Warning
ghenv.Component.AddRuntimeMessage(w, "Can't find epw file at " + epwFile)
return -1
# import data from epw file
locName, lat, lngt, timeZone, elev, locationStr = lb_preparation.epwLocation(epwFile)
newLocName = lb_preparation.removeBlank(locName)
# make new folder for each city
subWorkingDir = lb_preparation.makeWorkingDir(workingDir + "\\" + newLocName)
print 'Current working directory is set to: ', subWorkingDir
# copy .epw file to sub-directory
weatherFileAddress = lb_preparation.copyFile(epwFile, subWorkingDir + "\\" + newLocName + '.epw')
# create weaFile
weaFile = epw2wea(weatherFileAddress, [], lb_preparation)
outputFile = weaFile.replace(".wea", ".mtx")
outputFileDif = weaFile.replace(".wea", "_dif_" + `skyType` + ".mtx")
outputFileDir = weaFile.replace(".wea", "_dir_" + `skyType` + ".mtx")
# check if the study is already ran for this weather file
if useOldRes and os.path.isfile(outputFileDif) and os.path.isfile(outputFileDir):
# ask the user if he wants to re-run the study
print "Sky matrix files for this epw file are already existed on your system.\n" + \
"The component won't recalculate the sky and imports the available result.\n" + \
"In case you don't want to use these files, set useOldRes input to False and re-run the study.\n" + \
"If you found the lines above confusing just ignore it! It's all fine. =)\n"
else:
batchFile = weaFile.replace(".wea", ".bat")
command = "@echo off \necho.\n echo HELLO " + os.getenv("USERNAME").upper()+ "! " + \
"DO NOT CLOSE THIS WINDOW. \necho.\necho IT WILL BE CLOSED AUTOMATICALLY WHEN THE CALCULATION IS OVER!\n" + \
"echo.\necho AND MAY TAKE FEW MINUTES...\n" + \
"echo.\n" + \
"echo CALCULATING DIFFUSE COMPONENT OF THE SKY...\n" + \
workingDir + "\\gendaymtx -m " + str(n) + " -s -O1 " + weaFile + "> " + outputFileDif + "\n" + \
"echo.\necho CALCULATING DIRECT COMPONENT OF THE SKY...\n" + \
workingDir + "\\gendaymtx -m " + str(n) + " -d -O1 " + weaFile + "> " + outputFileDir
                batch = open(batchFile, 'w')
                batch.write(command)
                batch.close()
os.system(batchFile)
return outputFileDif, outputFileDir, newLocName, lat, lngt, timeZone
else:
print "epwWeatherFile address is not a valid .epw file"
w = gh.GH_RuntimeMessageLevel.Warning
ghenv.Component.AddRuntimeMessage(w, "epwWeatherFile address is not a valid .epw file")
return -1
else:
print "You should first let the Ladybug fly..."
w = gh.GH_RuntimeMessageLevel.Warning
ghenv.Component.AddRuntimeMessage(w, "You should first let the Ladybug fly...")
return -1
def readMTXFile(daylightMtxDif, daylightMtxDir, n, newLocName, lat, lngt, timeZone):
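    # Parse the diffuse and direct gendaymtx result files into a dictionary that
    # maps sky patch number -> hour of the year -> [diffuse, direct] values,
    # wrapped in a SkyResultsCollection.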
    # All the patches in the top row get the same values, so maybe
    # I should re-create the geometry with 577 patches instead of 580,
    # and keep in mind that the first patch is the ground!
    # I create the dictionary only for sky patches and don't collect the data
    # for the first (ground) patch
    # this line could have saved me 5 hours
skyPatchesDict = {1 : 145,
2 : 580 - 3}
numOfPatchesInEachRow = {1: [30, 30, 24, 24, 18, 12, 6, 1],
2: [60, 60, 60, 60, 48, 48, 48, 48, 36, 36, 24, 24, 12, 12, 1]}
    # first row is the horizon and the last row is the single zenith patch
strConv = {1 : [0.0435449227, 0.0416418006, 0.0473984151, 0.0406730411, 0.0428934136, 0.0445221864, 0.0455168385, 0.0344199465],
2: [0.0113221971, 0.0111894547, 0.0109255262, 0.0105335058, 0.0125224872, 0.0117312774, 0.0108025291, 0.00974713106, 0.011436609, 0.00974295956, 0.0119026242, 0.00905126163, 0.0121875626, 0.00612971396, 0.00921483254]}
numOfSkyPatches = skyPatchesDict[n]
# create an empty dictionary
radValuesDict = {}
for skyPatch in range(numOfSkyPatches):
radValuesDict[skyPatch] = {}
    from itertools import izip  # needed for the paired iteration below, in case it isn't imported earlier in the file
    resFileDif = open(daylightMtxDif, "r")
    resFileDir = open(daylightMtxDir, "r")
def getValue(line, rowNumber):
R, G, B = line.split(' ')
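        # Assumed interpretation: gendaymtx emits one RGB triplet per patch per hour;
        # the weighted sum converts RGB to a single broadband value, and strConv
        # scales it by the solid angle (steradians) of patches in the given row.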
value = (.265074126 * float(R) + .670114631 * float(G) + .064811243 * float(B)) * strConv[n][rowNumber]
return value
lineCount = 0
extraHeadingLines = 0 # no heading
warnOff = False
failedHours = {}
for difLine, dirLine in izip(resFileDif, resFileDir):
# each line is the data for each hour for a single patch
        # newer versions of gendaymtx generate a header;
        # this check makes sure the component works with both versions
if lineCount == 0 and difLine.startswith("#?RADIANCE"):
# the file has a header
extraHeadingLines = -8
if lineCount + extraHeadingLines < 0:
# pass heading line
lineCount += 1
continue
        # every 8761st line is an empty line that separates patches, so let's skip those
        hour = (lineCount + 1 + extraHeadingLines) % 8761
#print lineCount, hour
if hour != 0:
patchNumber = int((lineCount + 1 + extraHeadingLines) /8761)
# first patch is ground!
if patchNumber != 0: #and patchNumber < numOfSkyPatches:
for rowCount, patchCountInRow in enumerate(numOfPatchesInEachRow[n]):
if patchNumber - 1 < sum(numOfPatchesInEachRow[n][:rowCount+1]):
rowNumber = rowCount
# print rowNumber
break
try:
difValue = getValue(difLine, rowNumber)
dirValue = getValue(dirLine, rowNumber)
except Exception, e:
value = 0
if not warnOff:
print "genDayMtx returns null Values for few hours. The study will run anyways." + \
"\nMake sure that you are using an standard epw file." + \
"\nThe failed hours are listed below in [Month/Day @Hour] format."
warnOff = True
day, month, time = hour2Date(hour - 1)
if hour-1 not in failedHours.keys():
failedHours[hour-1] = [day, month, time]
print "Failed to read the results > " + month + "/" + day + " @" + time
                try:
                    radValuesDict[patchNumber-1][hour] = [difValue, dirValue]
                except:
                    print patchNumber-1, hour, value
lineCount += 1
resFileDif.close()
resFileDir.close()
class SkyResultsCollection(object):
def __init__(self, valuesDict, locationName, lat, lngt, timeZone):
self.d = valuesDict
self.location = locationName
self.lat = lat
self.lngt = lngt
self.timeZone = timeZone
return SkyResultsCollection(radValuesDict, newLocName, lat, lngt, timeZone)
if _runIt and _epwFile != None:
    if _skyDensity_ == None: n = 1  # Tregenza sky
    else: n = _skyDensity_ % 2 + 1  # 1: Tregenza (145 patches), 2: Reinhart (577 patches)
    result = main(_epwFile, n, workingDir_, useOldRes_)
    w = gh.GH_RuntimeMessageLevel.Warning
    if result == -3:
warning = 'Download failed!!! You need GenDayMtx.exe to use this component.' + \
'\nPlease check your internet connection, and try again!'
print warning
ghenv.Component.AddRuntimeMessage(w, warning)
elif result == -2:
warning = 'Working directory cannot be created! Please set workingDir to a new path'
print warning
ghenv.Component.AddRuntimeMessage(w, warning)
elif result == -1:
pass
else:
        daylightMtxDiffuseFile, daylightMtxDirectFile, newLocName, lat, lngt, timeZone = result
        cumulativeSkyMtx = readMTXFile(daylightMtxDiffuseFile, daylightMtxDirectFile, n, newLocName, lat, lngt, timeZone)
else:
warn = "Set runIt to True and connect a valid epw file address"
print warn
#ghenv.Component.AddRuntimeMessage(gh.GH_RuntimeMessageLevel.Warning, warn)
|
boris-p/ladybug
|
src/Ladybug_GenCumulativeSkyMtx.py
|
Python
|
gpl-3.0
| 16,430
|
# cairo.py
#
# Copyright (C) 2011 Carlos Garcia Campos <carlosgc@gnome.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
from __future__ import absolute_import, division, print_function
from backends import Backend, register_backend
import subprocess
import os
class Cairo(Backend):
def __init__(self, name):
Backend.__init__(self, name, '.diff.png')
        self._pdftocairo = os.path.join(self._utilsdir, 'pdftocairo')
def create_refs(self, doc_path, refs_path, password = None):
out_path = os.path.join(refs_path, 'cairo')
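        # Render every page with pdftocairo: -cropbox crops to the page crop box,
        # -r 72 renders at 72 DPI (so pixels match PDF points), and -png writes one
        # PNG per page using out_path as the filename prefix.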
cmd = [self._pdftocairo, '-cropbox', '-r', '72', '-png', doc_path, out_path]
if password is not None:
cmd.extend(['-opw', password, '-upw', password])
p = subprocess.Popen(cmd, stderr = subprocess.PIPE)
return self._check_exit_status(p, out_path)
def _create_diff(self, ref_path, result_path):
self._diff_png(ref_path, result_path)
register_backend('cairo', Cairo)
|
tsdgeos/poppler_mirror
|
regtest/backends/cairo.py
|
Python
|
gpl-2.0
| 1,636
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'ui_pysplotter.ui'
#
# Created: Thu Nov 3 17:38:24 2011
# by: PyQt4 UI code generator 4.8.6
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
_fromUtf8 = lambda s: s
class Ui_PysplotterDlg(object):
def setupUi(self, PysplotterDlg):
PysplotterDlg.setObjectName(_fromUtf8("PysplotterDlg"))
PysplotterDlg.resize(800, 600)
PysplotterDlg.setWindowTitle(QtGui.QApplication.translate("PysplotterDlg", "MainWindow", None, QtGui.QApplication.UnicodeUTF8))
self.centralwidget = QtGui.QWidget(PysplotterDlg)
self.centralwidget.setEnabled(True)
self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
self.verticalLayout = QtGui.QVBoxLayout(self.centralwidget)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.horizontalLayout = QtGui.QHBoxLayout()
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
self.mplwidget = MplWidget(self.centralwidget)
self.mplwidget.setEnabled(True)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.mplwidget.sizePolicy().hasHeightForWidth())
self.mplwidget.setSizePolicy(sizePolicy)
self.mplwidget.setObjectName(_fromUtf8("mplwidget"))
self.horizontalLayout.addWidget(self.mplwidget)
self.verticalLayout.addLayout(self.horizontalLayout)
PysplotterDlg.setCentralWidget(self.centralwidget)
self.menubar = QtGui.QMenuBar(PysplotterDlg)
self.menubar.setGeometry(QtCore.QRect(0, 0, 800, 20))
self.menubar.setObjectName(_fromUtf8("menubar"))
self.menuFile = QtGui.QMenu(self.menubar)
self.menuFile.setTitle(QtGui.QApplication.translate("PysplotterDlg", "&File", None, QtGui.QApplication.UnicodeUTF8))
self.menuFile.setObjectName(_fromUtf8("menuFile"))
self.menuEdit = QtGui.QMenu(self.menubar)
self.menuEdit.setTitle(QtGui.QApplication.translate("PysplotterDlg", "&Edit", None, QtGui.QApplication.UnicodeUTF8))
self.menuEdit.setObjectName(_fromUtf8("menuEdit"))
self.menuView = QtGui.QMenu(self.menubar)
self.menuView.setTitle(QtGui.QApplication.translate("PysplotterDlg", "View", None, QtGui.QApplication.UnicodeUTF8))
self.menuView.setObjectName(_fromUtf8("menuView"))
self.menu_Spectrum = QtGui.QMenu(self.menubar)
self.menu_Spectrum.setTitle(QtGui.QApplication.translate("PysplotterDlg", "&Spectrum", None, QtGui.QApplication.UnicodeUTF8))
self.menu_Spectrum.setObjectName(_fromUtf8("menu_Spectrum"))
PysplotterDlg.setMenuBar(self.menubar)
self.statusbar = QtGui.QStatusBar(PysplotterDlg)
self.statusbar.setObjectName(_fromUtf8("statusbar"))
PysplotterDlg.setStatusBar(self.statusbar)
self.toolBar = QtGui.QToolBar(PysplotterDlg)
self.toolBar.setWindowTitle(QtGui.QApplication.translate("PysplotterDlg", "toolBar", None, QtGui.QApplication.UnicodeUTF8))
self.toolBar.setObjectName(_fromUtf8("toolBar"))
PysplotterDlg.addToolBar(QtCore.Qt.TopToolBarArea, self.toolBar)
self.dockWidget = QtGui.QDockWidget(PysplotterDlg)
self.dockWidget.setObjectName(_fromUtf8("dockWidget"))
self.dockWidgetContents = QtGui.QWidget()
self.dockWidgetContents.setObjectName(_fromUtf8("dockWidgetContents"))
self.verticalLayout_3 = QtGui.QVBoxLayout(self.dockWidgetContents)
self.verticalLayout_3.setObjectName(_fromUtf8("verticalLayout_3"))
self.verticalLayout_2 = QtGui.QVBoxLayout()
self.verticalLayout_2.setObjectName(_fromUtf8("verticalLayout_2"))
self.treeWidget = QtGui.QTreeWidget(self.dockWidgetContents)
self.treeWidget.setObjectName(_fromUtf8("treeWidget"))
self.treeWidget.headerItem().setText(0, _fromUtf8("1"))
self.verticalLayout_2.addWidget(self.treeWidget)
self.listWidget = QtGui.QListWidget(self.dockWidgetContents)
self.listWidget.setObjectName(_fromUtf8("listWidget"))
self.verticalLayout_2.addWidget(self.listWidget)
self.verticalLayout_3.addLayout(self.verticalLayout_2)
self.dockWidget.setWidget(self.dockWidgetContents)
PysplotterDlg.addDockWidget(QtCore.Qt.DockWidgetArea(2), self.dockWidget)
self.actionOpen = QtGui.QAction(PysplotterDlg)
self.actionOpen.setText(QtGui.QApplication.translate("PysplotterDlg", "&Open", None, QtGui.QApplication.UnicodeUTF8))
self.actionOpen.setShortcut(QtGui.QApplication.translate("PysplotterDlg", "Ctrl+O", None, QtGui.QApplication.UnicodeUTF8))
self.actionOpen.setObjectName(_fromUtf8("actionOpen"))
self.actionQuit = QtGui.QAction(PysplotterDlg)
self.actionQuit.setText(QtGui.QApplication.translate("PysplotterDlg", "&Quit", None, QtGui.QApplication.UnicodeUTF8))
self.actionQuit.setShortcut(QtGui.QApplication.translate("PysplotterDlg", "Ctrl+Q", None, QtGui.QApplication.UnicodeUTF8))
self.actionQuit.setObjectName(_fromUtf8("actionQuit"))
self.actionToggle_Grid = QtGui.QAction(PysplotterDlg)
self.actionToggle_Grid.setText(QtGui.QApplication.translate("PysplotterDlg", "Toggle &Grid", None, QtGui.QApplication.UnicodeUTF8))
self.actionToggle_Grid.setObjectName(_fromUtf8("actionToggle_Grid"))
self.actionToggle_linear_log = QtGui.QAction(PysplotterDlg)
self.actionToggle_linear_log.setText(QtGui.QApplication.translate("PysplotterDlg", "Toggle &linear/log", None, QtGui.QApplication.UnicodeUTF8))
self.actionToggle_linear_log.setObjectName(_fromUtf8("actionToggle_linear_log"))
self.action_Velocity_Space = QtGui.QAction(PysplotterDlg)
self.action_Velocity_Space.setText(QtGui.QApplication.translate("PysplotterDlg", "&Velocity Space", None, QtGui.QApplication.UnicodeUTF8))
self.action_Velocity_Space.setObjectName(_fromUtf8("action_Velocity_Space"))
self.action_Load_Spectrum = QtGui.QAction(PysplotterDlg)
self.action_Load_Spectrum.setText(QtGui.QApplication.translate("PysplotterDlg", "&Load Spectrum", None, QtGui.QApplication.UnicodeUTF8))
self.action_Load_Spectrum.setObjectName(_fromUtf8("action_Load_Spectrum"))
self.actionShow_Available_Spectra = QtGui.QAction(PysplotterDlg)
self.actionShow_Available_Spectra.setText(QtGui.QApplication.translate("PysplotterDlg", "Show Available Spectra", None, QtGui.QApplication.UnicodeUTF8))
self.actionShow_Available_Spectra.setObjectName(_fromUtf8("actionShow_Available_Spectra"))
self.actionSelect_Line = QtGui.QAction(PysplotterDlg)
self.actionSelect_Line.setText(QtGui.QApplication.translate("PysplotterDlg", "Select Line", None, QtGui.QApplication.UnicodeUTF8))
self.actionSelect_Line.setObjectName(_fromUtf8("actionSelect_Line"))
self.menuFile.addAction(self.actionOpen)
self.menuFile.addSeparator()
self.menuFile.addAction(self.actionQuit)
self.menuView.addAction(self.actionToggle_Grid)
self.menuView.addAction(self.actionToggle_linear_log)
self.menuView.addAction(self.action_Velocity_Space)
self.menu_Spectrum.addAction(self.action_Load_Spectrum)
self.menu_Spectrum.addAction(self.actionShow_Available_Spectra)
self.menu_Spectrum.addAction(self.actionSelect_Line)
self.menubar.addAction(self.menuFile.menuAction())
self.menubar.addAction(self.menuEdit.menuAction())
self.menubar.addAction(self.menuView.menuAction())
self.menubar.addAction(self.menu_Spectrum.menuAction())
self.toolBar.addAction(self.action_Load_Spectrum)
self.toolBar.addAction(self.actionSelect_Line)
self.toolBar.addAction(self.action_Velocity_Space)
self.retranslateUi(PysplotterDlg)
QtCore.QMetaObject.connectSlotsByName(PysplotterDlg)
def retranslateUi(self, PysplotterDlg):
pass
from mplwidget import MplWidget
|
iancze/Pysplotter
|
ui_pysplotter.py
|
Python
|
mit
| 8,252
|
# todo: warn on no default, or something
# todo: get default collection name for picklestorage, mongostorage constructors
# todo: requirements.txt
import pprint
from modularodm import StoredObject
from modularodm.storedobject import ContextLogger
from modularodm import fields
from modularodm import storage
from modularodm.validators import *
from modularodm.query.querydialect import DefaultQueryDialect as Q
from modularodm.translators import DefaultTranslator, JSONTranslator, StringTranslator
pp = pprint.PrettyPrinter(indent=4)
import datetime
import random
import logging
logging.basicConfig(level=logging.DEBUG)
from pymongo import MongoClient
client = MongoClient()
db = client['testdb']
db.tag.remove()
db.blog.remove()
class Ron(StoredObject):
_id = fields.DateTimeField(primary=True)
ron_str = fields.StringField()
ron_int = fields.IntegerField()
ron_now = fields.DateTimeField()
Ron.set_storage(storage.PickleStorage('ron'))
ron1 = Ron()
ron1._id = datetime.datetime.now()
ron1.save()
ron2 = Ron()
ron2._id = datetime.datetime.now() + datetime.timedelta(days=1)
ron2.save()
# import pdb; pdb.set_trace()
ron3 = Ron()
ron3._id = datetime.datetime.now()
ron3.save()
# Ron._add_field('added_ron', fields.StringField())
class Sheila(StoredObject):
_id = fields.StringField(primary=True)
_meta = {'optimistic' : True}
# Simple fields
sheila_str = fields.StringField(default='sheila', validate=True, required=True)
sheila_int = fields.IntegerField(default=7, validate=MaxValueValidator(9))
sheila_now = fields.DateTimeField()
sheila_url = fields.StringField(validate=URLValidator())
sheila_foostop = fields.StringField(required=True, validate=RegexValidator(r'foo$'), list=True)
created = fields.DateTimeField(auto_now_add=True)
modified = fields.DateTimeField(auto_now=True)
# List fields
sheila_strs = fields.StringField(list=True, validate=MinLengthValidator(5), list_validate=MinLengthValidator(3))
sheila_nows = fields.DateTimeField(list=True)#, default=[])
sheila_urls = fields.StringField(list=True, validate=[URLValidator(), MinLengthValidator(20)], list_validate=MinLengthValidator(2))
sheila_ints = fields.IntegerField(list=True, validate=MinValueValidator(3), list_validate=MinLengthValidator(2))
# Foreign fields
sheila_ron = fields.ForeignField('Ron', backref='ron')
sheila_rons = fields.ForeignField('Ron', backref='rons', list=True)
Sheila.set_storage(storage.PickleStorage('sheila'))
# import pdb; pdb.set_trace()
sheila1 = Sheila()
sheila1.sheila_url = None  # 'http://centerforopenscience.org/'
sheila1.sheila_str = 'shh'
sheila1.sheila_strs = ['abcde', 'bcdef', 'qqqqq']
sheila1.sheila_nows = [
datetime.datetime.now() + datetime.timedelta(days=5),
datetime.datetime.now() + datetime.timedelta(days=-5),
]
sheila1.sheila_urls = [
'http://centerforopenscience.org/',
'http://openscienceframework.org/',
]
sheila1.sheila_ints = [5, 3]
sheila1.sheila_ron = ron1
sheila1.sheila_rons = [ron2, ron3]
sheila1.save()
sheila1.save()
# import pdb; pdb.set_trace()
#
# Ron.remove(ron1)
# import pdb; pdb.set_trace()
#
# Sheila.remove(sheila1)
sheila1.sheila_rons = []
sheila1.save()
# import pdb; pdb.set_trace()
# sheila1.sheila_rons = [ron3, ron2]
sheila1.sheila_rons = [ron2]
# import pdb; pdb.set_trace()
sheila1.save()
# import pdb; pdb.set_trace()
sheila1.sheila_ron = ron2
# import pdb; pdb.set_trace()
sheila1.save()
# import pdb; pdb.set_trace()
sheila1.sheila_rons = [ron1, ron2, ron3]
sheila1.save()
sheila1_stored = sheila1.to_storage(clone=True)
sheila1_reloaded = Sheila.from_storage(sheila1_stored)
# import pdb; pdb.set_trace()
class Tag(StoredObject):
value = fields.StringField(primary=True, index=False)
count = fields.StringField(default='c', validate=True, index=True)
misc = fields.StringField(default='')
misc2 = fields.StringField(default='')
created = fields.DateTimeField(validate=True)
modified = fields.DateTimeField(validate=True, auto_now=True)
keywords = fields.StringField(default=['keywd1', 'keywd2'], validate=[MinLengthValidator(5), MaxLengthValidator(10)], list=True)
mybool = fields.BooleanField(default=False)
myint = fields.IntegerField()
myfloat = fields.FloatField(required=True, default=4.5)
myurl = fields.StringField(validate=URLValidator())
class Blog(StoredObject):
_id = fields.StringField(primary=True, optimistic=True)
body = fields.StringField(default='blog body')
title = fields.StringField(default='asdfasdfasdf', validate=MinLengthValidator(8))
tag = fields.ForeignField('Tag', backref='tagged')
tags = fields.ForeignField('Tag', list=True, backref='taggeds')
_meta = {
'optimistic' : True,
'log_level' : logging.DEBUG,
}
import os
try:
    os.remove('db_blog.pkl')
except:
    pass
try:
    os.remove('db_tag.pkl')
except:
    pass
Tag.set_storage(storage.MongoStorage(db, 'tag'))
Blog.set_storage(storage.MongoStorage(db, 'blog'))
# Tag.set_storage(storage.PickleStorage('tag'))
# Blog.set_storage(storage.PickleStorage('blog'))
tag1 = Tag(value=str(random.randint(0, 1000)), count='count_1', keywords=['keywd1', 'keywd3', 'keywd4'])
tag1.save()
tag2 = Tag(value=str(random.randint(0, 1000)), count="count_2", misc="foobar", misc2="a")
tag2.save()
tag3 = Tag(value=str(random.randint(0, 1000)), count="count_3", misc="foobaz", misc2="b")
tag3.save()
tag4 = Tag(value=str(random.randint(0, 1000)), count="mycount_4", misc="bar", misc2="a")
tag4.save()
tag5 = Tag(value=str(random.randint(0, 1000)), count="mycount_5", misc="baz", misc2="b")
tag5.save()
blog1 = Blog(title='blogtitle1')
blog2 = Blog(title='blogtitle2')
blog3 = Blog(title='blogtitle3')
# blog1.tags = [tag1, tag2, tag3]
# blog1.save()
# import pdb; pdb.set_trace()
#
# StoredObject._clear_caches()
# blog1_loaded = Blog.load(blog1._id)
# import pdb; pdb.set_trace()
blog1.tags.append(tag1)
blog1.tags.append(tag1)
# blog1.tags = [tag1, tag1]
# blog1.tag = tag1
# import pdb; pdb.set_trace()
blog1.save()
blog1.tags.pop()
blog1.save()
# import pdb; pdb.set_trace()
logging.debug("foo")
with ContextLogger():
blog2.tag = tag1
# blog2.tags.append(tag1)
blog2.save()
blog3.tag = tag1
blog3.save()
blog4 = Blog(title='tbdtbdtbd', tags=[tag1, tag2, tag3, tag4, tag5])
blog4.save()
logging.debug("bar")
# import pdb; pdb.set_trace()
Blog.remove(Q('title', 'startswith', 'tbd'))
# res = Tag.find(Q('count', 'startswith', 'count_') & Q('misc', 'endswith', 'bar'))
# print 'before rm', res.count()
#
# Tag.remove(Q('count', 'startswith', 'count_') & Q('misc', 'endswith', 'bar'))
#
# res = Tag.find(Q('count', 'startswith', 'count_') & Q('misc', 'endswith', 'bar'))
# print 'after rm', res.count()
Tag.update(Q('count', 'startswith', 'count_'), {'count' : 'shutup'})
print tag1.count
# import pdb; pdb.set_trace()
# print 'here', [(r.misc, r.misc2) for r in res]
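# Query demos below: Q('field', 'operator', value); queries combine with & (and)
# and negate with ~, as in the Blog.remove call above.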
res = Tag.find(Q('count', 'eq', 'count_1'))
print 'here', res.count(), list(res)
res = Tag.find(~Q('count', 'eq', 'count_1'))
print 'here', res.count(), list(res)
res = Tag.find(Q('misc', 'startswith', 'foo'))
print 'here', res.count(), list(res)
res = Tag.find(Q('misc', 'endswith', 'bar'))
print 'here', res.count(), list(res)
# todo: alias to in
res = Tag.find(Q('keywords', 'eq', 'keywd1'))
print 'here', res.count(), list(res)
res = Tag.find(Q('keywords', 'eq', 'keywd2'))
print 'here', res.count(), list(res)
res = Tag.find(Q('keywords', 'eq', 'keywd3'))
print 'here', res.count(), list(res)
# Compound query
res = Tag.find(Q('misc', 'startswith', 'foo'), Q('keywords', 'eq', 'keywd1'))
print 'here', res.count(), list(res)
# Query by foreign key
res = Blog.find(Q('tag', 'eq', tag1))
print 'here', res.count(), list(res)
# Query by foreign list
res = Blog.find(Q('tags', 'eq', tag1))
print 'here', res.count(), list(res)
# # Test deleting a tag
#
# blog1.tags.pop(0)
# blog1.save()
# #
# # End test deleting a tag
#
# # Test replacing a tag
#
# tag4 = Tag(value=str(random.randint(0,1000)), count="ofor@fiv.six")
# tag4.save()
#
# # blog1.tag = tag2
# # blog1.tags.append(tag4)
# blog1.tags[0] = tag4
# blog1.save()
#
# # End test replacing a tag
#
# # # Test clearing tags
# #
# # blog1.tags = []
# # blog1.save()
# #
# # # End test clearing tags
#
# logging.debug('tag1.tagged' + str(tag1.tagged))
# #logging.debug('tag2.tagged' + str(tag2.tagged))
#
#
#
# # use for testing Blog.__dict__['tag'].modified_data[blog1]
#
# # b = Blog.load('Vj8I3')
# # b.tags.append(tag1)
# # b.save()
# # print Blog.load('Vj8I3').tags[0]
# # print Blog.load('Vj8I3').tags[0:1]
# # print Blog.load('Vj8I3').tags[0]
# # print Blog.load('Vj8I3').tags
# # print Blog.load('Vj8I3').tags[0:1]
# # print Tag.load('my first tag').value
#
# # print tag1.tagged
#
# logging.debug('*** DATABASE ***\n' + pp.pformat(Tag._storage[0].store))
# logging.debug('\n' + pp.pformat(Blog._storage[0].store))
# logging.debug('****************')
#
# logging.debug('*** QUERYING ***\n')
# logging.debug(('exact match', list(Tag.find(count='one@two.thr'))))
# logging.debug(('matcher from value', list(Tag.find(count__startswith='one'))))
# logging.debug(('matcher from operator', list(Tag.find(created__le=datetime.datetime.utcnow()))))
|
sloria/modular-odm
|
main.py
|
Python
|
apache-2.0
| 9,277
|
import subprocess
def getRoot(config):
if not config.parent:
return config
return getRoot(config.parent)
def is_gold_linker_available():
if not config.gold_executable:
return False
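    # Probe the gold linker: it must run with --help and advertise '-plugin'
    # support; afterwards, verify that clang accepts -fuse-ld=gold.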
try:
ld_cmd = subprocess.Popen([config.gold_executable, '--help'], stdout = subprocess.PIPE)
ld_out = ld_cmd.stdout.read().decode()
ld_cmd.wait()
except:
return False
    if '-plugin' not in ld_out:
return False
# config.clang is not guaranteed to be just the executable!
clang_cmd = subprocess.Popen(" ".join([config.clang, '-fuse-ld=gold', '-xc', '-']),
shell=True,
universal_newlines = True,
stdin = subprocess.PIPE,
stdout = subprocess.PIPE,
stderr = subprocess.PIPE)
clang_err = clang_cmd.communicate('int main() { return 0; }')[1]
    if 'invalid linker' not in clang_err:
return True
return False
root = getRoot(config)
if root.host_os not in ['Linux'] or not is_gold_linker_available():
config.unsupported = True
|
endlessm/chromium-browser
|
third_party/llvm/compiler-rt/test/profile/Linux/lit.local.cfg.py
|
Python
|
bsd-3-clause
| 1,115
|
import pytest
from tests.support.asserts import assert_success
from tests.support.image import png_dimensions
from tests.support.inline import iframe, inline
from . import element_rect
DEFAULT_CSS_STYLE = """
<style>
div, iframe {
display: block;
border: 1px solid blue;
width: 10em;
height: 10em;
}
</style>
"""
DEFAULT_CONTENT = "<div>Lorem ipsum dolor sit amet, consectetur adipiscing elit.</div>"
def take_element_screenshot(session, element_id):
return session.transport.send(
"GET",
"session/{session_id}/element/{element_id}/screenshot".format(
session_id=session.session_id,
element_id=element_id,
)
)
@pytest.mark.parametrize("domain", ["", "alt"], ids=["same_origin", "cross_origin"])
def test_source_origin(session, url, domain):
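    # Take a reference screenshot of the plain element, then load the same content
    # inside a same-origin or cross-origin iframe: the screenshot of the iframe
    # element must match the iframe's rect dimensions and be pixel-identical to
    # the reference.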
session.url = inline("""{0}{1}""".format(DEFAULT_CSS_STYLE, DEFAULT_CONTENT))
element = session.find.css("div", all=False)
rect = element_rect(session, element)
response = take_element_screenshot(session, element.id)
reference_screenshot = assert_success(response)
assert png_dimensions(reference_screenshot) == (rect["width"], rect["height"])
iframe_content = "<style>body {{ margin: 0; }}</style>{}".format(DEFAULT_CONTENT)
session.url = inline("""{0}{1}""".format(
DEFAULT_CSS_STYLE, iframe(iframe_content, domain=domain)))
frame_element = session.find.css("iframe", all=False)
frame_rect = element_rect(session, frame_element)
response = take_element_screenshot(session, frame_element.id)
screenshot = assert_success(response)
assert png_dimensions(screenshot) == (frame_rect["width"], frame_rect["height"])
assert screenshot == reference_screenshot
|
nnethercote/servo
|
tests/wpt/web-platform-tests/webdriver/tests/take_element_screenshot/iframe.py
|
Python
|
mpl-2.0
| 1,780
|
# -*- coding: utf-8 -*-
#
# Picard, the next-generation MusicBrainz tagger
#
# Copyright (C) 2019 Philipp Wolfer
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import os
import unittest
from picard import config
from picard.formats.ac3 import AC3File
from picard.formats.mutagenext.ac3 import native_ac3
from picard.metadata import Metadata
from .common import (
CommonTests,
load_metadata,
save_and_load_metadata,
)
from .test_apev2 import CommonApeTests
class AC3WithAPETest(CommonApeTests.ApeTestCase):
testfile = 'test.ac3'
supports_ratings = False
expected_info = {
'length': 106,
'~bitrate': '192.0',
'~sample_rate': '44100',
'~channels': '2',
}
unexpected_info = ['~video']
def setUp(self):
super().setUp()
config.setting['ac3_save_ape'] = True
config.setting['remove_ape_from_ac3'] = True
@unittest.skipUnless(native_ac3, "mutagen.ac3 not available")
def test_info(self):
super().test_info()
class AC3NoTagsTest(CommonTests.BaseFileTestCase):
testfile = 'test-apev2.ac3'
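    # With ac3_save_ape and remove_ape_from_ac3 disabled, existing APEv2 tags are
    # read from the file but changes are never written back.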
def setUp(self):
super().setUp()
config.setting['ac3_save_ape'] = False
config.setting['remove_ape_from_ac3'] = False
def test_load_but_do_not_save_tags(self):
metadata = load_metadata(self.filename)
self.assertEqual('Test AC3 with APEv2 tags', metadata['title'])
self.assertEqual('The Artist', metadata['artist'])
metadata['artist'] = 'Foo'
metadata['title'] = 'Bar'
metadata = save_and_load_metadata(self.filename, metadata)
self.assertEqual('Test AC3 with APEv2 tags', metadata['title'])
self.assertEqual('The Artist', metadata['artist'])
def test_remove_ape_tags(self):
config.setting['remove_ape_from_ac3'] = True
metadata = Metadata({
'artist': 'Foo'
})
metadata = save_and_load_metadata(self.filename, metadata)
self.assertEqual('AC-3', metadata['~format'])
self.assertNotIn('title', metadata)
self.assertNotIn('artist', metadata)
def test_info_format(self):
metadata = load_metadata(os.path.join('test', 'data', 'test.ac3'))
self.assertEqual('AC-3', metadata['~format'])
metadata = load_metadata(os.path.join('test', 'data', 'test-apev2.ac3'))
self.assertEqual('AC-3 (APEv2)', metadata['~format'])
if native_ac3:
metadata = load_metadata(os.path.join('test', 'data', 'test.eac3'))
self.assertEqual('Enhanced AC-3', metadata['~format'])
def test_supports_tag(self):
config.setting['ac3_save_ape'] = True
self.assertTrue(AC3File.supports_tag('title'))
config.setting['ac3_save_ape'] = False
self.assertFalse(AC3File.supports_tag('title'))
@unittest.skipUnless(native_ac3, "mutagen.ac3 not available")
class EAC3Test(CommonTests.SimpleFormatsTestCase):
testfile = 'test.eac3'
expected_info = {
'~format': 'Enhanced AC-3',
'length': 107,
'~sample_rate': '44100',
'~channels': '2',
}
unexpected_info = ['~video']
def setUp(self):
super().setUp()
config.setting['ac3_save_ape'] = True
def test_bitrate(self):
        # For EAC-3 the bitrate is calculated and is often a fractional value
metadata = load_metadata(os.path.join('test', 'data', 'test.ac3'))
self.assertAlmostEqual(192.0, float(metadata['~bitrate']))
|
Sophist-UK/Sophist_picard
|
test/formats/test_ac3.py
|
Python
|
gpl-2.0
| 4,137