code stringlengths 2 1.05M | repo_name stringlengths 5 104 | path stringlengths 4 251 | language stringclasses 1 value | license stringclasses 15 values | size int32 2 1.05M |
|---|---|---|---|---|---|
#
import os
import unittest
class TestBase(unittest.TestCase):
    """Shared base class giving tests access to files under data/."""

    @classmethod
    def datafilename(cls, name):
        """Return the path of the test-data file ``name``.

        The file is looked up in the ``data`` directory next to this
        module and must already exist.
        """
        path = os.path.join(
            os.path.dirname(__file__),
            "data",
            name,
        )
        assert os.path.exists(path)
        return path
| deets/pyCamBam | tests/base.py | Python | gpl-2.0 | 295 |
#
# Advene: Annotate Digital Videos, Exchange on the NEt
# Copyright (C) 2008-2017 Olivier Aubert <contact@olivieraubert.net>
#
# Advene is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Advene is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Advene; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
"""Popup menu for Advene elements.
Generic popup menu used by the various Advene views.
"""
from gi.repository import Gtk
import re
import os
from gettext import gettext as _
import advene.core.config as config
from advene.model.package import Package
from advene.model.annotation import Annotation, Relation
from advene.model.schema import Schema, AnnotationType, RelationType
from advene.model.resources import Resources, ResourceData
from advene.model.view import View
from advene.model.query import Query
from advene.rules.elements import RuleSet, Rule, Event, Condition, Action
from advene.gui.util import image_from_position, dialog, get_clipboard
import advene.util.helper as helper
import advene.util.importer
class Menu:
    """Generic popup menu wrapping a single Advene element."""

    def __init__(self, element=None, controller=None, readonly=False):
        self.element = element
        self.controller = controller
        self.readonly = readonly
        # The Gtk menu is built once, at construction time.
        self.menu = self.make_base_menu(element)

    def popup(self):
        """Display the menu at the current pointer position."""
        self.menu.popup_at_pointer(None)
        return True

    def get_title(self, element):
        """Return a truncated human-readable title for the element."""
        return self.controller.get_title(element, max_size=40)
def goto_annotation(self, widget, ann):
    """Seek the player to the annotation begin and select it in the GUI."""
    ctrl = self.controller
    ctrl.update_status(status="seek", position=ann.fragment.begin)
    ctrl.gui.set_current_annotation(ann)
    return True

def duplicate_annotation(self, widget, ann):
    """Duplicate the annotation through the controller."""
    self.controller.duplicate_annotation(ann)
    return True

def activate_annotation(self, widget, ann):
    """Emit the AnnotationActivate event for the annotation."""
    self.controller.notify("AnnotationActivate", annotation=ann)
    return True

def desactivate_annotation(self, widget, ann):
    """Emit the AnnotationDeactivate event for the annotation."""
    self.controller.notify("AnnotationDeactivate", annotation=ann)
    return True

def activate_stbv(self, widget, view):
    """Make the given dynamic view the active one."""
    self.controller.activate_stbv(view)
    return True

def open_adhoc_view(self, widget, view):
    """Open the given adhoc view in the GUI."""
    self.controller.gui.open_adhoc_view(view)
    return True
def create_element(self, widget, elementtype=None, parent=None):
    """Open the element-creation popup for the given element type.

    elementtype is either a model class (Annotation, Schema, ...) or one
    of the pseudo-types 'staticview' / 'dynamicview', which both map to
    View with an appropriate content mimetype.
    """
    if elementtype == 'staticview':
        elementtype = View
        mimetype = 'text/html'
    elif elementtype == 'dynamicview':
        elementtype = View
        mimetype = 'application/x-advene-ruleset'
    else:
        mimetype = None
    creation_popup = self.controller.gui.create_element_popup(
        type_=elementtype,
        parent=parent,
        controller=self.controller,
        mimetype=mimetype)
    creation_popup.popup()
    return True
def do_insert_resource_file(self, parent=None, filename=None, id_=None):
    """Insert the file as a resource under parent and notify.

    If id_ is None it is derived from the file basename, replacing
    characters that are invalid in an identifier with '_'.
    Returns the created resource element.
    """
    if id_ is None:
        # Generate the id_
        basename = os.path.basename(filename)
        id_ = re.sub('[^a-zA-Z0-9_.]', '_', basename)
    # FIX: use a context manager so the descriptor is closed even if the
    # assignment raises; read() without a size slurps the whole file
    # (equivalent to the original read(size + 2), since the file only
    # holds `size` bytes).
    with open(filename, 'rb') as f:
        parent[id_] = f.read()
    el = parent[id_]
    self.controller.notify('ResourceCreate',
                           resource=el)
    return el

def do_insert_resource_dir(self, parent=None, dirname=None, id_=None):
    """Recursively insert the directory as a resource folder under parent.

    Creates the folder, notifies, then inserts every entry (files and
    sub-directories). Returns the created folder element.
    """
    if id_ is None:
        # Generate the id_
        basename = os.path.basename(dirname)
        id_ = re.sub('[^a-zA-Z0-9_.]', '_', basename)
    parent[id_] = parent.DIRECTORY_TYPE
    el = parent[id_]
    self.controller.notify('ResourceCreate',
                           resource=el)
    for entry in os.listdir(dirname):
        fullpath = os.path.join(dirname, entry)
        if os.path.isdir(fullpath):
            self.do_insert_resource_dir(parent=el, dirname=fullpath)
        else:
            self.do_insert_resource_file(parent=el, filename=fullpath)
    return el
def insert_resource_data(self, widget, parent=None, title=None, filter=None):
    """Prompt for a file and insert it as a resource under parent.

    When the filename contains characters invalid in an identifier, the
    user is asked to confirm or adjust the generated id.
    """
    if title is None:
        title = _("Choose the file to insert")
    filename = dialog.get_filename(title=title, filter=filter)
    if filename is None:
        return True
    basename = os.path.basename(filename)
    id_ = re.sub('[^a-zA-Z0-9_.]', '_', basename)
    if id_ != basename:
        # Loop until the user provides a valid identifier or cancels.
        while True:
            id_ = dialog.entry_dialog(title=_("Select a valid identifier"),
                                      text=_("The filename %s contains invalid characters\nthat have been replaced.\nYou can modify this identifier if necessary:") % filename,
                                      default=id_)
            if id_ is None:
                # Edition cancelled
                return True
            if re.match('^[a-zA-Z0-9_.]+$', id_):
                break
    self.do_insert_resource_file(parent=parent, filename=filename, id_=id_)
    return True

def insert_soundclip(self, widget, parent=None):
    """Prompt for an audio file and insert it under parent."""
    self.insert_resource_data(widget, parent, title=_("Choose the soundclip to insert"), filter='audio')
    return True

def insert_resource_directory(self, widget, parent=None):
    """Prompt for a directory and insert it recursively under parent."""
    chosen = dialog.get_dirname(title=_("Choose the directory to insert"))
    if chosen is None:
        return True
    self.do_insert_resource_dir(parent=parent, dirname=chosen)
    return True
def edit_element(self, widget, el):
    """Open the edit window for the element."""
    self.controller.gui.edit_element(el)
    return True

def filter_service(self, widget, importer, annotationtype):
    """Apply an importer-based filter to the annotation type."""
    self.controller.gui.open_adhoc_view('importerview', message=_("Apply %s") % importer.name, display_unlikely=False, importerclass=importer, source_type=annotationtype)

def popup_get_offset(self):
    """Ask the user for an offset in ms.

    Returns the offset as int, or None if the dialog was cancelled or
    the entry was not a valid integer.
    """
    offset = dialog.entry_dialog(title='Enter an offset',
                                 text=_("Give the offset to use\non specified element.\nIt is in ms and can be\neither positive or negative."),
                                 default="0")
    if offset is None:
        return None
    try:
        return int(offset)
    except ValueError:
        # FIX: a non-numeric entry used to raise ValueError and crash
        # the handler; treat it as a cancellation instead.
        return None

def offset_element(self, widget, el):
    """Ask for an offset and apply it to the element."""
    offset = self.popup_get_offset()
    if offset is not None:
        self.controller.offset_element(el, offset)
    return True
def search_replace_content(self, widget, el):
    """Open the search/replace dialog for the element's content.

    Accepts an Annotation or View (single element), an AnnotationType
    (all its annotations) or a Package (all annotations).
    """
    if isinstance(el, (Annotation, View)):
        elements = [el]
        # FIX: translate the format string *before* interpolating, so the
        # msgid can actually be found in the gettext catalog.
        title = _("Replace content in %s") % self.controller.get_title(el)
    elif isinstance(el, AnnotationType):
        elements = el.annotations
        title = _("Replace content in annotations of type %s") % self.controller.get_title(el)
    elif isinstance(el, Package):
        elements = el.annotations
        title = _("Replace content in all annotations")
    else:
        # FIX: previously fell through with an unbound local (NameError)
        # for unsupported element types.
        return True
    self.controller.gui.search_replace_dialog(elements, title=title)
    return True
def copy_id(self, widget, el):
    """Copy the element id to the clipboard."""
    get_clipboard().set_text(el.id, -1)
    return True

def browse_element(self, widget, el):
    """Open the element in the browser adhoc view."""
    self.controller.gui.open_adhoc_view('browser', element=el)
    return True

def query_element(self, widget, el):
    """Open an interactive query rooted at the element."""
    self.controller.gui.open_adhoc_view('interactivequery', here=el, sources=[ "here" ])
    return True

def delete_element(self, widget, el):
    """Delete the element through the controller."""
    self.controller.delete_element(el)
    return True

def delete_elements(self, widget, el, elements):
    """Delete all given elements of an annotation/relation type as one batch."""
    batch_id = object()
    if isinstance(el, (AnnotationType, RelationType)):
        for member in elements:
            self.controller.delete_element(member, batch=batch_id)
    return True
def create_montage(self, widget, rt):
"""Create a montage from a relationtype.
"""
# Start from the set of relation sources (first members), earliest first.
l = list(set( r.members[0] for r in rt.relations ))
res = []
if l:
l.sort(key=lambda a: a.fragment.begin)
ann = l[0]
# Follow chains of rt-typed relations, consuming annotations from l;
# when a chain ends (or loops back into res), restart from the next root.
while True:
res.append(ann)
try:
l.remove(ann)
except ValueError:
pass
r = ann.typedRelatedOut.get(rt.id, None)
if not r:
ann = None
else:
ann = r[0]
if ann is None or ann in res:
# End of relations. Look for other roots.
if l:
ann = l[0]
else:
break
self.controller.gui.open_adhoc_view('montage', elements=res)
return True
def pick_color(self, widget, element):
# Delegate the color chooser to the GUI.
self.controller.gui.update_color(element)
return True
def add_menuitem(self, menu=None, item=None, action=None, *param, **kw):
    """Append a menu entry to menu and return it.

    An empty or None item produces a separator; otherwise a MenuItem is
    created and, if action is given, connected to 'activate' with the
    extra positional/keyword arguments.
    """
    if item is None or item == "":
        entry = Gtk.SeparatorMenuItem()
    else:
        entry = Gtk.MenuItem(item, use_underline=False)
        if action is not None:
            entry.connect('activate', action, *param, **kw)
    menu.append(entry)
    return entry
def make_base_menu(self, element):
"""Build a base popup menu dedicated to the given element.
@param element: the element
@type element: an Advene element
@return: the built menu
@rtype: Gtk.Menu
"""
menu = Gtk.Menu()
def add_item(*p, **kw):
return self.add_menuitem(menu, *p, **kw)
# Title entry carries the common submenu (edit/browse/delete...).
title=add_item(self.get_title(element))
if hasattr(element, 'id') or isinstance(element, Package):
title.set_submenu(self.common_submenu(element))
# In expert mode, typed elements also expose their type's submenu.
if hasattr(element, 'type') and config.data.preferences['expert-mode']:
title = add_item(_("Type %s") % self.get_title(element.type))
title.set_submenu(self.common_submenu(element.type))
add_item("")
# Elements without an id (e.g. bundles) raise AttributeError here.
try:
i=element.id
add_item(_("Copy id %s") % i,
self.copy_id,
element)
except AttributeError:
pass
if hasattr(element, 'viewableType'):
self.make_bundle_menu(element, menu)
# Dispatch table: element class -> menu-builder method.
specific_builder={
Annotation: self.make_annotation_menu,
Relation: self.make_relation_menu,
AnnotationType: self.make_annotationtype_menu,
RelationType: self.make_relationtype_menu,
Schema: self.make_schema_menu,
View: self.make_view_menu,
Package: self.make_package_menu,
Query: self.make_query_menu,
Resources: self.make_resources_menu,
ResourceData: self.make_resourcedata_menu,
}
for t, method in specific_builder.items():
if isinstance(element, t):
method(element, menu)
menu.show_all()
return menu
def display_stats(self, m, el):
    """Display statistics about the element's annotations.

    el can be either the package or an annotation type.
    """
    label = _("<b>Statistics about %s</b>\n\n") % self.controller.get_title(el)
    self.controller.gui.display_statistics(el.annotations, label=label)
    return True
def renumber_annotations(self, m, at):
    """Renumber all annotations of a given type.

    Asks for a starting offset, then rewrites the first numeric value of
    each annotation content (in fragment order) with its 1-based rank.
    Structured contents get/update a ``num=`` field; text/plain contents
    with no number are overwritten.
    """
    d = Gtk.Dialog(title=_("Renumbering annotations of type %s") % self.get_title(at),
                   parent=self.controller.gui.gui.win,
                   flags=Gtk.DialogFlags.DESTROY_WITH_PARENT,
                   buttons=(Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL,
                            Gtk.STOCK_OK, Gtk.ResponseType.OK,
                            ))
    l = Gtk.Label()
    l.set_markup(_("<b>Renumber all annotations according to their order.</b>\n\n<i>Note that this action cannot be undone.</i>\nReplace the first numeric value of the annotation content with the new annotation number.\nIf no numeric value is found and the annotation is structured, it will insert the number.\nIf no numeric value is found and the annotation is of type text/plain, it will overwrite the annotation content.\nThe offset parameter allows you to renumber from a given annotation."))
    l.set_line_wrap(True)
    l.show()
    d.vbox.add(l)
    hb = Gtk.HBox()
    l = Gtk.Label(label=_("Offset"))
    hb.pack_start(l, False, True, 0)
    s = Gtk.SpinButton()
    s.set_range(-5, len(at.annotations))
    s.set_value(1)
    s.set_increments(1, 5)
    hb.add(s)
    d.vbox.pack_start(hb, False, True, 0)
    d.connect('key-press-event', dialog.dialog_keypressed_cb)
    d.show_all()
    dialog.center_on_mouse(d)
    res = d.run()
    if res == Gtk.ResponseType.OK:
        re_number = re.compile(r'(\d+)')
        re_struct = re.compile(r'^num=(\d+)$', re.MULTILINE)
        offset = s.get_value_as_int() - 1
        # sorted() keeps the bundle itself untouched (the original sorted
        # at.annotations in place as a side effect).
        annotations = sorted(at.annotations, key=lambda a: a.fragment.begin)[offset:]
        size = float(len(annotations))
        dial = Gtk.Dialog(_("Renumbering %d annotations") % size,
                          self.controller.gui.gui.win,
                          Gtk.DialogFlags.MODAL | Gtk.DialogFlags.DESTROY_WITH_PARENT,
                          (Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL))
        prg = Gtk.ProgressBar()
        dial.vbox.pack_start(prg, False, True, 0)
        dial.show_all()
        # BUGFIX: the original iterated over l[offset:] although l had
        # already been sliced by offset, silently skipping `offset`
        # extra annotations at the start of the renumbering.
        for i, a in enumerate(annotations):
            prg.set_text(_("Annotation #%d") % i)
            prg.set_fraction(i / size)
            # Keep the UI responsive during the rewrite loop.
            while Gtk.events_pending():
                Gtk.main_iteration()
            if a.type.mimetype == 'application/x-advene-structured':
                if re_struct.search(a.content.data):
                    # A 'num' field is present. Update it.
                    data = re_struct.sub("num=%d" % (i + 1), a.content.data)
                else:
                    # Insert the num field
                    data = ("num=%d\n" % (i + 1)) + a.content.data
            elif re_number.search(a.content.data):
                # There is a number. Simply substitute the new one.
                data = re_number.sub(str(i + 1), a.content.data)
            elif a.type.mimetype == 'text/plain':
                # Overwrite the contents
                data = str(i + 1)
            else:
                data = None
            if data is not None and a.content.data != data:
                a.content.data = data
        self.controller.notify('PackageActivate', package=self.controller.package)
        dial.destroy()
    d.destroy()
    return True
def split_package_by_type(self, element):
    """Split the package into one package per annotation of this type,
    showing progress in a dialog."""
    title = self.controller.get_title(element)
    count = len(element.annotations)

    def runner_method(callback=None):
        # Adapt the controller progress callback to the dialog callback.
        def progress_callback(name, filename, n, index):
            return callback(index / count, _("Created %(name)s - %(n)d annotations") % locals())
        self.controller.split_package_by_type(element, callback=progress_callback)

    dialog.progress_dialog(title=_("Splitting package according to %s") % title,
                           label=_("For each of the %(count)d annotations in %(atype)s, create a package named after the source package and the annotation content, copying only annotations contained in the reference annotation.") % { 'count': count,
                                                                                                                                                                                                                                                 'atype': title },
                           controller=self.controller,
                           runner=runner_method)
def extract_montage(self, m, elements):
"""Extract the montage corresponding to annotation list
"""
ann = elements[0]
if len(elements) == 1:
basename = ann.id + "-" + helper.title2id(self.controller.get_title(ann)) + ".webm"
else:
atypes = set(a.type for a in elements)
if len(atypes) == 1:
# Single type - use its title for basename
# NOTE(review): title2id receives the type *object* here, while the
# single-annotation branch passes a title string — confirm title2id
# accepts elements, else this should be get_title(ann.type).
basename = helper.title2id(ann.type) + '.webm'
else:
basename = 'montage.webm'
self.controller.gui.render_montage_dialog(elements, basename = basename)
return True
def common_submenu(self, element):
    """Build the submenu shared by all element types
    (edit, browse, query, delete, offset...)."""
    submenu = Gtk.Menu()

    def add_item(*p, **kw):
        self.add_menuitem(submenu, *p, **kw)

    # Entries available for every element.
    add_item(_("Edit"), self.edit_element, element)
    if config.data.preferences['expert-mode']:
        add_item(_("Browse"), self.browse_element, element)
    add_item(_("Query"), self.query_element, element)

    def open_in_browser(i, v):
        ctx = self.controller.build_context(here=element)
        self.controller.open_url(ctx.evaluateValue('here/absolute_url'))
        return True

    add_item(_("Open in web browser"), open_in_browser, element)
    if not self.readonly:
        # Deletable element classes.
        if isinstance(element, (Annotation, Relation, View, Query,
                                Schema, AnnotationType, RelationType, ResourceData)):
            add_item(_("Delete"), self.delete_element, element)
        if isinstance(element, Resources) and isinstance(element.parent, Resources):
            # Resources folders are deletable, except the root folder
            # (whose parent is the package).
            add_item(_("Delete"), self.delete_element, element)
        if isinstance(element, (Annotation, AnnotationType, Package)):
            add_item(_("Search/replace content"), self.search_replace_content, element)
        # Offsetable element classes (expert mode only).
        if (config.data.preferences['expert-mode']
                and isinstance(element, (Annotation, Schema, AnnotationType, Package))):
            add_item(_("Offset"), self.offset_element, element)
    submenu.show_all()
    return submenu
def activate_submenu(self, element):
    """Build an "activate" submenu for the given annotation"""
    submenu = Gtk.Menu()

    def add_item(*p, **kw):
        self.add_menuitem(submenu, *p, **kw)

    add_item(_("Activate"), self.activate_annotation, element)
    add_item(_("Desactivate"), self.desactivate_annotation, element)
    submenu.show_all()
    return submenu
def make_annotation_menu(self, element, menu):
# Populate menu with Annotation-specific entries: navigation, relation
# submenus (populated lazily on 'map'), snapshot and bound adjustment.
def add_item(*p, **kw):
self.add_menuitem(menu, *p, **kw)
def loop_on_annotation(menu, ann):
self.controller.gui.loop_on_annotation_gui(ann, goto=True)
return True
def save_snapshot(menu, ann):
self.controller.gui.save_snapshot_as(ann.fragment.begin)
return True
add_item(_("Go to %s") % helper.format_time(element.fragment.begin), self.goto_annotation, element)
add_item(_("Loop"), loop_on_annotation, element)
add_item(_("Duplicate"), self.duplicate_annotation, element)
item = Gtk.MenuItem(_("Highlight"), use_underline=False)
item.set_submenu(self.activate_submenu(element))
menu.append(item)
add_item(_("Save snapshot..."), save_snapshot, element)
if 'montagerenderer' in self.controller.generic_features:
add_item(_("Extract video fragment"), self.extract_montage, [ element ])
def build_submenu(submenu, el, items):
"""Build the submenu for the given element.
"""
# Connected to 'map' so the submenu is only built when first shown.
if submenu.get_children():
# The submenu was already populated.
return False
if len(items) == 1:
# Only 1 elements, do not use an intermediary menu
m=Menu(element=items[0], controller=self.controller)
for c in m.menu.get_children():
m.menu.remove(c)
submenu.append(c)
else:
for i in items:
item=Gtk.MenuItem(self.get_title(i), use_underline=False)
m=Menu(element=i, controller=self.controller)
item.set_submenu(m.menu)
submenu.append(item)
submenu.show_all()
return False
def build_related(submenu, el):
"""Build the related annotations submenu for the given element.
"""
# Also lazy ('map'): lists incoming then outgoing typed relations.
if submenu.get_children():
# The submenu was already populated.
return False
if el.incomingRelations:
i=Gtk.MenuItem(_("Incoming"))
submenu.append(i)
i=Gtk.SeparatorMenuItem()
submenu.append(i)
for t, l in el.typedRelatedIn.items():
at=self.controller.package.get_element_by_id(t)
m=Gtk.MenuItem(self.get_title(at), use_underline=False)
amenu=Gtk.Menu()
m.set_submenu(amenu)
amenu.connect('map', build_submenu, at, l)
submenu.append(m)
if submenu.get_children():
# There were incoming annotations. Use a separator
i=Gtk.SeparatorMenuItem()
submenu.append(i)
if el.outgoingRelations:
i=Gtk.MenuItem(_("Outgoing"))
submenu.append(i)
i=Gtk.SeparatorMenuItem()
submenu.append(i)
for t, l in el.typedRelatedOut.items():
at=self.controller.package.get_element_by_id(t)
m=Gtk.MenuItem(self.get_title(at), use_underline=False)
amenu=Gtk.Menu()
m.set_submenu(amenu)
amenu.connect('map', build_submenu, at, l)
submenu.append(m)
submenu.show_all()
return False
if element.relations:
i=Gtk.MenuItem(_("Related annotations"), use_underline=False)
submenu=Gtk.Menu()
i.set_submenu(submenu)
submenu.connect('map', build_related, element)
menu.append(i)
if element.incomingRelations:
i=Gtk.MenuItem(_("Incoming relations"), use_underline=False)
submenu=Gtk.Menu()
i.set_submenu(submenu)
submenu.connect('map', build_submenu, element, element.incomingRelations)
menu.append(i)
if element.outgoingRelations:
i=Gtk.MenuItem(_("Outgoing relations"), use_underline=False)
submenu=Gtk.Menu()
i.set_submenu(submenu)
submenu.connect('map', build_submenu, element, element.outgoingRelations)
menu.append(i)
add_item("")
# Thumbnail of the annotation begin; activating it seeks the player.
item = Gtk.MenuItem()
item.add(image_from_position(self.controller,
position=element.fragment.begin,
media=element.media,
height=60))
item.connect('activate', self.goto_annotation, element)
menu.append(item)
#add_item(element.content.data[:40])
add_item(_('Begin: %s')
% helper.format_time (element.fragment.begin), lambda i: self.controller.gui.adjust_annotation_bound(element, 'begin'))
add_item(_('End: %s') % helper.format_time (element.fragment.end), lambda i: self.controller.gui.adjust_annotation_bound(element, 'end'))
add_item(_('Duration: %s') % helper.format_time (element.fragment.duration))
return
def make_relation_menu(self, element, menu):
    """Populate menu with Relation-specific entries (content + members)."""
    def add_item(*p, **kw):
        self.add_menuitem(menu, *p, **kw)

    add_item(element.content.data)
    add_item(_('Members:'))
    for member in element.members:
        entry = Gtk.MenuItem(self.get_title(member), use_underline=False)
        entry.set_submenu(Menu(element=member, controller=self.controller).menu)
        menu.append(entry)
    return
def make_package_menu(self, element, menu):
    """Populate menu with Package-specific entries (no-op when readonly)."""
    def add_item(*p, **kw):
        self.add_menuitem(menu, *p, **kw)

    if self.readonly:
        return
    add_item(_('Edit package properties...'), self.controller.gui.on_package_properties1_activate)
    add_item(_('%d annotations(s) - statistics') % len(element.annotations), self.display_stats, element)
    add_item('')
    add_item(_('Create a new static view...'), self.create_element, 'staticview', element)
    add_item(_('Create a new dynamic view...'), self.create_element, 'dynamicview', element)
    add_item(_('Create a new annotation...'), self.create_element, Annotation, element)
    add_item(_('Create a new schema...'), self.create_element, Schema, element)
    add_item(_('Create a new query...'), self.create_element, Query, element)
    return
def make_resources_menu(self, element, menu):
    """Populate menu with Resources-folder-specific entries."""
    def add_item(*p, **kw):
        self.add_menuitem(menu, *p, **kw)

    if self.readonly:
        return
    add_item(_('Create a new folder...'), self.create_element, Resources, element)
    add_item(_('Create a new resource file...'), self.create_element, ResourceData, element)
    add_item(_('Insert a new resource file...'), self.insert_resource_data, element)
    add_item(_('Insert a new resource directory...'), self.insert_resource_directory, element)
    if element.resourcepath == '':
        # Resources root. NOTE: building this menu has the side effect of
        # creating the soundclips folder if it does not exist yet.
        if 'soundclips' not in element:
            element['soundclips'] = element.DIRECTORY_TYPE
            self.controller.notify('ResourceCreate', resource=element['soundclips'])
        add_item(_('Insert a soundclip...'), self.insert_soundclip, element['soundclips'])
    elif element.resourcepath == 'soundclips':
        add_item(_('Insert a soundclip...'), self.insert_soundclip, element)
    return
def make_resourcedata_menu(self, element, menu):
    """Populate menu with ResourceData-specific entries."""
    def add_item(*p, **kw):
        self.add_menuitem(menu, *p, **kw)

    def play_sound(w, filename):
        self.controller.soundplayer.play(filename)
        return True

    # Guess audio resources from the id extension and offer playback.
    if element.id.split('.')[-1] in ('wav', 'ogg', 'mp3'):
        add_item(_('Play sound'), play_sound, element.file_)
    return
def make_schema_menu(self, element, menu):
    """Populate menu with Schema-specific entries (no-op when readonly)."""
    def add_item(*p, **kw):
        self.add_menuitem(menu, *p, **kw)

    if self.readonly:
        return
    add_item(_('Create a new annotation type...'),
             self.create_element, AnnotationType, element)
    add_item(_('Create a new relation type...'),
             self.create_element, RelationType, element)
    add_item(_('Select a color'), self.pick_color, element)
    return
def create_dynamic_view(self, at):
    """Create a caption dynamic view for the given annotation-type.
    """
    pkg = self.controller.package
    ident = 'v_caption_%s' % at.id
    if pkg.get_element_by_id(ident) is not None:
        dialog.message_dialog(_("A caption dynamic view for %s already seems to exist.") % self.get_title(at))
        return True
    view = pkg.createView(
        ident=ident,
        author=config.data.userid,
        date=helper.get_timestamp(),
        clazz='package',
        content_mimetype='application/x-advene-ruleset'
        )
    view.title = _("Caption %s annotations") % self.get_title(at)
    # Build a one-rule ruleset: on AnnotationBegin of this type, caption
    # the annotation content.
    ruleset = RuleSet()
    catalog = self.controller.event_handler.catalog
    registered = catalog.get_action("AnnotationCaption")
    action = Action(registeredaction=registered, catalog=catalog)
    action.add_parameter('message', 'annotation/content/data')
    rule = Rule(name=_("Caption the annotation"),
                event=Event("AnnotationBegin"),
                condition=Condition(lhs='annotation/type/id',
                                    operator='equals',
                                    rhs='string:%s' % at.id),
                action=action)
    ruleset.add_rule(rule)
    view.content.data = ruleset.xml_repr()
    pkg.views.append(view)
    self.controller.notify('ViewCreate', view=view)
    self.controller.activate_stbv(view)
    return True
def make_annotationtype_menu(self, element, menu):
    """Populate menu with AnnotationType-specific entries."""
    def add_item(*p, **kw):
        self.add_menuitem(menu, *p, **kw)

    def create_static(at):
        # Create and immediately open a comment (static) view for at.
        v = self.controller.create_static_view([ at ])
        self.controller.gui.edit_element(v)
        return True

    add_item(_('Create a comment view'), lambda i: create_static(element))
    add_item(_('Generate a caption dynamic view...'), lambda i: self.create_dynamic_view(element))
    add_item(_('Display as transcription'), lambda i: self.controller.gui.open_adhoc_view('transcription', source='here/annotationTypes/%s/annotations/sorted' % element.id))
    add_item(_('Display annotations in table'), lambda i: self.controller.gui.open_adhoc_view('table', elements=element.annotations, source='here/annotationTypes/%s/annotations' % element.id))
    add_item(_('Export to another format...'), lambda i: self.controller.gui.export_element(element))
    if 'montagerenderer' in self.controller.generic_features:
        add_item(_("Extract video montage"), self.extract_montage, sorted(element.annotations))
    add_item(_('Split according to annotations'), lambda i: self.split_package_by_type(element))
    for imp in (i for i in advene.util.importer.IMPORTERS if i.annotation_filter):
        # FIX: translate the format string, then interpolate, so that the
        # msgid "Apply %s..." can be found in the gettext catalog (the
        # original translated the already-interpolated string, which can
        # never match).
        add_item(_("Apply %s...") % imp.name, self.filter_service, imp, element)
    if self.readonly:
        return
    add_item(None)
    add_item(_('Select a color'), self.pick_color, element)
    add_item(_('Create a new annotation...'), self.create_element, Annotation, element)
    add_item(_('Delete all annotations'), self.delete_elements, element, element.annotations)
    add_item(_('Renumber annotations...'), self.renumber_annotations, element)
    add_item(_('Shot validation view...'), lambda m, at: self.controller.gui.adjust_annotationtype_bounds(at), element)
    add_item('')
    add_item(_('%d annotations(s) - statistics') % len(element.annotations), self.display_stats, element)
    return
def create_follow_dynamic_view(self, rt):
    """Create a dynamic view for the given relation-type.
    """
    pkg = self.controller.package
    ident = 'v_follow_%s' % rt.id
    if pkg.get_element_by_id(ident) is not None:
        dialog.message_dialog(_("A follow dynamic view for %s already seems to exist.") % self.get_title(rt))
        return True
    view = pkg.createView(
        ident=ident,
        author=config.data.userid,
        date=helper.get_timestamp(),
        clazz='package',
        content_mimetype='application/x-advene-ruleset'
        )
    view.title = _("Follow %s relation-type") % self.get_title(rt)
    # One rule: when an annotation with an outgoing relation of this type
    # ends, jump to the begin of the first related annotation.
    ruleset = RuleSet()
    catalog = self.controller.event_handler.catalog
    registered = catalog.get_action("PlayerGoto")
    action = Action(registeredaction=registered, catalog=catalog)
    action.add_parameter('position', 'annotation/typedRelatedOut/%s/first/fragment/begin' % rt.id)
    rule = Rule(name=_("Follow the relation"),
                event=Event("AnnotationEnd"),
                condition=Condition(lhs='annotation/typedRelatedOut/%s' % rt.id,
                                    operator='value'),
                action=action)
    ruleset.add_rule(rule)
    view.content.data = ruleset.xml_repr()
    pkg.views.append(view)
    self.controller.notify('ViewCreate', view=view)
    self.controller.activate_stbv(view)
    return True
def make_relationtype_menu(self, element, menu):
    """Populate menu with RelationType-specific entries (no-op when readonly)."""
    def add_item(*p, **kw):
        self.add_menuitem(menu, *p, **kw)

    if self.readonly:
        return
    add_item(_('Select a color'), self.pick_color, element)
    add_item(_('Delete all relations...'), self.delete_elements, element, element.relations)
    add_item(_('Create montage from related annotations'), self.create_montage, element)
    add_item(_('Create dynamic view following relations'), lambda i, e: self.create_follow_dynamic_view(e), element)
    return
def make_query_menu(self, element, menu):
    """Populate menu with Query-specific entries."""
    def add_item(*p, **kw):
        self.add_menuitem(menu, *p, **kw)

    def try_query(item, expr):
        # Run the query against expr and show the result, logging any
        # failure instead of crashing the UI.
        try:
            res, _q = self.controller.evaluate_query(element, expr=expr)
            self.controller.gui.open_adhoc_view('interactiveresult',
                                                query=element,
                                                result=res,
                                                destination='east')
        except Exception as e:
            self.controller.log(_('Exception in query: %s') % str(e))
        return True

    root = Gtk.MenuItem(_('Apply query on...'))
    menu.append(root)
    sub = Gtk.Menu()
    root.set_submenu(sub)
    for expr, label in (
            ('package', _('the package')),
            ('package/annotations', _('all annotations of the package')),
            ('package/annotations/first', _('the first annotation of the package')),
            ):
        entry = Gtk.MenuItem(label)
        entry.connect('activate', try_query, expr)
        sub.append(entry)
    return
def make_view_menu(self, element, menu):
    """Populate menu with View-specific entries, depending on view kind."""
    def open_in_browser(i, v):
        ctx = self.controller.build_context()
        self.controller.open_url(ctx.evaluateValue('here/view/%s/absolute_url' % v.id))
        return True

    def add_item(*p, **kw):
        self.add_menuitem(menu, *p, **kw)

    kind = helper.get_view_type(element)
    if kind == 'dynamic':
        add_item(_('Activate view'), self.activate_stbv, element)
    elif kind == 'adhoc':
        add_item(_('Open adhoc view'), self.open_adhoc_view, element)
    elif kind == 'static' and element.matchFilter['class'] in ('package', '*'):
        add_item(_('Open in web browser'), open_in_browser, element)
    return
def make_bundle_menu(self, element, menu):
    """Populate menu for element bundles (query/view/schema lists)."""
    def add_item(*p, **kw):
        self.add_menuitem(menu, *p, **kw)

    if self.readonly:
        return
    if element.viewableType == 'query-list':
        add_item(_('Create a new query...'), self.create_element, Query, element.rootPackage)
    elif element.viewableType == 'view-list':
        add_item(_('Create a new static view...'), self.create_element, 'staticview', element.rootPackage)
        add_item(_('Create a new dynamic view...'), self.create_element, 'dynamicview', element.rootPackage)
    elif element.viewableType == 'schema-list':
        add_item(_('Create a new schema...'), self.create_element, Schema, element.rootPackage)
    return
| oaubert/advene | lib/advene/gui/popup.py | Python | gpl-2.0 | 36,607 |
#!/usr/bin/python
# brick_status_event_handler.py Event handler for Brick status
# Service. Reschedules the check for volume status service whenever a
# brick status changes.
# Copyright (C) 2014 Red Hat Inc
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
#
import argparse
import sys
import datetime
import submit_external_command
from glusternagios import utils
GLUSTER_HOST_GROUP = "gluster-host"
def parse_input():
parser = argparse.ArgumentParser(description="Nagios plugin to handle "
"brick status events")
parser.add_argument('-hg', '--hostgroups', action='store',
dest='hostGroups',
type=str, required=True, help='Hostgroups')
parser.add_argument('-st', '--statetype', action='store',
dest='stateType',
type=str, required=True, help='Service State Type')
parser.add_argument('-v', '--volume', action='store', dest='volume',
type=str, required=True, help='Volume Name')
args = parser.parse_args()
return args
def _findClusterName(hostGroupNames):
hostGroups = hostGroupNames.split(",")
for hostGroup in hostGroups:
if hostGroup != GLUSTER_HOST_GROUP:
return hostGroup
if __name__ == '__main__':
args = parse_input()
# Only react to HARD state changes; SOFT transitions are transient.
if args.stateType == "SOFT":
sys.exit(utils.PluginStatusCode.OK)
# The cluster name is the non-generic hostgroup of the brick's host.
hostName = _findClusterName(args.hostGroups)
now = datetime.datetime.now()
command = "SCHEDULE_SVC_CHECK"
volumeStatusService = "Volume Status - %s" % args.volume
# Nagios external-command format: [timestamp] CMD;host;service;when
cmdStr = "[%s] %s;%s;%s;%s\n" % (now, command, hostName,
volumeStatusService, now)
submit_external_command.submitExternalCommand(cmdStr)
sys.exit(utils.PluginStatusCode.OK)
| dealnews/nagios-server-addons | plugins/brick_status_event_handler.py | Python | gpl-2.0 | 2,494 |
#coding=utf-8
#-*- encoding: utf-8 -*-
import tornado.ioloop
import tornado.iostream
import socket
import struct
import NotifyTCPServer
def readPacketHeader():
stream.read_bytes(NotifyTCPServer.PACKET_HEADER_LEN, parsePacketHeader)
def parsePacketHeader(data):
sign,cmd,bodySize = struct.unpack('>2sHH', data)
print "Sign: %s, Command: %s, Size: %s" % (sign,cmd,bodySize)
command=cmd
stream.read_bytes(bodySize, parsePacketBody)
def parsePacketBody(data):
print "Data: %s" % str(data)
if command == NotifyTCPServer.NOTIFY_COMMAND_PING:
send_ping(data)
readPacketHeader()
def send_register(userKey):
send_packet(NotifyTCPServer.NOTIFY_COMMAND_REGISTER, userKey)
def send_ping(msg):
send_packet(NotifyTCPServer.NOTIFY_COMMAND_PING, msg)
def send_packet(cmd, msg):
data = bytes(msg)
stream.write(struct.pack(">2sHH", "NT", cmd, len(data)))
stream.write(data)
def send_request():
readPacketHeader()
send_register('591410cbf9614cbf9aaac4a871ddb466')
# Module-level connection state: last command id parsed from a packet
# header (0 = none seen yet).
command=0
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
stream = tornado.iostream.IOStream(s)
# Connect to the local notify server; send_request runs once connected.
stream.connect(("localhost", 9002), send_request)
#stream.connect(("221.180.20.232", 9002), send_request)
tornado.ioloop.IOLoop.instance().start()
import cPickle as pkl
import networkx as nx
import numpy as np
import os
import scipy.spatial
from ..handler.basics import chunkify
from ..processing.general import single_conn_comp_img
from syconnmp.shared_mem import start_multiprocess_obj, start_multiprocess
from ..handler.compression import VoxelDict, AttributeDict
from syconnmp.shared_mem import start_multiprocess_obj
from .utils import subfold_from_ix
import segmentation
from knossos_utils import knossosdataset
def dataset_analysis_thread(args):
    """Worker: collect per-object attributes (id, size, bounding box,
    representative coordinate) for every object stored under *paths*.

    args: [paths, obj_type, version, working_dir, recompute]
    Returns a dict mapping attribute name -> list, aligned by index with
    the "id" list.
    """
    paths = args[0]
    obj_type = args[1]
    version = args[2]
    working_dir = args[3]
    recompute = args[4]
    attr_dict = dict(id=[], size=[], bounding_box=[], rep_coord=[])
    for p in paths:
        if not len(os.listdir(p)) > 0:
            # Empty storage folder: remove it instead of scanning.
            os.rmdir(p)
        else:
            this_attr_dc = AttributeDict(p + "/attr_dict.pkl",
                                         read_only=not recompute, timeout=3600)
            if recompute:
                # Voxel data is only needed to (re)derive the attributes.
                this_vx_dc = VoxelDict(p + "/voxel.pkl", read_only=True,
                                       timeout=3600)
            for so_id in this_attr_dc.keys():
                attr_dict["id"].append(so_id)
                so = segmentation.SegmentationObject(so_id,
                                                     obj_type,
                                                     version,
                                                     working_dir)
                so.attr_dict = this_attr_dc[so_id]
                if recompute:
                    so.load_voxels(voxel_dc=this_vx_dc)
                    so.calculate_rep_coord(voxel_dc=this_vx_dc)
                if recompute:  # or "rep_coord" not in so.attr_dict:
                    so.attr_dict["rep_coord"] = so.rep_coord
                if recompute:  # or "bounding_box" not in so.attr_dict:
                    so.attr_dict["bounding_box"] = so.bounding_box
                if recompute:  # or "size" not in so.attr_dict:
                    so.attr_dict["size"] = so.size
                for attribute in ["rep_coord", "size", "bounding_box"]:  # so.attr_dict.keys():
                    if attribute not in attr_dict:
                        attr_dict[attribute] = []
                    attr_dict[attribute].append(so.attr_dict[attribute])
                # so.save_attr_dict()
                this_attr_dc[so_id] = so.attr_dict
            if recompute:
                # Persist the refreshed attribute dicts for this folder.
                this_attr_dc.save2pkl()
    return attr_dict
def map_objects_thread(args):
    """Worker: map segmentation objects onto supervoxel ids by looking up
    their voxel coordinates in the overlay KnossosDataset.

    args: [paths, obj_type, obj_version, working_dir, kd_path, readonly,
           (optional) datatype]
    Returns {supervoxel_id: {object_id: overlap_ratio}}.
    """
    paths = args[0]
    obj_type = args[1]
    obj_version = args[2]
    working_dir = args[3]
    kd_path = args[4]
    readonly = args[5]
    if len(args) > 6:
        datatype = args[6]
    else:
        datatype = np.uint64
    kd = knossosdataset.KnossosDataset()
    kd.initialize_from_knossos_path(kd_path)
    seg_dataset = segmentation.SegmentationDataset(obj_type,
                                                   version=obj_version,
                                                   working_dir=working_dir)
    sv_id_dict = {}
    for p in paths:
        this_attr_dc = AttributeDict(p + "/attr_dict.pkl",
                                     read_only=readonly, timeout=3600)
        this_vx_dc = VoxelDict(p + "/voxel.pkl", read_only=True,
                               timeout=3600)
        for so_id in this_vx_dc.keys():
            so = seg_dataset.get_segmentation_object(so_id)
            so.attr_dict = this_attr_dc[so_id]
            so.load_voxels(voxel_dc=this_vx_dc)
            if readonly:
                # Reuse the stored mapping instead of re-querying the cubes.
                if "mapping_ids" in so.attr_dict:
                    ids = so.attr_dict["mapping_ids"]
                    id_ratios = so.attr_dict["mapping_ratios"]
                    for i_id in range(len(ids)):
                        if ids[i_id] in sv_id_dict:
                            sv_id_dict[ids[i_id]][so_id] = id_ratios[i_id]
                        else:
                            sv_id_dict[ids[i_id]] = {so_id: id_ratios[i_id]}
            else:
                if np.product(so.shape) > 1e8:
                    # Memory guard: skip extremely large objects.
                    continue
                vx_list = np.argwhere(so.voxels) + so.bounding_box[0]
                try:
                    id_list = kd.from_overlaycubes_to_list(vx_list,
                                                           datatype=datatype)
                except:
                    # NOTE(review): bare except silently drops objects whose
                    # overlay lookup fails -- consider logging.
                    continue
                # Overlap ratio = fraction of the object's voxels landing on
                # each supervoxel id.
                ids, id_counts = np.unique(id_list, return_counts=True)
                id_ratios = id_counts / float(np.sum(id_counts))
                for i_id in range(len(ids)):
                    if ids[i_id] in sv_id_dict:
                        sv_id_dict[ids[i_id]][so_id] = id_ratios[i_id]
                    else:
                        sv_id_dict[ids[i_id]] = {so_id: id_ratios[i_id]}
                so.attr_dict["mapping_ids"] = ids
                so.attr_dict["mapping_ratios"] = id_ratios
                this_attr_dc[so_id] = so.attr_dict
        if not readonly:
            this_attr_dc.save2pkl()
    return sv_id_dict
def write_mapping_to_sv_thread(args):
    """Worker: attach a precomputed object->supervoxel mapping to each
    supervoxel's attribute dict.

    args: [paths, obj_type, mapping_dict_path]; the pickle at
    *mapping_dict_path* holds {sv_id: {object_id: overlap_ratio}}.
    """
    paths = args[0]
    obj_type = args[1]
    mapping_dict_path = args[2]
    # Open the pickle in binary mode ("rb") and via 'with' so the handle is
    # always closed; text mode only happened to work on POSIX and can
    # corrupt the stream elsewhere, and the original leaked the handle.
    with open(mapping_dict_path, "rb") as f:
        mapping_dict = pkl.load(f)
    for p in paths:
        this_attr_dc = AttributeDict(p + "/attr_dict.pkl",
                                     read_only=False, timeout=3600)
        for sv_id in this_attr_dc.keys():
            this_attr_dc[sv_id]["mapping_%s_ids" % obj_type] = \
                mapping_dict[sv_id].keys()
            this_attr_dc[sv_id]["mapping_%s_ratios" % obj_type] = \
                mapping_dict[sv_id].values()
        this_attr_dc.save2pkl()
def split_components_thread(args):
    """Worker: split segmentation objects into spatially connected
    components (voxels closer than *dist* are treated as connected) and
    store each component as a new object of *new_obj_type*.

    args: [i_id, stride, obj_type, new_obj_type, version, new_version,
           version_dict, working_dir, dist, new_id]
    """
    i_id = args[0]
    stride = args[1]
    obj_type = args[2]
    new_obj_type = args[3]
    version = args[4]
    new_version = args[5]
    version_dict = args[6]
    working_dir = args[7]
    dist = args[8]
    new_id = args[9]
    sd = segmentation.SegmentationDataset(obj_type=obj_type, version=version,
                                          working_dir=working_dir,
                                          version_dict=version_dict)
    # This worker handles the i_id-th stride-sized slice of all object ids.
    ids = sd.ids[i_id * stride: (i_id + 1) * stride]
    new_sd = segmentation.SegmentationDataset(obj_type=new_obj_type,
                                              version=new_version,
                                              working_dir=working_dir,
                                              version_dict=version_dict)
    for i_id in range(len(ids)):
        so_obj = sd.get_segmentationdataset(ids[i_id])
        # Voxel pairs within 'dist' become graph edges; connected components
        # of that graph are the spatial components.
        kdtree = scipy.spatial.cKDTree(so_obj.voxel_list)
        graph = nx.from_edgelist(kdtree.query_pairs(dist))
        ccs = list(nx.connected_components(graph))
        if len(ccs) == 1:
            # Single component: reuse the voxel storage of the source object.
            new_so_obj = new_sd.get_segmentationdataset(new_id)
            new_id += 1
            new_so_obj.attr_dict["paths_to_voxels"] = so_obj.paths_to_voxels
            new_so_obj.save_attr_dict()
        else:
            for cc in ccs:
                new_so_obj = new_sd.get_segmentationdataset(new_id)
                new_id += 1
                voxel_ids = np.array(list(cc), dtype=np.int32)
                this_voxel_list = so_obj.voxel_list[voxel_ids]
                # NOTE(review): voxel_list is presumably (N, 3); axis=1 takes
                # min/max per *voxel* rather than per coordinate -- axis=0
                # looks intended.  Verify before changing.
                bb = [np.min(this_voxel_list, axis=1),
                      np.max(this_voxel_list, axis=1)]
                this_voxel_list -= bb[0]
                # NOTE(review): a zeros volume of shape bb[1]-bb[0] may be one
                # voxel too small for inclusive bounds -- confirm.
                this_voxels = np.zeros(bb[1]-bb[0], dtype=np.bool)
                this_voxels[this_voxel_list[:, 0],
                            this_voxel_list[:, 1],
                            this_voxel_list[:, 2]] = True
                new_so_obj.save_voxels(this_voxels)
def init_sos(sos_dict):
    """Instantiate one SegmentationObject per supervoxel id in *sos_dict*.

    *sos_dict* is a dict as produced by sos_dict_fact(): the "svixs" entry
    holds the ids; every other entry is forwarded as a keyword argument.
    """
    params = sos_dict.copy()
    ids = params.pop("svixs")
    return [segmentation.SegmentationObject(sv_ix, **params) for sv_ix in ids]
def sos_dict_fact(svixs, version="0", scaling=(10, 10, 20), obj_type="sv",
                  working_dir="/wholebrain/scratch/areaxfs/", create=False):
    """Bundle SegmentationObject construction parameters into a dict
    suitable for init_sos()."""
    return {
        "svixs": svixs,
        "version": version,
        "working_dir": working_dir,
        "scaling": scaling,
        "create": create,
        "obj_type": obj_type,
    }
def predict_sos_views(model, sos, pred_key, nb_cpus=1, woglia=True,
                      verbose=False, raw_only=False):
    """Run *model* on the 2D views of all segmentation objects in *sos* and
    store the per-view probabilities on each object under *pred_key*.

    Objects are processed in chunks of ~50 to bound memory usage.
    """
    nb_chunks = np.max([1, len(sos) / 50])
    so_chs = chunkify(sos, nb_chunks)
    for ch in so_chs:
        views = start_multiprocess_obj("load_views", [[sv, {"woglia": woglia,
                                                            "raw_only": raw_only}]
                                                      for sv in ch], nb_cpus=nb_cpus)
        for kk in range(len(views)):
            data = views[kk]
            for i in range(len(data)):
                # Keep only the largest connected component in each half of
                # channel 0 before prediction.
                sing_cc = np.concatenate([single_conn_comp_img(data[i, 0, :1]),
                                          single_conn_comp_img(data[i, 0, 1:])])
                data[i, 0] = sing_cc
            views[kk] = data
        # Remember per-object view counts so the flat prediction array can be
        # split back per object.
        part_views = np.cumsum([0] + [len(v) for v in views])
        views = np.concatenate(views)
        probas = model.predict_proba(views, verbose=verbose)
        so_probas = []
        for ii, so in enumerate(ch):
            sv_probas = probas[part_views[ii]:part_views[ii + 1]]
            so_probas.append(sv_probas)
            # so.attr_dict[key] = sv_probas
        assert len(so_probas) == len(ch)
        params = [[so, prob, pred_key] for so, prob in zip(ch, so_probas)]
        start_multiprocess(multi_probas_saver, params, nb_cpus=nb_cpus)
def multi_probas_saver(args):
    """Persist one prediction array on a segmentation object.

    *args* is a (object, probabilities, attribute_key) triple as prepared
    by predict_sos_views().
    """
    seg_obj, probas, attr_key = args
    seg_obj.save_attributes([attr_key], [probas])
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""BibFormat element - ScienceWise.info
This elements displays a linking icon to ScienceWise.info for arXiv
records.
"""
import cgi
import re
from invenio.config import CFG_SITE_URL, CFG_SITE_LANG
from invenio.messages import gettext_set_language
_RE_MODERN_ARXIV = re.compile('(arxiv:)?(?P<number>\d{4}.\d{4}(v\d+)?)')
_RE_OLD_ARXIV = re.compile('(arxiv:)?(?P<number>\w+-\w+/\d{7}(v\d+)?)')
_RE_BAD_OLD_ARXIV = re.compile('(arxiv:)?(?P<archive>\w+-\w+)-(?P<number>\d{7}(v\d+)?)')
def format_element(bfo):
    """
    If the record has an ArXiv reportnumber, displays a ScienceWise icon
    to bookmark it.

    Scans the report-number fields (037__a, 088__a) and returns the HTML
    snippet for the first recognised identifier, or "" when none matches.
    """
    # The translation function was fetched into '_' here but never used
    # (create_sciencewise_icon resolves its own language), so the dead
    # local has been removed.
    for tag in ('037__a', '088__a'):
        for reportnumber in bfo.fields(tag):
            icon = create_sciencewise_icon(reportnumber)
            if icon:
                return icon
    return ""
def get_arxiv_reportnumber(bfo):
    """
    Return the arXiv identifier found in the record's report-number fields
    (037__a / 088__a), or the empty string when none matches.
    """
    for tag in ('037__a', '088__a'):
        for reportnumber in bfo.fields(tag):
            candidate = reportnumber.lower()
            for pattern in (_RE_MODERN_ARXIV, _RE_OLD_ARXIV):
                found = pattern.match(candidate)
                if found is not None:
                    return found.group('number')
    return ""
def escape_values(bfo):
    """
    Called by BibFormat to decide whether this element's output must be
    escaped; 0 means "do not escape" (the element emits raw HTML).
    """
    return 0
def create_sciencewise_url(reportnumber):
    """
    Return the ScienceWise.info bookmark URL for a valid arXiv report
    number, or the empty string when it is not recognised.
    """
    normalized = reportnumber.lower()
    bad_old = _RE_BAD_OLD_ARXIV.match(normalized)
    if bad_old:
        # Repair the malformed old-style form "archive-NNNNNNN".
        normalized = '%s/%s' % (bad_old.group('archive'), bad_old.group('number'))
    for pattern in (_RE_MODERN_ARXIV, _RE_OLD_ARXIV):
        found = pattern.match(normalized)
        if found:
            return "http://sciencewise.info/bookmarks/%s/add" % found.group('number')
    return ""
def create_sciencewise_icon(reportnumber, lang=CFG_SITE_LANG):
    """
    If the reportnumber is a valid arXiv reportnumber return a ScienceWise.info
    icon: an HTML anchor + image linking to the bookmark page.  Returns ""
    for unrecognised identifiers.
    """
    _ = gettext_set_language(lang)
    reportnumber = reportnumber.lower()
    # Repair the malformed old-style id "hep-th-9901001" -> "hep-th/9901001".
    g = _RE_BAD_OLD_ARXIV.match(reportnumber)
    if g:
        reportnumber = '%s/%s' % (g.group('archive'), g.group('number'))
    for regexp in (_RE_MODERN_ARXIV, _RE_OLD_ARXIV):
        g = regexp.match(reportnumber)
        if g:
            # All interpolated values are HTML-escaped (quote=True for
            # attribute contexts).
            return """\
<a href="http://sciencewise.info/bookmarks/%(id)s/add" target="_blank" title="%(title)s"><img src="%(siteurl)s/img/sciencewise.png" width="23" height="16" alt="ScienceWise.info icon" /></a>""" % {
                'id': cgi.escape(g.group('number'), True),
                'title': cgi.escape(_("Add this article to your ScienceWise.info bookmarks"), True),
                'siteurl': cgi.escape(CFG_SITE_URL, True)
            }
    return ""
| robk5uj/invenio | modules/bibformat/lib/elements/bfe_sciencewise.py | Python | gpl-2.0 | 3,743 |
import os
import shutil
from mutagen._compat import cBytesIO
from mutagen.ogg import OggPage
from mutagen.oggspeex import OggSpeex, OggSpeexInfo, delete
from tests import add
from tests.test_ogg import TOggFileType
from tempfile import mkstemp
class TOggSpeex(TOggFileType):
    # Unit tests for the Ogg Speex file type; each test runs against a
    # scratch copy of tests/data/empty.spx created in setUp().
    Kind = OggSpeex

    def setUp(self):
        # Copy the fixture to a temp file so tests can mutate it freely.
        original = os.path.join("tests", "data", "empty.spx")
        fd, self.filename = mkstemp(suffix='.ogg')
        os.close(fd)
        shutil.copy(original, self.filename)
        self.audio = self.Kind(self.filename)

    def test_module_delete(self):
        delete(self.filename)
        self.scan_file()
        self.failIf(OggSpeex(self.filename).tags)

    def test_channels(self):
        self.failUnlessEqual(2, self.audio.info.channels)

    def test_sample_rate(self):
        self.failUnlessEqual(44100, self.audio.info.sample_rate)

    def test_bitrate(self):
        # The fixture carries no nominal bitrate.
        self.failUnlessEqual(0, self.audio.info.bitrate)

    def test_invalid_not_first(self):
        # A stream whose first page is not flagged "first" must be rejected.
        page = OggPage(open(self.filename, "rb"))
        page.first = False
        self.failUnlessRaises(IOError, OggSpeexInfo, cBytesIO(page.write()))

    def test_vendor(self):
        self.failUnless(
            self.audio.tags.vendor.startswith("Encoded with Speex 1.1.12"))
        # The vendor string is not exposed as a regular tag key.
        self.failUnlessRaises(KeyError, self.audio.tags.__getitem__, "vendor")

    def test_not_my_ogg(self):
        fn = os.path.join('tests', 'data', 'empty.oggflac')
        self.failUnlessRaises(IOError, type(self.audio), fn)
        self.failUnlessRaises(IOError, self.audio.save, fn)
        self.failUnlessRaises(IOError, self.audio.delete, fn)

    def test_multiplexed_in_headers(self):
        # Tags must survive a save on a multiplexed (multi-stream) file.
        shutil.copy(
            os.path.join("tests", "data", "multiplexed.spx"), self.filename)
        audio = self.Kind(self.filename)
        audio.tags["foo"] = ["bar"]
        audio.save()
        audio = self.Kind(self.filename)
        self.failUnlessEqual(audio.tags["foo"], ["bar"])

    def test_mime(self):
        self.failUnless("audio/x-speex" in self.audio.mime)
# Register the test case with the suite runner.
add(TOggSpeex)
| LordSputnik/mutagen | tests/test_oggspeex.py | Python | gpl-2.0 | 2,072 |
from ChannelSelection import ChannelSelection, BouquetSelector, SilentBouquetSelector
from Components.ActionMap import ActionMap, HelpableActionMap
from Components.ActionMap import NumberActionMap
from Components.Harddisk import harddiskmanager
from Components.Input import Input
from Components.Label import Label
from Components.MovieList import AUDIO_EXTENSIONS, MOVIE_EXTENSIONS, DVD_EXTENSIONS
from Components.PluginComponent import plugins
from Components.ServiceEventTracker import ServiceEventTracker
from Components.Sources.Boolean import Boolean
from Components.config import config, ConfigBoolean, ConfigClock, ConfigText
from Components.SystemInfo import SystemInfo
from Components.UsageConfig import preferredInstantRecordPath, defaultMoviePath, ConfigSelection
from Components.VolumeControl import VolumeControl
from Components.Sources.StaticText import StaticText
from EpgSelection import EPGSelection
from Plugins.Plugin import PluginDescriptor
from Screen import Screen
from Screens import ScreenSaver
from Screens import Standby
from Screens.ChoiceBox import ChoiceBox
from Screens.Dish import Dish
from Screens.EventView import EventViewEPGSelect, EventViewSimple
from Screens.InputBox import InputBox
from Screens.MessageBox import MessageBox
from Screens.MinuteInput import MinuteInput
from Screens.TimerSelection import TimerSelection
from Screens.PictureInPicture import PictureInPicture
import Screens.Standby
from Screens.SubtitleDisplay import SubtitleDisplay
from Screens.RdsDisplay import RdsInfoDisplay, RassInteractive
from Screens.TimeDateInput import TimeDateInput
from Screens.UnhandledKey import UnhandledKey
from ServiceReference import ServiceReference, isPlayableForCur
from Tools import Notifications, ASCIItranslit
from Tools.Directories import fileExists, getRecordingFilename, moveFiles
from enigma import eTimer, eServiceCenter, eDVBServicePMTHandler, iServiceInformation, \
iPlayableService, eServiceReference, eEPGCache, eActionMap
from time import time, localtime, strftime
import os
from bisect import insort
from sys import maxint
####key debug
# from keyids import KEYIDS
# from datetime import datetime
from RecordTimer import RecordTimerEntry, RecordTimer, findSafeRecordPath
# hack alert!
from Menu import MainMenu, mdom
def isStandardInfoBar(self):
    """Return True when *self*'s concrete class is named exactly
    "InfoBar" (i.e. the plain infobar, not a subclass)."""
    return type(self).__name__ == "InfoBar"
def setResumePoint(session):
    """Remember the current play position of the running service so playback
    can later be resumed.  Evicts an older entry once the cache exceeds 50
    and flushes to disk at most once per hour."""
    global resumePointCache, resumePointCacheLast
    service = session.nav.getCurrentService()
    ref = session.nav.getCurrentlyPlayingServiceOrGroup()
    if (service is not None) and (ref is not None): # and (ref.type != 1):
        # ref type 1 has its own memory...
        seek = service.seek()
        if seek:
            pos = seek.getPlayPosition()
            if not pos[0]:
                # pos[0] is the error flag, pos[1] the current position.
                key = ref.toString()
                lru = int(time())
                l = seek.getLength()
                if l:
                    l = l[1]
                else:
                    l = None
                # Cache entry: [last-use timestamp, position, length].
                resumePointCache[key] = [lru, pos[1], l]
                if len(resumePointCache) > 50:
                    # NOTE(review): this picks the *last* entry older than
                    # 'lru', not the minimum -- true LRU eviction would also
                    # lower the comparison value inside the loop.  Confirm
                    # whether that is intended before changing.
                    candidate = key
                    for k,v in resumePointCache.items():
                        if v[0] < lru:
                            candidate = k
                    del resumePointCache[candidate]
                if lru - resumePointCacheLast > 3600:
                    saveResumePoints()
def delResumePoint(ref):
    """Drop the stored resume position for *ref* (no-op when absent) and
    flush the cache to disk at most once per hour."""
    global resumePointCache, resumePointCacheLast
    resumePointCache.pop(ref.toString(), None)
    if int(time()) - resumePointCacheLast > 3600:
        saveResumePoints()
def getResumePoint(session):
    """Return the stored resume position for the currently playing service,
    or None when nothing is stored.  Refreshes the entry's last-use
    timestamp on a hit."""
    global resumePointCache
    ref = session.nav.getCurrentlyPlayingServiceOrGroup()
    if (ref is not None) and (ref.type != 1):
        try:
            entry = resumePointCache[ref.toString()]
            entry[0] = int(time()) # update LRU timestamp
            return entry[1]
        except KeyError:
            return None
def saveResumePoints():
global resumePointCache, resumePointCacheLast
import cPickle
try:
f = open('/home/root/resumepoints.pkl', 'wb')
cPickle.dump(resumePointCache, f, cPickle.HIGHEST_PROTOCOL)
except Exception, ex:
print "[InfoBar] Failed to write resumepoints:", ex
resumePointCacheLast = int(time())
def loadResumePoints():
import cPickle
try:
return cPickle.load(open('/home/root/resumepoints.pkl', 'rb'))
except Exception, ex:
print "[InfoBar] Failed to load resumepoints:", ex
return {}
# In-memory resume-point cache (keyed by service reference string) and the
# time of the last on-disk save.
resumePointCache = loadResumePoints()
resumePointCacheLast = int(time())
class InfoBarDish:
    # Mixin: owns the dish-movement dialog shown while a rotor is turning.
    def __init__(self):
        self.dishDialog = self.session.instantiateDialog(Dish)
class InfoBarUnhandledKey:
    # Mixin: shows a small on-screen symbol when a key press was not
    # consumed by any action map.  It binds one hook at the highest
    # priority (sees every key) and one at the lowest (sees only keys
    # nothing else handled); if both saw the same flags, the key was unused.
    def __init__(self):
        self.unhandledKeyDialog = self.session.instantiateDialog(UnhandledKey)
        self.hideUnhandledKeySymbolTimer = eTimer()
        self.hideUnhandledKeySymbolTimer.callback.append(self.unhandledKeyDialog.hide)
        self.checkUnusedTimer = eTimer()
        self.checkUnusedTimer.callback.append(self.checkUnused)
        self.onLayoutFinish.append(self.unhandledKeyDialog.hide)
        eActionMap.getInstance().bindAction('', -maxint -1, self.actionA) #highest prio
        eActionMap.getInstance().bindAction('', maxint, self.actionB) #lowest prio
        self.flags = (1<<1)
        self.uflags = 0

    #this function is called on every keypress!
    def actionA(self, key, flag):
        self.unhandledKeyDialog.hide()
        if flag != 4:
            # flag encodes make/break/repeat; bit 4 (long) is ignored here.
            if self.flags & (1<<1):
                self.flags = self.uflags = 0
            self.flags |= (1<<flag)
            if flag == 1: # break
                self.checkUnusedTimer.start(0, True)
        return 0

    #this function is only called when no other action has handled this key
    def actionB(self, key, flag):
        if flag != 4:
            self.uflags |= (1<<flag)

    def checkUnused(self):
        # Every flag also reached the lowest-priority handler: nothing
        # consumed the key, so show the symbol for two seconds.
        if self.flags == self.uflags:
            self.unhandledKeyDialog.show()
            self.hideUnhandledKeySymbolTimer.start(2000, True)
class InfoBarScreenSaver:
    # Mixin: starts a screensaver after a configurable idle time while an
    # audio-only or radio service is playing.
    def __init__(self):
        self.onExecBegin.append(self.__onExecBegin)
        self.onExecEnd.append(self.__onExecEnd)
        self.screenSaverTimer = eTimer()
        self.screenSaverTimer.callback.append(self.screensaverTimeout)
        self.screensaver = self.session.instantiateDialog(ScreenSaver.Screensaver)
        self.onLayoutFinish.append(self.__layoutFinished)

    def __layoutFinished(self):
        self.screensaver.hide()

    def __onExecBegin(self):
        self.ScreenSaverTimerStart()

    def __onExecEnd(self):
        if self.screensaver.shown:
            self.screensaver.hide()
            eActionMap.getInstance().unbindAction('', self.keypressScreenSaver)
        self.screenSaverTimer.stop()

    def ScreenSaverTimerStart(self):
        # (Re)arm the timer only when a timeout is configured and the
        # current service qualifies (seeking, or radio/audio playback
        # without PiP).
        time = int(config.usage.screen_saver.value)
        flag = self.seekstate[0]
        if not flag:
            ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
            if ref and not (hasattr(self.session, "pipshown") and self.session.pipshown):
                ref = ref.toString().split(":")
                # Service type "2" is radio; otherwise check the file
                # extension for plain audio.
                flag = ref[2] == "2" or os.path.splitext(ref[10])[1].lower() in AUDIO_EXTENSIONS
        if time and flag:
            self.screenSaverTimer.startLongTimer(time)
        else:
            self.screenSaverTimer.stop()

    def screensaverTimeout(self):
        if self.execing and not Standby.inStandby and not Standby.inTryQuitMainloop:
            self.hide()
            if hasattr(self, "pvrStateDialog"):
                self.pvrStateDialog.hide()
            self.screensaver.show()
            # Grab all keys at highest priority so any press wakes us first.
            eActionMap.getInstance().bindAction('', -maxint - 1, self.keypressScreenSaver)

    def keypressScreenSaver(self, key, flag):
        if flag:
            self.screensaver.hide()
            self.show()
            self.ScreenSaverTimerStart()
            eActionMap.getInstance().unbindAction('', self.keypressScreenSaver)
class SecondInfoBar(Screen):
    # Extended ("second") infobar overlay; the skin is looked up by class
    # name, hence the explicit None here.
    def __init__(self, session):
        Screen.__init__(self, session)
        self.skin = None
class InfoBarShowHide(InfoBarScreenSaver):
    """ InfoBar show/hide control, accepts toggleShow and hide actions, might start
    fancy animations.

    State machine over STATE_* below; hiding is done via a stepwise OSD
    alpha fade (doTimerHide -> doDimming -> doHide)."""
    STATE_HIDDEN = 0
    STATE_HIDING = 1
    STATE_SHOWING = 2
    STATE_SHOWN = 3

    def __init__(self):
        self["ShowHideActions"] = ActionMap( ["InfobarShowHideActions"] ,
            {
                "toggleShow": self.okButtonCheck,
                "hide": self.keyHide,
            }, 1) # lower prio to make it possible to override ok and cancel..
        self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
            {
                iPlayableService.evStart: self.serviceStarted,
            })
        InfoBarScreenSaver.__init__(self)
        self.__state = self.STATE_SHOWN
        self.__locked = 0
        self.hideTimer = eTimer()
        self.hideTimer.callback.append(self.doTimerHide)
        self.hideTimer.start(5000, True)
        self.onShow.append(self.__onShow)
        self.onHide.append(self.__onHide)
        self.onShowHideNotifiers = []
        self.secondInfoBarScreen = ""
        if isStandardInfoBar(self):
            # Only the plain infobar gets the extended second infobar.
            self.secondInfoBarScreen = self.session.instantiateDialog(SecondInfoBar)
            self.secondInfoBarScreen.show()
        self.onLayoutFinish.append(self.__layoutFinished)

    def __layoutFinished(self):
        if self.secondInfoBarScreen:
            self.secondInfoBarScreen.hide()

    def __onShow(self):
        self.__state = self.STATE_SHOWN
        for x in self.onShowHideNotifiers:
            x(True)
        self.startHideTimer()

    def doDimming(self):
        # One fade step; 'dimmed' counts down to 0, then doHide() finishes.
        if config.usage.show_infobar_do_dimming.value:
            self.dimmed = self.dimmed-1
        else:
            self.dimmed = 0
        self.DimmingTimer.stop()
        self.doHide()

    def unDimming(self):
        self.unDimmingTimer.stop()
        self.doWriteAlpha(config.av.osd_alpha.value)

    def doWriteAlpha(self, value):
        # Drive OSD transparency through the STB proc interface, if present.
        if fileExists("/proc/stb/video/alpha"):
            f=open("/proc/stb/video/alpha","w")
            f.write("%i" % (value))
            f.close()

    def __onHide(self):
        self.unDimmingTimer = eTimer()
        self.unDimmingTimer.callback.append(self.unDimming)
        self.unDimmingTimer.start(100, True)
        self.__state = self.STATE_HIDDEN
        if self.secondInfoBarScreen:
            self.secondInfoBarScreen.hide()
        for x in self.onShowHideNotifiers:
            x(False)

    def keyHide(self):
        if self.__state == self.STATE_HIDDEN and self.session.pipshown and "popup" in config.usage.pip_hideOnExit.value:
            # Infobar already hidden: EXIT may instead close PiP
            # (optionally after asking).
            if config.usage.pip_hideOnExit.value == "popup":
                self.session.openWithCallback(self.hidePipOnExitCallback, MessageBox, _("Disable Picture in Picture"), simple=True)
            else:
                self.hidePipOnExitCallback(True)
        elif config.usage.ok_is_channelselection.value and hasattr(self, "openServiceList"):
            self.toggleShow()
        elif self.__state == self.STATE_SHOWN:
            self.hide()

    def hidePipOnExitCallback(self, answer):
        if answer == True:
            self.showPiP()

    def connectShowHideNotifier(self, fnc):
        if not fnc in self.onShowHideNotifiers:
            self.onShowHideNotifiers.append(fnc)

    def disconnectShowHideNotifier(self, fnc):
        if fnc in self.onShowHideNotifiers:
            self.onShowHideNotifiers.remove(fnc)

    def serviceStarted(self):
        if self.execing:
            if config.usage.show_infobar_on_zap.value:
                self.doShow()

    def startHideTimer(self):
        # Re-arm the auto-hide timeout unless lockShow() is in effect.
        if self.__state == self.STATE_SHOWN and not self.__locked:
            self.hideTimer.stop()
            if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
                idx = config.usage.show_second_infobar.index - 1
            else:
                idx = config.usage.infobar_timeout.index
            if idx:
                self.hideTimer.startLongTimer(idx)

    def doShow(self):
        self.show()
        self.startHideTimer()

    def doTimerHide(self):
        self.hideTimer.stop()
        #if self.__state == self.STATE_SHOWN:
        #	self.hide()
        # Start the dimming fade instead of hiding immediately.
        self.DimmingTimer = eTimer()
        self.DimmingTimer.callback.append(self.doDimming)
        self.DimmingTimer.start(70, True)
        self.dimmed = config.usage.show_infobar_dimming_speed.value

    def doHide(self):
        if self.__state != self.STATE_HIDDEN:
            # Write the proportional alpha for the current fade step.
            self.doWriteAlpha((config.av.osd_alpha.value*self.dimmed/config.usage.show_infobar_dimming_speed.value))
            if self.dimmed > 0:
                self.DimmingTimer.start(70, True)
            else:
                self.DimmingTimer.stop()
                if self.__state == self.STATE_SHOWN:
                    self.hide()
                if hasattr(self, "pvrStateDialog"):
                    try:
                        self.pvrStateDialog.hide()
                    except:
                        pass
        elif self.__state == self.STATE_HIDDEN and self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
            self.secondInfoBarScreen.hide()
            self.secondInfoBarWasShown = False

    def okButtonCheck(self):
        if config.usage.ok_is_channelselection.value and hasattr(self, "openServiceList"):
            self.openServiceList()
        else:
            self.toggleShow()

    def toggleShow(self):
        # Cycle: hidden -> first infobar -> second infobar -> hidden.
        if self.__state == self.STATE_HIDDEN:
            self.showFirstInfoBar()
        else:
            self.showSecondInfoBar()

    def showSecondInfoBar(self):
        if isStandardInfoBar(self) and config.usage.show_second_infobar.value == "EPG":
            # Configured to show the EPG instead of a second infobar.
            if not(hasattr(self, "hotkeyGlobal") and self.hotkeyGlobal("info") != 0):
                self.showDefaultEPG()
        elif self.secondInfoBarScreen and config.usage.show_second_infobar.value and not self.secondInfoBarScreen.shown:
            self.show()
            self.secondInfoBarScreen.show()
            self.startHideTimer()
        else:
            self.hide()
            self.hideTimer.stop()

    def showFirstInfoBar(self):
        if self.__state == self.STATE_HIDDEN or self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
            self.secondInfoBarScreen and self.secondInfoBarScreen.hide()
            self.show()
        else:
            self.hide()
            self.hideTimer.stop()

    def lockShow(self):
        # Keep the infobar visible until a matching unlockShow().
        self.__locked = self.__locked + 1
        if self.execing:
            self.show()
            self.hideTimer.stop()

    def unlockShow(self):
        self.__locked = self.__locked - 1
        if self.execing:
            self.startHideTimer()
class BufferIndicator(Screen):
    # Small overlay showing "Buffering NN%" for streamed services, until
    # gstreamer reports that playback has actually started.
    def __init__(self, session):
        Screen.__init__(self, session)
        self["status"] = Label()
        self.mayShow = False
        self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
            {
                iPlayableService.evBuffering: self.bufferChanged,
                iPlayableService.evStart: self.__evStart,
                iPlayableService.evGstreamerPlayStarted: self.__evGstreamerPlayStarted,
            })

    def bufferChanged(self):
        if self.mayShow:
            service = self.session.nav.getCurrentService()
            info = service and service.info()
            if info:
                value = info.getInfo(iServiceInformation.sBuffer)
                # Only display while actually buffering (not at 100%).
                if value and value != 100:
                    self["status"].setText(_("Buffering %d%%") % value)
                    if not self.shown:
                        self.show()

    def __evStart(self):
        self.mayShow = True
        self.hide()

    def __evGstreamerPlayStarted(self):
        self.mayShow = False
        self.hide()
class InfoBarBuffer():
    # Mixin: owns the (initially hidden) buffering-percentage overlay.
    def __init__(self):
        self.bufferScreen = self.session.instantiateDialog(BufferIndicator)
        self.bufferScreen.hide()
class NumberZap(Screen):
    """Dialog for zapping by typed channel number.

    Zaps automatically after a short timeout, or immediately once five
    digits have been entered; blue toggles first-bouquet-only search."""
    def quit(self):
        self.Timer.stop()
        self.close()

    def keyOK(self):
        self.Timer.stop()
        self.close(self.service, self.bouquet)

    def handleServiceName(self):
        # Resolve the currently typed number to a service and preview its name.
        if self.searchNumber:
            self.service, self.bouquet = self.searchNumber(int(self["number"].getText()))
            self["servicename"].text = self["servicename_summary"].text = ServiceReference(self.service).getServiceName()
            if not self.startBouquet:
                self.startBouquet = self.bouquet

    def keyBlue(self):
        # Toggle between searching only the first bouquet and a global search.
        self.Timer.start(3000, True)
        if self.searchNumber:
            if self.startBouquet == self.bouquet:
                self.service, self.bouquet = self.searchNumber(int(self["number"].getText()), firstBouquetOnly = True)
            else:
                self.service, self.bouquet = self.searchNumber(int(self["number"].getText()))
            self["servicename"].text = self["servicename_summary"].text = ServiceReference(self.service).getServiceName()

    def keyNumberGlobal(self, number):
        self.Timer.start(1000, True)
        self.numberString = self.numberString + str(number)
        self["number"].text = self["number_summary"].text = self.numberString
        self.field = self.numberString
        self.handleServiceName()
        if len(self.numberString) >= 5:
            # Maximum channel-number length reached: zap immediately.
            self.keyOK()

    def __init__(self, session, number, searchNumberFunction = None):
        Screen.__init__(self, session)
        self.numberString = str(number)
        self.field = str(number)
        self.searchNumber = searchNumberFunction
        self.startBouquet = None
        self["channel"] = Label(_("Channel:"))
        self["number"] = Label(self.numberString)
        self["servicename"] = Label()
        self["channel_summary"] = StaticText(_("Channel:"))
        self["number_summary"] = StaticText(self.numberString)
        self["servicename_summary"] = StaticText()
        self.handleServiceName()
        self["actions"] = NumberActionMap( [ "SetupActions", "ShortcutActions" ],
            {
                "cancel": self.quit,
                "ok": self.keyOK,
                "blue": self.keyBlue,
                "1": self.keyNumberGlobal,
                "2": self.keyNumberGlobal,
                "3": self.keyNumberGlobal,
                "4": self.keyNumberGlobal,
                "5": self.keyNumberGlobal,
                "6": self.keyNumberGlobal,
                "7": self.keyNumberGlobal,
                "8": self.keyNumberGlobal,
                "9": self.keyNumberGlobal,
                "0": self.keyNumberGlobal
            })
        self.Timer = eTimer()
        self.Timer.callback.append(self.keyOK)
        self.Timer.start(3000, True)
class InfoBarNumberZap:
    """ Handles an initial number for NumberZapping """
    def __init__(self):
        self["NumberActions"] = NumberActionMap( [ "NumberActions"],
            {
                "1": self.keyNumberGlobal,
                "2": self.keyNumberGlobal,
                "3": self.keyNumberGlobal,
                "4": self.keyNumberGlobal,
                "5": self.keyNumberGlobal,
                "6": self.keyNumberGlobal,
                "7": self.keyNumberGlobal,
                "8": self.keyNumberGlobal,
                "9": self.keyNumberGlobal,
                "0": self.keyNumberGlobal,
            })

    def keyNumberGlobal(self, number):
        if number == 0:
            # 0 either drives PiP or recalls the previous service.
            if isinstance(self, InfoBarPiP) and self.pipHandles0Action():
                self.pipDoHandle0Action()
            elif len(self.servicelist.history) > 1:
                self.checkTimeshiftRunning(self.recallPrevService)
        else:
            if self.has_key("TimeshiftActions") and self.timeshiftEnabled():
                ts = self.getTimeshift()
                if ts and ts.isTimeshiftActive():
                    # Never zap away while timeshift is active.
                    return
            self.session.openWithCallback(self.numberEntered, NumberZap, number, self.searchNumber)

    def recallPrevService(self, reply):
        if reply:
            self.servicelist.history_tv = []
            self.servicelist.history_radio = []
            self.servicelist.recallPrevService()

    def numberEntered(self, service = None, bouquet = None):
        # Callback from the NumberZap dialog.
        if service:
            self.selectAndStartService(service, bouquet)

    def searchNumberHelper(self, serviceHandler, num, bouquet):
        # Linear scan of one bouquet for a matching channel number.
        servicelist = serviceHandler.list(bouquet)
        if servicelist:
            serviceIterator = servicelist.getNext()
            while serviceIterator.valid():
                if num == serviceIterator.getChannelNum():
                    return serviceIterator
                serviceIterator = servicelist.getNext()
        return None

    def searchNumber(self, number, firstBouquetOnly=False, bouquet=None):
        """Find the service with channel number *number*; returns
        (service_or_None, bouquet)."""
        bouquet = bouquet or self.servicelist.getRoot()
        service = None
        serviceHandler = eServiceCenter.getInstance()
        if not firstBouquetOnly:
            service = self.searchNumberHelper(serviceHandler, number, bouquet)
        if config.usage.multibouquet.value and not service:
            # Fall back to scanning every bouquet under the bouquet root.
            bouquet = self.servicelist.bouquet_root
            bouquetlist = serviceHandler.list(bouquet)
            if bouquetlist:
                bouquet = bouquetlist.getNext()
                while bouquet.valid():
                    if bouquet.flags & eServiceReference.isDirectory:
                        service = self.searchNumberHelper(serviceHandler, number, bouquet)
                        if service:
                            # Markers and directories are not zappable
                            # (numbered markers are an exception).
                            playable = not (service.flags & (eServiceReference.isMarker|eServiceReference.isDirectory)) or (service.flags & eServiceReference.isNumberedMarker)
                            if not playable:
                                service = None
                            break
                        if config.usage.alternative_number_mode.value or firstBouquetOnly:
                            break
                    bouquet = bouquetlist.getNext()
        return service, bouquet

    def selectAndStartService(self, service, bouquet):
        if service and not service.flags & eServiceReference.isMarker:
            if self.servicelist.getRoot() != bouquet: #already in correct bouquet?
                self.servicelist.clearPath()
                if self.servicelist.bouquet_root != bouquet:
                    self.servicelist.enterPath(self.servicelist.bouquet_root)
                self.servicelist.enterPath(bouquet)
            self.servicelist.setCurrentSelection(service) #select the service in servicelist
            self.servicelist.zap(enable_pipzap = True)
            self.servicelist.correctChannelNumber()
            self.servicelist.startRoot = None

    def zapToNumber(self, number):
        service, bouquet = self.searchNumber(number)
        self.selectAndStartService(service, bouquet)
# One-time flag: open the channel list on the very first start.
config.misc.initialchannelselection = ConfigBoolean(default = True)
class InfoBarChannelSelection:
	""" ChannelSelection - handles the channelSelection dialog and the initial
	channelChange actions which open the channelSelection dialog """
	def __init__(self):
		#instantiate forever
		self.servicelist = self.session.instantiateDialog(ChannelSelection)
		if config.misc.initialchannelselection.value:
			# First boot: open the channel list once (see firstRun below).
			self.onShown.append(self.firstRun)
		self["ChannelSelectActions"] = HelpableActionMap(self, "InfobarChannelSelection",
			{
				"keyUp": (self.keyUpCheck, self.getKeyUpHelptext),
				"keyDown": (self.keyDownCheck, self.getKeyDownHelpText),
				"keyLeft": (self.keyLeftCheck, self.getKeyLeftHelptext),
				"keyRight": (self.keyRightCheck, self.getKeyRightHelptext),
				"historyBack": (self.historyBack, _("Switch to previous channel in history")),
				"historyNext": (self.historyNext, _("Switch to next channel in history")),
				"openServiceList": (self.openServiceList, _("Open service list")),
				"openhistorybrowser": (self.openHistoryBrowser, _("open history browser")),
				#"opendevicemanager": (self.openDeviceManager, _("open device manager")),
				#"openaroraplugins": (self.openAroraPlugins, _("open Arora Browser")),
				"showPluginBrowser": (self.showPluginBrowser, _("Show the plugin browser..")),
				"openBouquetList": (self.openBouquetList, _("open bouquetlist")),
				"keyChannelUp": (self.keyChannelUpCheck, self.getKeyChannelUpHelptext),
				"keyChannelDown": (self.keyChannelDownCheck, self.getKeyChannelDownHelptext),
			})
	def openHistoryBrowser(self):
		# Launch the Zap-History Browser plugin if installed, otherwise tell the user.
		# NOTE(review): checks for a compiled .pyo only — a plugin shipped as .py/.pyc
		# would not be detected; confirm against the target image's packaging.
		if fileExists("/usr/lib/enigma2/python/Plugins/Extensions/ZapHistoryBrowser/plugin.pyo"):
			for plugin in plugins.getPlugins([PluginDescriptor.WHERE_EXTENSIONSMENU, PluginDescriptor.WHERE_EVENTINFO]):
				if plugin.name == _("Zap-Historie Browser") or plugin.name == _("Zap-History Browser"):
					self.runPlugin(plugin)
					break
		else:
			self.session.open(MessageBox, _("The Zap-History Browser plugin is not installed!\nPlease install it."), type = MessageBox.TYPE_INFO,timeout = 10 )
	def openDeviceManager(self):
		# Launch the Device Manager plugin if installed (action currently commented out above).
		if fileExists("/usr/lib/enigma2/python/Plugins/SystemPlugins/DeviceManager/plugin.pyo"):
			for plugin in plugins.getPlugins([PluginDescriptor.WHERE_EXTENSIONSMENU, PluginDescriptor.WHERE_EVENTINFO]):
				if plugin.name == _("Device Manager - Fast Mounted Remove"):
					self.runPlugin(plugin)
					break
		else:
			self.session.open(MessageBox, _("The Device Manager plugin is not installed!\nPlease install it."), type = MessageBox.TYPE_INFO,timeout = 10 )
	def openAroraPlugins(self):
		# Launch the Arora/Web Browser plugin if installed (action currently commented out above).
		if fileExists("/usr/lib/enigma2/python/Plugins/Extensions/WebBrowser/plugin.pyo"):
			for plugin in plugins.getPlugins([PluginDescriptor.WHERE_PLUGINMENU, PluginDescriptor.WHERE_EVENTINFO]):
				if plugin.name == _("Web Browser"):
					self.runPlugin(plugin)
					break
		else:
			self.session.open(MessageBox, _("The WebBrowser is not installed!\nPlease install it."), type = MessageBox.TYPE_INFO,timeout = 10 )
	def showPluginBrowser(self):
		# Open the plugin browser, first dismissing the second infobar if it is up.
		if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
			self.secondInfoBarScreen.hide()
			self.secondInfoBarWasShown = False
		from Screens.PluginBrowser import PluginBrowser
		self.session.open(PluginBrowser)
	def showTvChannelList(self, zap=False):
		# Switch the service list to TV mode; optionally zap to the selection.
		self.servicelist.setModeTv()
		if zap:
			self.servicelist.zap()
	def showRadioChannelList(self, zap=False):
		# Switch the service list to radio mode; optionally zap to the selection.
		self.servicelist.setModeRadio()
		if zap:
			self.servicelist.zap()
	def firstRun(self):
		# One-shot first-boot hook: clear the persisted flag and open the channel list.
		self.onShown.remove(self.firstRun)
		config.misc.initialchannelselection.value = False
		config.misc.initialchannelselection.save()
		self.switchChannelDown()
	def historyBack(self):
		# Ask about running timeshift before leaving the current service.
		self.checkTimeshiftRunning(self.historyBackCheckTimeshiftCallback)
	def historyBackCheckTimeshiftCallback(self, answer):
		if answer:
			self.servicelist.historyBack()
	def historyNext(self):
		# Ask about running timeshift before leaving the current service.
		self.checkTimeshiftRunning(self.historyNextCheckTimeshiftCallback)
	def historyNextCheckTimeshiftCallback(self, answer):
		if answer:
			self.servicelist.historyNext()
	def openBouquetList(self):
		# Open the channel list positioned on the favourites/bouquet view.
		self.servicelist.showFavourites()
		self.session.execDialog(self.servicelist)
	# The key*Check handlers below route up/down/left/right according to two user
	# options: "oldstyle" controls (up/down zap directly) and "volume instead of
	# channel selection" (left/right adjust volume).
	def keyUpCheck(self):
		if config.usage.oldstyle_zap_controls.value:
			self.zapDown()
		elif config.usage.volume_instead_of_channelselection.value:
			VolumeControl.instance and VolumeControl.instance.volUp()
		else:
			self.switchChannelUp()
	def keyDownCheck(self):
		if config.usage.oldstyle_zap_controls.value:
			self.zapUp()
		elif config.usage.volume_instead_of_channelselection.value:
			VolumeControl.instance and VolumeControl.instance.volDown()
		else:
			self.switchChannelDown()
	def keyLeftCheck(self):
		if config.usage.oldstyle_zap_controls.value:
			if config.usage.volume_instead_of_channelselection.value:
				VolumeControl.instance and VolumeControl.instance.volDown()
			else:
				self.switchChannelUp()
		else:
			self.zapUp()
	def keyRightCheck(self):
		if config.usage.oldstyle_zap_controls.value:
			if config.usage.volume_instead_of_channelselection.value:
				VolumeControl.instance and VolumeControl.instance.volUp()
			else:
				self.switchChannelDown()
		else:
			self.zapDown()
	def keyChannelUpCheck(self):
		# CH+/CH- either zap directly or open the service list, per user option.
		if config.usage.zap_with_ch_buttons.value:
			self.zapDown()
		else:
			self.openServiceList()
	def keyChannelDownCheck(self):
		if config.usage.zap_with_ch_buttons.value:
			self.zapUp()
		else:
			self.openServiceList()
	# The getKey*Helptext methods build the help strings shown by HelpableActionMap;
	# each mirrors the branching of its corresponding key*Check handler.
	def getKeyUpHelptext(self):
		if config.usage.oldstyle_zap_controls.value:
			value = _("Switch to next channel")
		else:
			if config.usage.volume_instead_of_channelselection.value:
				value = _("Volume up")
			else:
				value = _("Open service list")
				if not "keep" in config.usage.servicelist_cursor_behavior.value:
					value += " " + _("and select previous channel")
		return value
	def getKeyDownHelpText(self):
		if config.usage.oldstyle_zap_controls.value:
			value = _("Switch to previous channel")
		else:
			if config.usage.volume_instead_of_channelselection.value:
				value = _("Volume down")
			else:
				value = _("Open service list")
				if not "keep" in config.usage.servicelist_cursor_behavior.value:
					value += " " + _("and select next channel")
		return value
	def getKeyLeftHelptext(self):
		if config.usage.oldstyle_zap_controls.value:
			if config.usage.volume_instead_of_channelselection.value:
				value = _("Volume down")
			else:
				value = _("Open service list")
				if not "keep" in config.usage.servicelist_cursor_behavior.value:
					value += " " + _("and select previous channel")
		else:
			value = _("Switch to previous channel")
		return value
	def getKeyRightHelptext(self):
		if config.usage.oldstyle_zap_controls.value:
			if config.usage.volume_instead_of_channelselection.value:
				value = _("Volume up")
			else:
				value = _("Open service list")
				if not "keep" in config.usage.servicelist_cursor_behavior.value:
					value += " " + _("and select next channel")
		else:
			value = _("Switch to next channel")
		return value
	def getKeyChannelUpHelptext(self):
		return config.usage.zap_with_ch_buttons.value and _("Switch to next channel") or _("Open service list")
	def getKeyChannelDownHelptext(self):
		return config.usage.zap_with_ch_buttons.value and _("Switch to previous channel") or _("Open service list")
	def switchChannelUp(self):
		# Open the channel list, pre-moving the cursor unless "keep" behavior is set.
		if "keep" not in config.usage.servicelist_cursor_behavior.value:
			self.servicelist.moveUp()
		self.session.execDialog(self.servicelist)
	def switchChannelDown(self):
		if "keep" not in config.usage.servicelist_cursor_behavior.value:
			self.servicelist.moveDown()
		self.session.execDialog(self.servicelist)
	def zapUp(self):
		# Zap to the previous playable service, optionally wrapping into the
		# previous bouquet; skips unplayable entries (loop stops when we either
		# come back to the starting service or find a playable one).
		if self.servicelist.inBouquet():
			prev = self.servicelist.getCurrentSelection()
			if prev:
				prev = prev.toString()
				while True:
					if config.usage.quickzap_bouquet_change.value:
						if self.servicelist.atBegin():
							self.servicelist.prevBouquet()
					self.servicelist.moveUp()
					cur = self.servicelist.getCurrentSelection()
					if cur:
						if self.servicelist.dopipzap:
							# While pip-zapping, playability is judged for the PiP decoder.
							isPlayable = self.session.pip.isPlayableForPipService(cur)
						else:
							isPlayable = isPlayableForCur(cur)
					if cur and (cur.toString() == prev or isPlayable):
						break
		else:
			self.servicelist.moveUp()
		self.servicelist.zap(enable_pipzap = True)
	def zapDown(self):
		# Mirror of zapUp in the other direction (wraps into the next bouquet).
		if self.servicelist.inBouquet():
			prev = self.servicelist.getCurrentSelection()
			if prev:
				prev = prev.toString()
				while True:
					if config.usage.quickzap_bouquet_change.value and self.servicelist.atEnd():
						self.servicelist.nextBouquet()
					else:
						self.servicelist.moveDown()
					cur = self.servicelist.getCurrentSelection()
					if cur:
						if self.servicelist.dopipzap:
							isPlayable = self.session.pip.isPlayableForPipService(cur)
						else:
							isPlayable = isPlayableForCur(cur)
					if cur and (cur.toString() == prev or isPlayable):
						break
		else:
			self.servicelist.moveDown()
		self.servicelist.zap(enable_pipzap = True)
	def openFavouritesList(self):
		self.servicelist.showFavourites()
		self.openServiceList()
	def openServiceList(self):
		self.session.execDialog(self.servicelist)
class InfoBarMenu:
""" Handles a menu action, to open the (main) menu """
def __init__(self):
self["MenuActions"] = HelpableActionMap(self, "InfobarMenuActions",
{
"mainMenu": (self.mainMenu, _("Enter main menu...")),
})
self.session.infobar = None
def mainMenu(self):
print "loading mainmenu XML..."
menu = mdom.getroot()
assert menu.tag == "menu", "root element in menu must be 'menu'!"
self.session.infobar = self
# so we can access the currently active infobar from screens opened from within the mainmenu
# at the moment used from the SubserviceSelection
self.session.openWithCallback(self.mainMenuClosed, MainMenu, menu)
def mainMenuClosed(self, *val):
self.session.infobar = None
class InfoBarSimpleEventView:
	""" Opens the Eventview for now/next """
	def __init__(self):
		self["EPGActions"] = HelpableActionMap(self, "InfobarEPGActions",
			{
				"showEventInfo": (self.openEventView, _("Show event details")),
				"showEventInfoSingleEPG": (self.openEventView, _("Show event details")),
				"showInfobarOrEpgWhenInfobarAlreadyVisible": self.showEventInfoWhenNotVisible,
			})
	def showEventInfoWhenNotVisible(self):
		# Infobar visible -> show details; otherwise just bring the infobar up
		# (returning 1 marks the action as handled).
		if self.shown:
			self.openEventView()
		else:
			self.toggleShow()
			return 1
	def openEventView(self):
		"""Collect the now/next events of the current service and open EventViewSimple."""
		epglist = [ ]
		self.epglist = epglist
		service = self.session.nav.getCurrentService()
		ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
		# Guard against no running service (same pattern as InfoBarEPG.getNowNext);
		# previously this raised AttributeError on service.info().
		info = service and service.info()
		if not info:
			return
		ptr = info.getEvent(0)
		if ptr:
			epglist.append(ptr)
		ptr = info.getEvent(1)
		if ptr:
			epglist.append(ptr)
		if epglist:
			self.session.open(EventViewSimple, epglist[0], ServiceReference(ref), self.eventViewCallback)
	def eventViewCallback(self, setEvent, setService, val): #used for now/next displaying
		# Swap now/next in place so repeated presses toggle between the two events.
		epglist = self.epglist
		if len(epglist) > 1:
			epglist[0], epglist[1] = epglist[1], epglist[0]
		setEvent(epglist[0])
class SimpleServicelist:
	"""Minimal wrap-around cursor over a flat list of service references."""
	def __init__(self, services):
		self.services = services
		self.length = len(services)
		self.current = 0
	def selectService(self, service):
		"""Position the cursor on *service*; return True when it was found."""
		if not self.length:
			self.current = -1
			return False
		for index in range(self.length):
			if self.services[index].ref == service:
				self.current = index
				return True
		# Not found: leave the cursor past the end (currentService() -> None).
		self.current = self.length
		return False
	def nextService(self):
		# Advance with wrap-around; no-op on an empty list.
		if not self.length:
			return
		if self.current + 1 < self.length:
			self.current += 1
		else:
			self.current = 0
	def prevService(self):
		# Step back with wrap-around; no-op on an empty list.
		if not self.length:
			return
		if self.current > 0:
			self.current -= 1
		else:
			self.current = self.length - 1
	def currentService(self):
		"""Return the service under the cursor, or None when none is selected."""
		if not self.length or self.current >= self.length:
			return None
		return self.services[self.current]
class InfoBarEPG:
	""" EPG - Opens an EPG list when the showEPGList action fires """
	def __init__(self):
		# is_now_next: True while the open event view shows live now/next data
		# (refreshed on evUpdatedEventInfo) rather than cached EPG lookups.
		self.is_now_next = False
		# Stack of EPG-related dialogs currently open (bouquet selector, event view, ...).
		self.dlg_stack = [ ]
		self.bouquetSel = None
		self.eventView = None
		self.epglist = []
		self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
			{
				iPlayableService.evUpdatedEventInfo: self.__evEventInfoChanged,
			})
		self["EPGActions"] = HelpableActionMap(self, "InfobarEPGActions",
			{
				"showEventInfo": (self.showDefaultEPG, _("Show EPG...")),
				"showEventInfoSingleEPG": (self.showSingleEPG, _("Show single service EPG")),
				"showEventInfoMultiEPG": (self.showMultiEPG, _("Show multi channel EPG")),
				#"showCurrentEvent": (self.openEventView, _("Show Current Info...")),
				#"showSingleCurrentEPG": (self.openSingleServiceEPG, _("Show single channel EPG...")),
				#"showBouquetEPG": (self.openMultiServiceEPG, _("Show Bouquet EPG...")),
				##"showEventInfoPlugin": (self.showEventInfoPlugins, _("List EPG functions...")),
				##"showEventGuidePlugin": (self.showEventGuidePlugins, _("List EPG functions...")),
				"showInfobarOrEpgWhenInfobarAlreadyVisible": self.showEventInfoWhenNotVisible,
			})
	def getEPGPluginList(self, getAll=False):
		# Collect EVENTINFO plugins that do not require a selected event, then
		# append the built-in EPG entries (only on the primary infobar unless getAll).
		pluginlist = [(p.name, boundFunction(self.runPlugin, p), p.path) for p in plugins.getPlugins(where = PluginDescriptor.WHERE_EVENTINFO) \
				if 'selectedevent' not in p.__call__.func_code.co_varnames] or []
		from Components.ServiceEventTracker import InfoBarCount
		if getAll or InfoBarCount == 1:
			pluginlist.append((_("Show EPG for current channel..."), self.openSingleServiceEPG, "current_channel"))
			pluginlist.append((_("Multi EPG"), self.openMultiServiceEPG, "multi_epg"))
			pluginlist.append((_("Current event EPG"), self.openEventView, "event_epg"))
		return pluginlist
	def showEventInfoWhenNotVisible(self):
		# Infobar visible -> open event view; otherwise just bring up the infobar
		# (returning 1 marks the action as handled).
		if self.shown:
			self.openEventView()
		else:
			self.toggleShow()
			return 1
	def zapToService(self, service, preview = False, zapback = False):
		# Zap callback handed to EPGSelection: navigates the service list to the
		# chosen service (switching bouquets if needed) and zaps / previews / zaps back.
		if self.servicelist.startServiceRef is None:
			self.servicelist.startServiceRef = self.session.nav.getCurrentlyPlayingServiceOrGroup()
		if service is not None:
			if self.servicelist.getRoot() != self.epg_bouquet: #already in correct bouquet?
				self.servicelist.clearPath()
				if self.servicelist.bouquet_root != self.epg_bouquet:
					self.servicelist.enterPath(self.servicelist.bouquet_root)
				self.servicelist.enterPath(self.epg_bouquet)
			self.servicelist.setCurrentSelection(service) #select the service in servicelist
		if not zapback or preview:
			self.servicelist.zap(enable_pipzap = True)
		if (self.servicelist.dopipzap or zapback) and not preview:
			self.servicelist.zapBack()
		if not preview:
			self.servicelist.startServiceRef = None
			self.servicelist.startRoot = None
	def getBouquetServices(self, bouquet):
		# Return the playable services of *bouquet* as ServiceReference objects,
		# skipping directories and markers.
		services = [ ]
		servicelist = eServiceCenter.getInstance().list(bouquet)
		if not servicelist is None:
			while True:
				service = servicelist.getNext()
				if not service.valid(): #check if end of list
					break
				if service.flags & (eServiceReference.isDirectory | eServiceReference.isMarker): #ignore non playable services
					continue
				services.append(ServiceReference(service))
		return services
	def openBouquetEPG(self, bouquet, withCallback=True):
		# Open the multi-service EPG for one bouquet; remembers it in epg_bouquet
		# so zapToService / changeBouquetCB know the current context.
		services = self.getBouquetServices(bouquet)
		if services:
			self.epg_bouquet = bouquet
			if withCallback:
				self.dlg_stack.append(self.session.openWithCallback(self.closed, EPGSelection, services, self.zapToService, None, self.changeBouquetCB))
			else:
				self.session.open(EPGSelection, services, self.zapToService, None, self.changeBouquetCB)
	def changeBouquetCB(self, direction, epg):
		# Bouquet+/- inside the EPG: step the bouquet selector and reload services.
		if self.bouquetSel:
			if direction > 0:
				self.bouquetSel.down()
			else:
				self.bouquetSel.up()
			bouquet = self.bouquetSel.getCurrent()
			services = self.getBouquetServices(bouquet)
			if services:
				self.epg_bouquet = bouquet
				epg.setServices(services)
	def closed(self, ret=False):
		# Dialog-stack unwind callback; a truthy ret cascades the close upwards.
		closedScreen = self.dlg_stack.pop()
		if self.bouquetSel and closedScreen == self.bouquetSel:
			self.bouquetSel = None
		elif self.eventView and closedScreen == self.eventView:
			self.eventView = None
		if ret:
			dlgs=len(self.dlg_stack)
			if dlgs > 0:
				self.dlg_stack[dlgs-1].close(dlgs > 1)
	def openMultiServiceEPG(self, withCallback=True):
		# Entry point for the multi-channel EPG; either asks for a bouquet first
		# or opens the current bouquet directly, per user option.
		bouquets = self.servicelist.getBouquetList()
		if bouquets is None:
			cnt = 0
		else:
			cnt = len(bouquets)
		if config.usage.multiepg_ask_bouquet.value:
			self.openMultiServiceEPGAskBouquet(bouquets, cnt, withCallback)
		else:
			self.openMultiServiceEPGSilent(bouquets, cnt, withCallback)
	def openMultiServiceEPGAskBouquet(self, bouquets, cnt, withCallback):
		if cnt > 1: # show bouquet list
			if withCallback:
				self.bouquetSel = self.session.openWithCallback(self.closed, BouquetSelector, bouquets, self.openBouquetEPG, enableWrapAround=True)
				self.dlg_stack.append(self.bouquetSel)
			else:
				self.bouquetSel = self.session.open(BouquetSelector, bouquets, self.openBouquetEPG, enableWrapAround=True)
		elif cnt == 1:
			self.openBouquetEPG(bouquets[0][1], withCallback)
	def openMultiServiceEPGSilent(self, bouquets, cnt, withCallback):
		# Open the EPG on the bouquet currently rooted in the service list, with a
		# silent (key-driven) bouquet selector when more than one bouquet exists.
		root = self.servicelist.getRoot()
		rootstr = root.toCompareString()
		current = 0
		for bouquet in bouquets:
			if bouquet[1].toCompareString() == rootstr:
				break
			current += 1
		if current >= cnt:
			current = 0
		if cnt > 1: # create bouquet list for bouq+/-
			self.bouquetSel = SilentBouquetSelector(bouquets, True, self.servicelist.getBouquetNumOffset(root))
		if cnt >= 1:
			self.openBouquetEPG(root, withCallback)
	def changeServiceCB(self, direction, epg):
		# Channel+/- inside the single-service EPG: step through the bouquet services.
		if self.serviceSel:
			if direction > 0:
				self.serviceSel.nextService()
			else:
				self.serviceSel.prevService()
			epg.setService(self.serviceSel.currentService())
	def SingleServiceEPGClosed(self, ret=False):
		self.serviceSel = None
	def openSingleServiceEPG(self):
		# EPG for the currently selected service; when inside a bouquet, a
		# SimpleServicelist cursor enables channel+/- navigation in the EPG.
		ref = self.servicelist.getCurrentSelection()
		if ref:
			if self.servicelist.getMutableList(): # bouquet in channellist
				current_path = self.servicelist.getRoot()
				services = self.getBouquetServices(current_path)
				self.serviceSel = SimpleServicelist(services)
				if self.serviceSel.selectService(ref):
					self.epg_bouquet = current_path
					self.session.openWithCallback(self.SingleServiceEPGClosed, EPGSelection, ref, self.zapToService, serviceChangeCB=self.changeServiceCB)
				else:
					self.session.openWithCallback(self.SingleServiceEPGClosed, EPGSelection, ref)
			else:
				self.session.open(EPGSelection, ref)
	def runPlugin(self, plugin):
		plugin(session = self.session, servicelist = self.servicelist)
	def showEventInfoPlugins(self):
		# Offer the EPG plugin list; with no plugins fall through to the single EPG.
		pluginlist = self.getEPGPluginList()
		if pluginlist:
			self.session.openWithCallback(self.EventInfoPluginChosen, ChoiceBox, title=_("Please choose an extension..."), list=pluginlist, skin_name="EPGExtensionsList", reorderConfig="eventinfo_order")
		else:
			self.openSingleServiceEPG()
	def EventInfoPluginChosen(self, answer):
		if answer is not None:
			answer[1]()
	def openSimilarList(self, eventid, refstr):
		self.session.open(EPGSelection, refstr, None, eventid)
	def getNowNext(self):
		# Refresh self.epglist with the current service's now (index 0) and
		# next (index 1) events; tolerates a missing service/info.
		epglist = [ ]
		service = self.session.nav.getCurrentService()
		info = service and service.info()
		ptr = info and info.getEvent(0)
		if ptr:
			epglist.append(ptr)
		ptr = info and info.getEvent(1)
		if ptr:
			epglist.append(ptr)
		self.epglist = epglist
	def __evEventInfoChanged(self):
		# Live update of an open now/next event view when the service's event changes.
		if self.is_now_next and len(self.dlg_stack) == 1:
			self.getNowNext()
			if self.eventView and self.epglist:
				self.eventView.setEvent(self.epglist[0])
	def showDefaultEPG(self):
		self.openEventView()
	def showSingleEPG(self):
		self.openSingleServiceEPG()
	def showMultiEPG(self):
		self.openMultiServiceEPG()
	def openEventView(self):
		# Open the event view. On a secondary infobar (InfoBarCount > 1) read
		# now/next straight from the service; on the primary one fall back to an
		# eEPGCache lookup when the service has no event info, and to the multi
		# EPG when there is no EPG data at all.
		from Components.ServiceEventTracker import InfoBarCount
		if InfoBarCount > 1:
			epglist = [ ]
			self.epglist = epglist
			service = self.session.nav.getCurrentService()
			ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
			# NOTE(review): unlike getNowNext, this path does not guard against a
			# missing service before calling service.info() — confirm callers.
			info = service.info()
			ptr=info.getEvent(0)
			if ptr:
				epglist.append(ptr)
			ptr=info.getEvent(1)
			if ptr:
				epglist.append(ptr)
			if epglist:
				self.session.open(EventViewEPGSelect, epglist[0], ServiceReference(ref), self.eventViewCallback, self.openSingleServiceEPG, self.openMultiServiceEPG, self.openSimilarList)
		else:
			ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
			self.getNowNext()
			epglist = self.epglist
			if not epglist:
				# No live event info: look the events up in the EPG cache instead.
				self.is_now_next = False
				epg = eEPGCache.getInstance()
				ptr = ref and ref.valid() and epg.lookupEventTime(ref, -1)
				if ptr:
					epglist.append(ptr)
					ptr = epg.lookupEventTime(ref, ptr.getBeginTime(), +1)
					if ptr:
						epglist.append(ptr)
			else:
				self.is_now_next = True
			if epglist:
				self.eventView = self.session.openWithCallback(self.closed, EventViewEPGSelect, epglist[0], ServiceReference(ref), self.eventViewCallback, self.openSingleServiceEPG, self.openMultiServiceEPG, self.openSimilarList)
				self.dlg_stack.append(self.eventView)
		if not epglist:
			print "no epg for the service avail.. so we show multiepg instead of eventinfo"
			self.openMultiServiceEPG(False)
	def eventViewCallback(self, setEvent, setService, val): #used for now/next displaying
		# Swap now/next so repeated presses toggle between the two events.
		epglist = self.epglist
		if len(epglist) > 1:
			tmp = epglist[0]
			epglist[0]=epglist[1]
			epglist[1]=tmp
		setEvent(epglist[0])
class InfoBarRdsDecoder:
	"""provides RDS and Rass support/display"""
	def __init__(self):
		self.rds_display = self.session.instantiateDialog(RdsInfoDisplay)
		self.session.instantiateSummaryDialog(self.rds_display)
		# Currently open RASS interactive screen, or None.
		self.rass_interactive = None
		self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
			{
				iPlayableService.evEnd: self.__serviceStopped,
				iPlayableService.evUpdatedRassSlidePic: self.RassSlidePicChanged
			})
		self["RdsActions"] = ActionMap(["InfobarRdsActions"],
		{
			"startRassInteractive": self.startRassInteractive
		},-1)
		# Disabled until the service reports interactive RASS availability.
		self["RdsActions"].setEnabled(False)
		self.onLayoutFinish.append(self.rds_display.show)
		self.rds_display.onRassInteractivePossibilityChanged.append(self.RassInteractivePossibilityChanged)
	def RassInteractivePossibilityChanged(self, state):
		# Toggle the RASS action with the service's interactive-RASS capability.
		self["RdsActions"].setEnabled(state)
	def RassSlidePicChanged(self):
		# New slide picture arrived; show it unless the interactive screen is
		# open (that screen handles slides itself).
		if not self.rass_interactive:
			service = self.session.nav.getCurrentService()
			decoder = service and service.rdsDecoder()
			if decoder:
				decoder.showRassSlidePicture()
	def __serviceStopped(self):
		# Service ended: close any open interactive RASS screen.
		if self.rass_interactive is not None:
			rass_interactive = self.rass_interactive
			self.rass_interactive = None
			rass_interactive.close()
	def startRassInteractive(self):
		self.rds_display.hide()
		self.rass_interactive = self.session.openWithCallback(self.RassInteractiveClosed, RassInteractive)
	def RassInteractiveClosed(self, *val):
		# Restore the passive RDS display after the interactive screen closes.
		if self.rass_interactive is not None:
			self.rass_interactive = None
			self.RassSlidePicChanged()
		self.rds_display.show()
class InfoBarSeek:
	"""handles actions like seeking, pause"""
	# Seek state tuples: (paused, trickplay speed, slow-motion ratio, display
	# string for the PVR state dialog) — see makeState*/isState* below.
	SEEK_STATE_PLAY = (0, 0, 0, ">")
	SEEK_STATE_PAUSE = (1, 0, 0, "||")
	SEEK_STATE_EOF = (1, 0, 0, "END")
	def __init__(self, actionmap = "InfobarSeekActions"):
		self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
			{
				iPlayableService.evSeekableStatusChanged: self.__seekableStatusChanged,
				iPlayableService.evStart: self.__serviceStarted,
				iPlayableService.evEOF: self.__evEOF,
				iPlayableService.evSOF: self.__evSOF,
			})
		self.fast_winding_hint_message_showed = False
		# Action map subclass that intercepts "seek:<seconds>" and
		# "seekdef:<key>" actions (number-key skipping) before normal dispatch.
		class InfoBarSeekActionMap(HelpableActionMap):
			def __init__(self, screen, *args, **kwargs):
				HelpableActionMap.__init__(self, screen, *args, **kwargs)
				self.screen = screen
			def action(self, contexts, action):
				print "action:", action
				if action[:5] == "seek:":
					time = int(action[5:])
					# 90000 PTS ticks per second.
					self.screen.doSeekRelative(time * 90000)
					return 1
				elif action[:8] == "seekdef:":
					key = int(action[8:])
					# Map keys 1-9 to (back, unused, forward) skip lengths from config.
					time = (-config.seek.selfdefined_13.value, False, config.seek.selfdefined_13.value,
						-config.seek.selfdefined_46.value, False, config.seek.selfdefined_46.value,
						-config.seek.selfdefined_79.value, False, config.seek.selfdefined_79.value)[key-1]
					self.screen.doSeekRelative(time * 90000)
					return 1
				else:
					return HelpableActionMap.action(self, contexts, action)
		# NOTE(review): "Pauze" in the help text below looks like a typo for "Pause".
		self["SeekActions"] = InfoBarSeekActionMap(self, actionmap,
			{
				"playpauseService": (self.playpauseService, _("Pauze/Continue playback")),
				"pauseService": (self.pauseService, _("Pause playback")),
				"unPauseService": (self.unPauseService, _("Continue playback")),
				"okButton": (self.okButton, _("Continue playback")),
				"seekFwd": (self.seekFwd, _("Seek forward")),
				"seekFwdManual": (self.seekFwdManual, _("Seek forward (enter time)")),
				"seekBack": (self.seekBack, _("Seek backward")),
				"seekBackManual": (self.seekBackManual, _("Seek backward (enter time)")),
				"jumpPreviousMark": (self.seekPreviousMark, _("Jump to previous marked position")),
				"jumpNextMark": (self.seekNextMark, _("Jump to next marked position")),
			}, prio=-1)
			# give them a little more priority to win over color buttons
		self["SeekActions"].setEnabled(False)
		self.seekstate = self.SEEK_STATE_PLAY
		self.lastseekstate = self.SEEK_STATE_PLAY
		self.onPlayStateChanged = [ ]
		self.lockedBecauseOfSkipping = False
		self.__seekableStatusChanged()
	# Constructors / predicates for the seek state tuples documented above.
	def makeStateForward(self, n):
		return (0, n, 0, ">> %dx" % n)
	def makeStateBackward(self, n):
		return (0, -n, 0, "<< %dx" % n)
	def makeStateSlowMotion(self, n):
		return (0, 0, n, "/%d" % n)
	def isStateForward(self, state):
		return state[1] > 1
	def isStateBackward(self, state):
		return state[1] < 0
	def isStateSlowMotion(self, state):
		return state[1] == 0 and state[2] > 1
	def getHigher(self, n, lst):
		# Smallest element of lst greater than n, or False.
		for x in lst:
			if x > n:
				return x
		return False
	def getLower(self, n, lst):
		# Largest element of lst smaller than n, or False.
		lst = lst[:]
		lst.reverse()
		for x in lst:
			if x < n:
				return x
		return False
	def showAfterSeek(self):
		if isinstance(self, InfoBarShowHide):
			self.doShow()
	def up(self):
		pass
	def down(self):
		pass
	def getSeek(self):
		# Return the service's seek interface, or None if not currently seekable.
		service = self.session.nav.getCurrentService()
		if service is None:
			return None
		seek = service.seek()
		if seek is None or not seek.isCurrentlySeekable():
			return None
		return seek
	def isSeekable(self):
		# On the standard infobar, seeking is only allowed while timeshift runs.
		if self.getSeek() is None or (isStandardInfoBar(self) and not self.timeshiftEnabled()):
			return False
		return True
	def __seekableStatusChanged(self):
		# print "seekable status changed!"
		if not self.isSeekable():
			self["SeekActions"].setEnabled(False)
			# print "not seekable, return to play"
			self.setSeekState(self.SEEK_STATE_PLAY)
		else:
			self["SeekActions"].setEnabled(True)
			# print "seekable"
	def __serviceStarted(self):
		self.fast_winding_hint_message_showed = False
		self.setSeekState(self.SEEK_STATE_PLAY)
		self.__seekableStatusChanged()
	def setSeekState(self, state):
		"""Apply *state* to the service's pause interface; falls back to PLAY or
		PAUSE when the requested trick mode is not supported. Returns False when
		there is no service, True otherwise (self.seekstate holds the resolved state)."""
		service = self.session.nav.getCurrentService()
		if service is None:
			return False
		if not self.isSeekable():
			if state not in (self.SEEK_STATE_PLAY, self.SEEK_STATE_PAUSE):
				state = self.SEEK_STATE_PLAY
		pauseable = service.pause()
		if pauseable is None:
			print "not pauseable."
			state = self.SEEK_STATE_PLAY
		self.seekstate = state
		if pauseable is not None:
			if self.seekstate[0]:
				print "resolved to PAUSE"
				pauseable.pause()
			elif self.seekstate[1]:
				if not pauseable.setFastForward(self.seekstate[1]):
					print "resolved to FAST FORWARD"
				else:
					self.seekstate = self.SEEK_STATE_PLAY
					print "FAST FORWARD not possible: resolved to PLAY"
			elif self.seekstate[2]:
				if not pauseable.setSlowMotion(self.seekstate[2]):
					print "resolved to SLOW MOTION"
				else:
					self.seekstate = self.SEEK_STATE_PAUSE
					print "SLOW MOTION not possible: resolved to PAUSE"
			else:
				print "resolved to PLAY"
				pauseable.unpause()
		for c in self.onPlayStateChanged:
			c(self.seekstate)
		self.checkSkipShowHideLock()
		if hasattr(self, "ScreenSaverTimerStart"):
			self.ScreenSaverTimerStart()
		return True
	def playpauseService(self):
		# Single-button toggle between PLAY and PAUSE.
		if self.seekstate != self.SEEK_STATE_PLAY:
			self.unPauseService()
		else:
			self.pauseService()
	def okButton(self):
		if self.seekstate == self.SEEK_STATE_PLAY:
			return 0
		elif self.seekstate == self.SEEK_STATE_PAUSE:
			self.pauseService()
		else:
			self.unPauseService()
	def pauseService(self):
		# While already paused, the pause key behaves per config.seek.on_pause:
		# resume ("play"), single-frame step ("step"), or restore the last trick
		# mode ("last").
		if self.seekstate == self.SEEK_STATE_PAUSE:
			if config.seek.on_pause.value == "play":
				self.unPauseService()
			elif config.seek.on_pause.value == "step":
				self.doSeekRelative(1)
			elif config.seek.on_pause.value == "last":
				self.setSeekState(self.lastseekstate)
				self.lastseekstate = self.SEEK_STATE_PLAY
		else:
			if self.seekstate != self.SEEK_STATE_EOF:
				self.lastseekstate = self.seekstate
			self.setSeekState(self.SEEK_STATE_PAUSE)
	def unPauseService(self):
		print "unpause"
		if self.seekstate == self.SEEK_STATE_PLAY:
			return 0
		self.setSeekState(self.SEEK_STATE_PLAY)
	def doSeek(self, pts):
		# Absolute seek to *pts* (90kHz PTS units).
		seekable = self.getSeek()
		if seekable is None:
			return
		seekable.seekTo(pts)
	def doSeekRelative(self, pts):
		# Relative seek by *pts*; leaves EOF state first, and pops the infobar
		# for larger skips when configured.
		seekable = self.getSeek()
		if seekable is None:
			return
		prevstate = self.seekstate
		if self.seekstate == self.SEEK_STATE_EOF:
			if prevstate == self.SEEK_STATE_PAUSE:
				self.setSeekState(self.SEEK_STATE_PAUSE)
			else:
				self.setSeekState(self.SEEK_STATE_PLAY)
		seekable.seekRelative(pts<0 and -1 or 1, abs(pts))
		if abs(pts) > 100 and config.usage.show_infobar_on_skip.value:
			self.showAfterSeek()
	def seekFwd(self):
		# Step up through forward speeds / leave backward / speed up slow motion.
		seek = self.getSeek()
		if seek and not (seek.isCurrentlySeekable() & 2):
			# Stream is (at most) position-seekable but cannot fast-wind yet.
			if not self.fast_winding_hint_message_showed and (seek.isCurrentlySeekable() & 1):
				self.session.open(MessageBox, _("No fast winding possible yet.. but you can use the number buttons to skip forward/backward!"), MessageBox.TYPE_INFO, timeout=10)
				self.fast_winding_hint_message_showed = True
				return
			return 0 # trade as unhandled action
		if self.seekstate == self.SEEK_STATE_PLAY:
			self.setSeekState(self.makeStateForward(int(config.seek.enter_forward.value)))
		elif self.seekstate == self.SEEK_STATE_PAUSE:
			if len(config.seek.speeds_slowmotion.value):
				self.setSeekState(self.makeStateSlowMotion(config.seek.speeds_slowmotion.value[-1]))
			else:
				self.setSeekState(self.makeStateForward(int(config.seek.enter_forward.value)))
		elif self.seekstate == self.SEEK_STATE_EOF:
			pass
		elif self.isStateForward(self.seekstate):
			speed = self.seekstate[1]
			if self.seekstate[2]:
				speed /= self.seekstate[2]
			speed = self.getHigher(speed, config.seek.speeds_forward.value) or config.seek.speeds_forward.value[-1]
			self.setSeekState(self.makeStateForward(speed))
		elif self.isStateBackward(self.seekstate):
			speed = -self.seekstate[1]
			if self.seekstate[2]:
				speed /= self.seekstate[2]
			speed = self.getLower(speed, config.seek.speeds_backward.value)
			if speed:
				self.setSeekState(self.makeStateBackward(speed))
			else:
				self.setSeekState(self.SEEK_STATE_PLAY)
		elif self.isStateSlowMotion(self.seekstate):
			speed = self.getLower(self.seekstate[2], config.seek.speeds_slowmotion.value) or config.seek.speeds_slowmotion.value[0]
			self.setSeekState(self.makeStateSlowMotion(speed))
	def seekBack(self):
		# Mirror of seekFwd in the backward direction.
		seek = self.getSeek()
		if seek and not (seek.isCurrentlySeekable() & 2):
			if not self.fast_winding_hint_message_showed and (seek.isCurrentlySeekable() & 1):
				self.session.open(MessageBox, _("No fast winding possible yet.. but you can use the number buttons to skip forward/backward!"), MessageBox.TYPE_INFO, timeout=10)
				self.fast_winding_hint_message_showed = True
				return
			return 0 # trade as unhandled action
		seekstate = self.seekstate
		if seekstate == self.SEEK_STATE_PLAY:
			self.setSeekState(self.makeStateBackward(int(config.seek.enter_backward.value)))
		elif seekstate == self.SEEK_STATE_EOF:
			self.setSeekState(self.makeStateBackward(int(config.seek.enter_backward.value)))
			self.doSeekRelative(-6)
		elif seekstate == self.SEEK_STATE_PAUSE:
			self.doSeekRelative(-1)
		elif self.isStateForward(seekstate):
			speed = seekstate[1]
			if seekstate[2]:
				speed /= seekstate[2]
			speed = self.getLower(speed, config.seek.speeds_forward.value)
			if speed:
				self.setSeekState(self.makeStateForward(speed))
			else:
				self.setSeekState(self.SEEK_STATE_PLAY)
		elif self.isStateBackward(seekstate):
			speed = -seekstate[1]
			if seekstate[2]:
				speed /= seekstate[2]
			speed = self.getHigher(speed, config.seek.speeds_backward.value) or config.seek.speeds_backward.value[-1]
			self.setSeekState(self.makeStateBackward(speed))
		elif self.isStateSlowMotion(seekstate):
			speed = self.getHigher(seekstate[2], config.seek.speeds_slowmotion.value)
			if speed:
				self.setSeekState(self.makeStateSlowMotion(speed))
			else:
				self.setSeekState(self.SEEK_STATE_PAUSE)
	def seekFwdManual(self):
		self.session.openWithCallback(self.fwdSeekTo, MinuteInput)
	def fwdSeekTo(self, minutes):
		print "Seek", minutes, "minutes forward"
		self.doSeekRelative(minutes * 60 * 90000)
	def seekBackManual(self):
		self.session.openWithCallback(self.rwdSeekTo, MinuteInput)
	def rwdSeekTo(self, minutes):
		print "rwdSeekTo"
		self.doSeekRelative(-minutes * 60 * 90000)
	def checkSkipShowHideLock(self):
		# Keep the infobar locked visible while in any non-PLAY (trick) state.
		wantlock = self.seekstate != self.SEEK_STATE_PLAY
		if config.usage.show_infobar_on_skip.value:
			if self.lockedBecauseOfSkipping and not wantlock:
				self.unlockShow()
				self.lockedBecauseOfSkipping = False
			if wantlock and not self.lockedBecauseOfSkipping:
				self.lockShow()
				self.lockedBecauseOfSkipping = True
	def calcRemainingTime(self):
		# Remaining playback seconds at the current trick speed, 0 if at/after
		# the end, or False when the length/position is unknown.
		# NOTE(review): the local "len" shadows the builtin within this method.
		seekable = self.getSeek()
		if seekable is not None:
			len = seekable.getLength()
			try:
				tmp = self.cueGetEndCutPosition()
				if tmp:
					len = (False, tmp)
			except:
				pass
			pos = seekable.getPlayPosition()
			speednom = self.seekstate[1] or 1
			speedden = self.seekstate[2] or 1
			if not len[0] and not pos[0]:
				if len[1] <= pos[1]:
					return 0
				time = (len[1] - pos[1])*speedden/(90*speednom)
				return time
		return False
	def __evEOF(self):
		if self.seekstate == self.SEEK_STATE_EOF:
			return
		# if we are seeking forward, we try to end up ~1s before the end, and pause there.
		seekstate = self.seekstate
		if self.seekstate != self.SEEK_STATE_PAUSE:
			self.setSeekState(self.SEEK_STATE_EOF)
		if seekstate not in (self.SEEK_STATE_PLAY, self.SEEK_STATE_PAUSE): # if we are seeking
			seekable = self.getSeek()
			if seekable is not None:
				seekable.seekTo(-1)
		if seekstate == self.SEEK_STATE_PLAY: # regular EOF
			self.doEofInternal(True)
		else:
			self.doEofInternal(False)
	def doEofInternal(self, playing):
		pass # Defined in subclasses
	def __evSOF(self):
		# Start-of-file reached (e.g. while rewinding): restart normal playback.
		self.setSeekState(self.SEEK_STATE_PLAY)
		self.doSeek(0)
	# This is needed, because some Mediaplayer use InfoBarSeek but not InfoBarCueSheetSupport
	def seekPreviousMark(self):
		if isinstance(self, InfoBarCueSheetSupport):
			self.jumpPreviousMark()
	def seekNextMark(self):
		if isinstance(self, InfoBarCueSheetSupport):
			self.jumpNextMark()
from Screens.PVRState import PVRState, TimeshiftState
class InfoBarPVRState:
	"""Shows a small PVR state dialog (the seek-state display string) whenever
	the infobar is visible and playback is not in plain PLAY state."""
	def __init__(self, screen=PVRState, force_show = False):
		self.onPlayStateChanged.append(self.__playStateChanged)
		self.pvrStateDialog = self.session.instantiateDialog(screen)
		self.onShow.append(self._mayShow)
		self.onHide.append(self.pvrStateDialog.hide)
		self.force_show = force_show
	def _mayShow(self):
		# Only pop the dialog while the infobar is up and we are not plainly playing.
		if self.shown and self.seekstate != self.SEEK_STATE_PLAY:
			self.pvrStateDialog.show()
	def __playStateChanged(self, state):
		self.pvrStateDialog["state"].setText(state[3])
		# Returning to plain PLAY with no infobar to be displayed: hide the
		# dialog, unless a subclass forces it to stay up.
		hide_again = (not config.usage.show_infobar_on_skip.value
			and self.seekstate == self.SEEK_STATE_PLAY
			and not self.force_show)
		if hide_again:
			self.pvrStateDialog.hide()
		else:
			self._mayShow()
class TimeshiftLive(Screen):
	"""Skin-defined indicator screen used by InfoBarTimeshiftState: shown
	when timeshift is enabled but playback is still live."""
	def __init__(self, session):
		Screen.__init__(self, session)
class InfoBarTimeshiftState(InfoBarPVRState):
	"""PVR state dialog variant for timeshift, plus a 'timeshift live' marker."""
	def __init__(self):
		InfoBarPVRState.__init__(self, screen=TimeshiftState, force_show = True)
		self.timeshiftLiveScreen = self.session.instantiateDialog(TimeshiftLive)
		self.onHide.append(self.timeshiftLiveScreen.hide)
		# hide the live marker whenever the second infobar comes up
		self.secondInfoBarScreen and self.secondInfoBarScreen.onShow.append(self.timeshiftLiveScreen.hide)
		self.timeshiftLiveScreen.hide()
		self.__hideTimer = eTimer()
		self.__hideTimer.callback.append(self.__hideTimeshiftState)
		self.onFirstExecBegin.append(self.pvrStateDialog.show)
	def _mayShow(self):
		"""Show the PVR state dialog when timeshift is active, or the live
		marker right after enabling; auto-hide after the infobar timeout."""
		if self.timeshiftEnabled():
			if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
				self.secondInfoBarScreen.hide()
			if self.timeshiftActivated():
				self.pvrStateDialog.show()
				self.timeshiftLiveScreen.hide()
			elif self.showTimeshiftState:
				# one-shot flag set by InfoBarTimeshift when timeshift starts
				self.pvrStateDialog.hide()
				self.timeshiftLiveScreen.show()
				self.showTimeshiftState = False
			if self.seekstate == self.SEEK_STATE_PLAY and config.usage.infobar_timeout.index and (self.pvrStateDialog.shown or self.timeshiftLiveScreen.shown):
				self.__hideTimer.startLongTimer(config.usage.infobar_timeout.index)
		else:
			self.__hideTimeshiftState()
	def __hideTimeshiftState(self):
		# timer callback / fallback: hide both state screens
		self.pvrStateDialog.hide()
		self.timeshiftLiveScreen.hide()
class InfoBarShowMovies:
	"""Thin key binding: routes up/down/movieList keys to the movie list.

	The showMovies/up/down handlers themselves must be provided by the
	embedding screen; this mixin is little more than an action map.
	"""
	def __init__(self):
		helptext = _("Open the movie list")
		self["MovieListActions"] = HelpableActionMap(self, "InfobarMovieListActions",
			{
				"movieList": (self.showMovies, helptext),
				"up": (self.up, helptext),
				"down": (self.down, helptext)
			})
# InfoBarTimeshift requires InfoBarSeek, instantiated BEFORE!
# Hrmf.
#
# Timeshift works the following way:
# demux0 demux1 "TimeshiftActions" "TimeshiftActivateActions" "SeekActions"
# - normal playback TUNER unused PLAY enable disable disable
# - user presses "yellow" button. FILE record PAUSE enable disable enable
# - user presses pause again           FILE      record    PLAY      enable              disable                  enable
# - user fast forwards FILE record FF enable disable enable
# - end of timeshift buffer reached TUNER record PLAY enable enable disable
# - user backwards FILE record BACK # !! enable disable enable
#
# in other words:
# - when a service is playing, pressing the "timeshiftStart" button ("yellow") enables recording ("enables timeshift"),
# freezes the picture (to indicate timeshift), sets timeshiftMode ("activates timeshift")
# now, the service becomes seekable, so "SeekActions" are enabled, "TimeshiftEnableActions" are disabled.
# - the user can now PVR around
# - if it hits the end, the service goes into live mode ("deactivates timeshift", it's of course still "enabled")
#       the service loses its "seekable" state. It can still be paused, but just to activate timeshift right
# after!
# the seek actions will be disabled, but the timeshiftActivateActions will be enabled
# - if the user rewinds, or press pause, timeshift will be activated again
# note that a timeshift can be enabled ("recording") and
# activated (currently time-shifting).
class InfoBarTimeshift:
	"""Timeshift handling: 'enabled' means the timeshift buffer is being
	recorded; 'activated' means playback currently comes from that buffer.
	See the state table in the comment block above this class."""
	# Allows plugins/skins to disable timeshift completely; getTimeshift()
	# then always returns None.
	ts_disabled = False
	def __init__(self):
		self["TimeshiftActions"] = HelpableActionMap(self, "InfobarTimeshiftActions",
			{
				"timeshiftStart": (self.startTimeshift, _("Start timeshift")), # the "yellow key"
				"timeshiftStop": (self.stopTimeshift, _("Stop timeshift")) # currently undefined :), probably 'TV'
			}, prio=1)
		self["TimeshiftActivateActions"] = ActionMap(["InfobarTimeshiftActivateActions"],
			{
				"timeshiftActivateEnd": self.activateTimeshiftEnd, # something like "rewind key"
				"timeshiftActivateEndAndPause": self.activateTimeshiftEndAndPause # something like "pause key"
			}, prio=-1) # priority over record
		self["TimeshiftActivateActions"].setEnabled(False)
		# Timers: delayed rewind after activating, optional auto-start of
		# timeshift after zap, end-of-event handling for "save current event".
		self.ts_rewind_timer = eTimer()
		self.ts_rewind_timer.callback.append(self.rewindService)
		self.ts_start_delay_timer = eTimer()
		self.ts_start_delay_timer.callback.append(self.startTimeshiftWithoutPause)
		self.ts_current_event_timer = eTimer()
		self.ts_current_event_timer.callback.append(self.saveTimeshiftFileForEvent)
		self.save_timeshift_file = False
		self.timeshift_was_activated = False
		self.showTimeshiftState = False
		self.save_timeshift_only_current_event = False
		self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
			{
				iPlayableService.evStart: self.__serviceStarted,
				iPlayableService.evSeekableStatusChanged: self.__seekableStatusChanged,
				iPlayableService.evEnd: self.__serviceEnd
			})
	def getTimeshift(self):
		"""Return the timeshift interface of the current service, or None."""
		if self.ts_disabled:
			return None
		service = self.session.nav.getCurrentService()
		return service and service.timeshift()
	def timeshiftEnabled(self):
		"""True while the timeshift buffer is being recorded."""
		ts = self.getTimeshift()
		return ts and ts.isTimeshiftEnabled()
	def timeshiftActivated(self):
		"""True while playback actually comes from the timeshift buffer."""
		ts = self.getTimeshift()
		return ts and ts.isTimeshiftActive()
	def startTimeshift(self, pauseService = True):
		"""Enable timeshift recording; optionally pause live playback too."""
		print "enable timeshift"
		ts = self.getTimeshift()
		if ts is None:
			# Only complain interactively when the user pressed the key (not
			# for the delayed automatic start after a zap).
			if not pauseService and not int(config.usage.timeshift_start_delay.value):
				self.session.open(MessageBox, _("Timeshift not possible!"), MessageBox.TYPE_ERROR, simple = True)
			print "no ts interface"
			return 0
		if ts.isTimeshiftEnabled():
			print "hu, timeshift already enabled?"
		else:
			if not ts.startTimeshift():
				# we remove the "relative time" for now.
				#self.pvrStateDialog["timeshift"].setRelative(time.time())
				if pauseService:
					# PAUSE.
					#self.setSeekState(self.SEEK_STATE_PAUSE)
					self.activateTimeshiftEnd(False)
					self.showTimeshiftState = True
				else:
					self.showTimeshiftState = False
				# enable the "TimeshiftEnableActions", which will override
				# the startTimeshift actions
				self.__seekableStatusChanged()
				# get current timeshift filename and calculate new
				self.save_timeshift_file = False
				self.save_timeshift_in_movie_dir = False
				self.setCurrentEventTimer()
				self.current_timeshift_filename = ts.getTimeshiftFilename()
				self.new_timeshift_filename = self.generateNewTimeshiftFileName()
			else:
				print "timeshift failed"
	def startTimeshiftWithoutPause(self):
		"""Timer callback: auto-start timeshift without pausing playback."""
		self.startTimeshift(False)
	def stopTimeshift(self):
		"""Stop timeshift, asking for confirmation/saving when configured."""
		ts = self.getTimeshift()
		if ts and ts.isTimeshiftEnabled():
			if int(config.usage.timeshift_start_delay.value):
				ts.switchToLive()
			else:
				self.checkTimeshiftRunning(self.stopTimeshiftcheckTimeshiftRunningCallback)
		else:
			return 0
	def stopTimeshiftcheckTimeshiftRunningCallback(self, answer):
		"""Confirmation callback: really stop timeshift when answered yes."""
		ts = self.getTimeshift()
		if answer and ts:
			ts.stopTimeshift()
			self.pvrStateDialog.hide()
			self.setCurrentEventTimer()
			# disable actions
			self.__seekableStatusChanged()
	# activates timeshift, and seeks to (almost) the end
	def activateTimeshiftEnd(self, back = True):
		"""Switch playback into the timeshift buffer near its end; with
		back=True, start rewinding shortly afterwards."""
		self.showTimeshiftState = True
		ts = self.getTimeshift()
		print "activateTimeshiftEnd"
		if ts is None:
			return
		if ts.isTimeshiftActive():
			print "!! activate timeshift called - but shouldn't this be a normal pause?"
			self.pauseService()
		else:
			print "play, ..."
			ts.activateTimeshift() # activate timeshift will automatically pause
			self.setSeekState(self.SEEK_STATE_PAUSE)
			seekable = self.getSeek()
			if seekable is not None:
				seekable.seekTo(-90000) # seek approx. 1 sec before end
			self.timeshift_was_activated = True
		if back:
			# rewind slightly delayed, once the seek has settled
			self.ts_rewind_timer.start(200, 1)
	def rewindService(self):
		"""Timer callback: begin rewinding at the configured entry speed."""
		self.setSeekState(self.makeStateBackward(int(config.seek.enter_backward.value)))
	# generates only filename without path
	def generateNewTimeshiftFileName(self):
		"""Build the recording-style filename used when saving the buffer."""
		name = "timeshift record"
		info = { }
		self.getProgramInfoAndEvent(info, name)
		serviceref = info["serviceref"]
		service_name = ""
		if isinstance(serviceref, eServiceReference):
			service_name = ServiceReference(serviceref).getServiceName()
		begin_date = strftime("%Y%m%d %H%M", localtime(time()))
		filename = begin_date + " - " + service_name
		if config.recording.filename_composition.value == "veryshort":
			filename = service_name + " - " + begin_date
		elif config.recording.filename_composition.value == "short":
			filename = strftime("%Y%m%d", localtime(time())) + " - " + info["name"]
		elif config.recording.filename_composition.value == "long":
			filename += " - " + info["name"] + " - " + info["description"]
		else:
			filename += " - " + info["name"] # standard
		if config.recording.ascii_filenames.value:
			filename = ASCIItranslit.legacyEncode(filename)
		print "New timeshift filename: ", filename
		return filename
	# same as activateTimeshiftEnd, but pauses afterwards.
	def activateTimeshiftEndAndPause(self):
		print "activateTimeshiftEndAndPause"
		#state = self.seekstate
		self.activateTimeshiftEnd(False)
	def callServiceStarted(self):
		# public wrapper for the name-mangled __serviceStarted handler
		self.__serviceStarted()
	def __seekableStatusChanged(self):
		"""Enable seek/activate actions to match the current timeshift state."""
		self["TimeshiftActivateActions"].setEnabled(not self.isSeekable() and self.timeshiftEnabled())
		state = self.getSeek() is not None and self.timeshiftEnabled()
		self["SeekActions"].setEnabled(state)
		if not state:
			self.setSeekState(self.SEEK_STATE_PLAY)
			self.restartSubtitle()
	def __serviceStarted(self):
		"""Service-start event: reset dialogs and (re)arm the auto-start timer."""
		self.pvrStateDialog.hide()
		self.__seekableStatusChanged()
		if self.ts_start_delay_timer.isActive():
			self.ts_start_delay_timer.stop()
		if int(config.usage.timeshift_start_delay.value):
			self.ts_start_delay_timer.start(int(config.usage.timeshift_start_delay.value) * 1000, True)
	def checkTimeshiftRunning(self, returnFunction):
		"""Ask what to do with a running timeshift before proceeding; calls
		returnFunction(True) immediately when no confirmation is needed."""
		if self.timeshiftEnabled() and config.usage.check_timeshift.value and self.timeshift_was_activated:
			message = _("Stop timeshift?")
			if not self.save_timeshift_file:
				choice = [(_("Yes"), "stop"), (_("No"), "continue"), (_("Yes and save"), "save"), (_("Yes and save in movie dir"), "save_movie")]
			else:
				choice = [(_("Yes"), "stop"), (_("No"), "continue")]
				message += "\n" + _("Reminder, you have chosen to save timeshift file.")
			if self.save_timeshift_only_current_event:
				remaining = self.currentEventTime()
				if remaining > 0:
					message += "\n" + _("The %d min remaining before the end of the event.") % abs(remaining / 60)
			self.session.openWithCallback(boundFunction(self.checkTimeshiftRunningCallback, returnFunction), MessageBox, message, simple = True, list = choice)
		else:
			returnFunction(True)
	def checkTimeshiftRunningCallback(self, returnFunction, answer):
		"""Handle the user's choice and forward a stop/continue decision."""
		if answer:
			if "movie" in answer:
				self.save_timeshift_in_movie_dir = True
			if "save" in answer:
				self.save_timeshift_file = True
				ts = self.getTimeshift()
				if ts:
					ts.saveTimeshiftFile()
					del ts
			if "continue" not in answer:
				self.saveTimeshiftFiles()
		returnFunction(answer and answer != "continue")
	# renames/moves timeshift files if requested
	def __serviceEnd(self):
		self.saveTimeshiftFiles()
		self.setCurrentEventTimer()
		self.timeshift_was_activated = False
	def saveTimeshiftFiles(self):
		"""Move the buffer (plus .sc/.cuts side files) to the target dir when saving."""
		if self.save_timeshift_file and self.current_timeshift_filename and self.new_timeshift_filename:
			if config.usage.timeshift_path.value and not self.save_timeshift_in_movie_dir:
				dirname = config.usage.timeshift_path.value
			else:
				dirname = defaultMoviePath()
			filename = getRecordingFilename(self.new_timeshift_filename, dirname) + ".ts"
			fileList = []
			fileList.append((self.current_timeshift_filename, filename))
			if fileExists(self.current_timeshift_filename + ".sc"):
				fileList.append((self.current_timeshift_filename + ".sc", filename + ".sc"))
			if fileExists(self.current_timeshift_filename + ".cuts"):
				fileList.append((self.current_timeshift_filename + ".cuts", filename + ".cuts"))
			moveFiles(fileList)
		self.save_timeshift_file = False
		self.setCurrentEventTimer()
	def currentEventTime(self):
		"""Seconds until the current EPG event ends (0 when unknown)."""
		remaining = 0
		ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
		if ref:
			epg = eEPGCache.getInstance()
			event = epg.lookupEventTime(ref, -1, 0)
			if event:
				now = int(time())
				start = event.getBeginTime()
				duration = event.getDuration()
				end = start + duration
				remaining = end - now
		return remaining
	def saveTimeshiftFileForEvent(self):
		"""Timer callback at event end: ask how to save the timeshift file."""
		if self.timeshiftEnabled() and self.save_timeshift_only_current_event and self.timeshift_was_activated and self.save_timeshift_file:
			message = _("Current event is over.\nSelect an option to save the timeshift file.")
			choice = [(_("Save and stop timeshift"), "save"), (_("Save and restart timeshift"), "restart"), (_("Don't save and stop timeshift"), "stop"), (_("Do nothing"), "continue")]
			self.session.openWithCallback(self.saveTimeshiftFileForEventCallback, MessageBox, message, simple = True, list = choice, timeout=15)
	def saveTimeshiftFileForEventCallback(self, answer):
		"""Apply the user's end-of-event choice (save/restart/stop/continue)."""
		self.save_timeshift_only_current_event = False
		if answer:
			ts = self.getTimeshift()
			if ts and answer in ("save", "restart", "stop"):
				self.stopTimeshiftcheckTimeshiftRunningCallback(True)
				if answer in ("save", "restart"):
					ts.saveTimeshiftFile()
					del ts
					self.saveTimeshiftFiles()
				if answer == "restart":
					self.ts_start_delay_timer.start(1000, True)
				self.save_timeshift_file = False
				self.save_timeshift_in_movie_dir = False
	def setCurrentEventTimer(self, duration=0):
		"""(Re)arm the end-of-event timer; duration==0 just cancels it."""
		self.ts_current_event_timer.stop()
		self.save_timeshift_only_current_event = False
		if duration > 0:
			self.save_timeshift_only_current_event = True
			self.ts_current_event_timer.startLongTimer(duration)
from Screens.PiPSetup import PiPSetup
class InfoBarExtensions:
	"""Maintains the 'extensions' menu: plugins/screens register entries
	(optionally with a preferred hotkey) and the user picks one from a
	ChoiceBox."""
	EXTENSION_SINGLE = 0
	EXTENSION_LIST = 1

	def __init__(self):
		# Registered entries: (type, extension, key) tuples; EXTENSION_LIST
		# entries hold a callable returning a list of (extension, key) pairs.
		self.list = []
		self["InstantExtensionsActions"] = HelpableActionMap(self, "InfobarExtensions",
			{
				"extensions": (self.showExtensionSelection, _("Show extensions...")),
			}, 1) # lower priority

	def addExtension(self, extension, key = None, type = EXTENSION_SINGLE):
		"""Register an extension (or a list provider) for the menu."""
		self.list.append((type, extension, key))

	def updateExtension(self, extension, key = None):
		"""Append one extension to the working list, assigning its preferred
		hotkey when free, otherwise the next unused one."""
		self.extensionsList.append(extension)
		# `in` replaces the Python-2-only dict.has_key()
		if key is not None and key in self.extensionKeys:
			key = None # preferred key already taken; fall back to auto-assign
		if key is None:
			for candidate in self.availableKeys:
				if candidate not in self.extensionKeys:
					key = candidate
					break
		if key is not None:
			self.extensionKeys[key] = len(self.extensionsList) - 1

	def updateExtensions(self):
		"""Rebuild extensionsList/extensionKeys from the registered entries."""
		self.extensionsList = []
		self.availableKeys = [ "1", "2", "3", "4", "5", "6", "7", "8", "9", "0", "red", "green", "yellow", "blue" ]
		self.extensionKeys = {}
		for entry_type, extension, key in self.list:
			if entry_type == self.EXTENSION_SINGLE:
				self.updateExtension(extension, key)
			else:
				for ext, ext_key in extension():
					self.updateExtension(ext, ext_key)

	def showExtensionSelection(self):
		"""Open a ChoiceBox with all currently available extensions."""
		self.updateExtensions()
		extensionsList = self.extensionsList[:]
		keys = []
		entries = []
		for x in self.availableKeys:
			if x in self.extensionKeys:
				extension = self.extensionsList[self.extensionKeys[x]]
				# extension = (name_callable, action_callable, available_callable)
				if extension[2]():
					entries.append((extension[0](), extension))
					keys.append(x)
				extensionsList.remove(extension)
		# remaining (key-less) extensions go last, without a hotkey
		entries.extend([(x[0](), x) for x in extensionsList])
		keys += [""] * len(extensionsList)
		self.session.openWithCallback(self.extensionCallback, ChoiceBox, title=_("Please choose an extension..."), list=entries, keys=keys, skin_name="ExtensionsList", reorderConfig="extension_order")

	def extensionCallback(self, answer):
		"""Run the chosen extension's action callable."""
		if answer is not None:
			answer[1][1]()
from Tools.BoundFunction import boundFunction
import inspect
# depends on InfoBarExtensions
class InfoBarPlugins:
	"""Feeds EXTENSIONSMENU plugins into the extensions menu (depends on
	InfoBarExtensions being mixed in as well)."""
	def __init__(self):
		self.addExtension(extension = self.getPluginList, type = InfoBarExtensions.EXTENSION_LIST)

	def getPluginName(self, name):
		"""Name callable for an extensions-menu entry."""
		return name

	def getPluginList(self):
		"""Collect matching extension-menu plugins, sorted by name."""
		entries = []
		for p in plugins.getPlugins(where = PluginDescriptor.WHERE_EXTENSIONSMENU):
			args = inspect.getargspec(p.__call__)[0]
			# one-arg plugins always fit; two-arg ones additionally want a
			# service list, so only offer them from the channel selection
			wants_servicelist = len(args) == 2 and isinstance(self, InfoBarChannelSelection)
			if len(args) == 1 or wants_servicelist:
				entries.append(((boundFunction(self.getPluginName, p.name), boundFunction(self.runPlugin, p), lambda: True), None, p.name))
		entries.sort(key = lambda item: item[2]) # sort by name
		return entries

	def runPlugin(self, plugin):
		"""Invoke a plugin, passing the service list when available."""
		kwargs = {"session": self.session}
		if isinstance(self, InfoBarChannelSelection):
			kwargs["servicelist"] = self.servicelist
		plugin(**kwargs)
from Components.Task import job_manager
class InfoBarJobman:
	"""Adds the pending background jobs (Components.Task) to the extensions
	menu so the user can re-open their progress view."""
	def __init__(self):
		self.addExtension(extension = self.getJobList, type = InfoBarExtensions.EXTENSION_LIST)

	def getJobList(self):
		"""One extensions-menu entry per pending job."""
		return [((boundFunction(self.getJobName, job), boundFunction(self.showJobView, job), lambda: True), None) for job in job_manager.getPendingJobs()]

	def getJobName(self, job):
		"""Format 'Status: name (NN%)' for the menu entry."""
		# Guard against ZeroDivisionError for a job whose total (end) is
		# still 0, e.g. while it is being set up.
		progress = int(100 * job.progress / float(job.end)) if job.end else 0
		return "%s: %s (%d%%)" % (job.getStatustext(), job.name, progress)

	def showJobView(self, job):
		"""Open the JobView screen, bringing the job to the foreground."""
		from Screens.TaskView import JobView
		job_manager.in_background = False
		self.session.openWithCallback(self.JobViewCB, JobView, job)

	def JobViewCB(self, in_background):
		# Restore the user's foreground/background choice from JobView.
		job_manager.in_background = in_background
# depends on InfoBarExtensions
class InfoBarPiP:
	"""Picture-in-Picture: show/hide, move, swap, and 'pipzap' (channel
	selection focus inside the PiP) handling."""
	def __init__(self):
		# session.pipshown is shared between infobar instances
		try:
			self.session.pipshown
		except:
			self.session.pipshown = False
		self.lastPiPService = None
		if SystemInfo["PIPAvailable"]:
			self["PiPActions"] = HelpableActionMap(self, "InfobarPiPActions",
				{
					"activatePiP": (self.activePiP, self.activePiPName),
				})
			if (self.allowPiP):
				self.addExtension((self.getShowHideName, self.showPiP, lambda: True), "blue")
				self.addExtension((self.getMoveName, self.movePiP, self.pipShown), "green")
				self.addExtension((self.getSwapName, self.swapPiP, self.pipShown), "yellow")
				self.addExtension((self.getTogglePipzapName, self.togglePipzap, lambda: True), "red")
			else:
				self.addExtension((self.getShowHideName, self.showPiP, self.pipShown), "blue")
				self.addExtension((self.getMoveName, self.movePiP, self.pipShown), "green")
		self.lastPiPServiceTimeoutTimer = eTimer()
		self.lastPiPServiceTimeoutTimer.callback.append(self.clearLastPiPService)
	def pipShown(self):
		"""True while a PiP window exists."""
		return self.session.pipshown
	def pipHandles0Action(self):
		# '0' key is redirected to PiP handling unless configured 'standard'
		return self.pipShown() and config.usage.pip_zero_button.value != "standard"
	def getShowHideName(self):
		if self.session.pipshown:
			return _("Disable Picture in Picture")
		else:
			return _("Activate Picture in Picture")
	def getSwapName(self):
		return _("Swap services")
	def getMoveName(self):
		return _("Picture in Picture Setup")
	def getTogglePipzapName(self):
		slist = self.servicelist
		if slist and slist.dopipzap:
			return _("Zap focus to main screen")
		return _("Zap focus to Picture in Picture")
	def togglePipzap(self):
		"""Move the channel-selection focus between main screen and PiP."""
		if not self.session.pipshown:
			self.showPiP()
		slist = self.servicelist
		if slist and self.session.pipshown:
			slist.togglePipzap()
			if slist.dopipzap:
				# swap the remembered bouquet paths of main screen and PiP
				currentServicePath = slist.getCurrentServicePath()
				slist.setCurrentServicePath(self.session.pip.servicePath, doZap=False)
				self.session.pip.servicePath = currentServicePath
	def showPiP(self):
		"""Toggle the PiP window: tear it down if shown, else create it and
		tune it to the last PiP service / current service."""
		self.lastPiPServiceTimeoutTimer.stop()
		slist = self.servicelist
		if self.session.pipshown:
			# hand the zap focus back to the main screen before closing
			if slist and slist.dopipzap:
				self.togglePipzap()
			if self.session.pipshown:
				lastPiPServiceTimeout = int(config.usage.pip_last_service_timeout.value)
				if lastPiPServiceTimeout >= 0:
					self.lastPiPService = self.session.pip.getCurrentServiceReference()
					if lastPiPServiceTimeout:
						self.lastPiPServiceTimeoutTimer.startLongTimer(lastPiPServiceTimeout)
				del self.session.pip
				if SystemInfo["LCDMiniTV"]:
					if config.lcd.modepip.value >= "1":
						f = open("/proc/stb/lcd/mode", "w")
						f.write(config.lcd.modeminitv.value)
						f.close()
				self.session.pipshown = False
			if hasattr(self, "ScreenSaverTimerStart"):
				self.ScreenSaverTimerStart()
		else:
			self.session.pip = self.session.instantiateDialog(PictureInPicture)
			self.session.pip.show()
			newservice = self.lastPiPService or self.session.nav.getCurrentlyPlayingServiceReference() or (slist and slist.servicelist.getCurrent())
			if self.session.pip.playService(newservice):
				self.session.pipshown = True
				self.session.pip.servicePath = slist and slist.getCurrentServicePath()
				if SystemInfo["LCDMiniTV"]:
					if config.lcd.modepip.value >= "1":
						f = open("/proc/stb/lcd/mode", "w")
						f.write(config.lcd.modepip.value)
						f.close()
						f = open("/proc/stb/vmpeg/1/dst_width", "w")
						f.write("0")
						f.close()
						f = open("/proc/stb/vmpeg/1/dst_height", "w")
						f.write("0")
						f.close()
						f = open("/proc/stb/vmpeg/1/dst_apply", "w")
						f.write("1")
						f.close()
			else:
				# last PiP service failed; retry with the current service
				newservice = self.session.nav.getCurrentlyPlayingServiceReference() or (slist and slist.servicelist.getCurrent())
				if self.session.pip.playService(newservice):
					self.session.pipshown = True
					self.session.pip.servicePath = slist and slist.getCurrentServicePath()
				else:
					self.session.pipshown = False
					del self.session.pip
			if self.session.pipshown and hasattr(self, "screenSaverTimer"):
				self.screenSaverTimer.stop()
			self.lastPiPService = None
	def clearLastPiPService(self):
		# timer callback: forget the remembered PiP service after the timeout
		self.lastPiPService = None
	def activePiP(self):
		# PiP key: toggle window when focus is on main / no PiP, else pipzap
		if self.servicelist and self.servicelist.dopipzap or not self.session.pipshown:
			self.showPiP()
		else:
			self.togglePipzap()
	def activePiPName(self):
		if self.servicelist and self.servicelist.dopipzap:
			return _("Disable Picture in Picture")
		if self.session.pipshown:
			return _("Zap focus to Picture in Picture")
		else:
			return _("Activate Picture in Picture")
	def swapPiP(self):
		"""Exchange the services shown in the main screen and the PiP."""
		if self.pipShown():
			swapservice = self.session.nav.getCurrentlyPlayingServiceOrGroup()
			pipref = self.session.pip.getCurrentService()
			if swapservice and pipref and pipref.toString() != swapservice.toString():
				slist = self.servicelist
				if slist:
					currentServicePath = slist.getCurrentServicePath()
					currentBouquet = slist.getRoot()
					slist.setCurrentServicePath(self.session.pip.servicePath, doZap=False)
				self.session.pip.playService(swapservice)
				self.session.nav.playService(pipref, checkParentalControl=False, adjust=False)
				if slist:
					self.session.pip.servicePath = currentServicePath
					self.session.pip.servicePath[1] = currentBouquet
				if slist and slist.dopipzap:
					# This unfortunately won't work with subservices
					slist.setCurrentSelection(self.session.pip.getCurrentService())
	def movePiP(self):
		"""Open the PiP position/size setup screen."""
		if self.pipShown():
			self.session.open(PiPSetup, pip = self.session.pip)
	def pipDoHandle0Action(self):
		# behavior of the '0' key when redirected to PiP (see pipHandles0Action)
		use = config.usage.pip_zero_button.value
		if "swap" == use:
			self.swapPiP()
		elif "swapstop" == use:
			self.swapPiP()
			self.showPiP()
		elif "stop" == use:
			self.showPiP()
from RecordTimer import parseEvent, RecordTimerEntry
class InfoBarInstantRecord:
	"""Instant Record - handles the instantRecord action in order to
	start/stop instant records"""
	def __init__(self):
		self["InstantRecordActions"] = HelpableActionMap(self, "InfobarInstantRecord",
			{
				"instantRecord": (self.instantRecord, _("Instant recording...")),
			})
		self.SelectedInstantServiceRef = None
		# non-standard infobars share the recording list of the main InfoBar
		if isStandardInfoBar(self):
			self.recording = []
		else:
			from Screens.InfoBar import InfoBar
			InfoBarInstance = InfoBar.instance
			if InfoBarInstance:
				self.recording = InfoBarInstance.recording
	def moveToTrash(self, entry):
		"""Move a finished instant recording's files into the trash folder."""
		print "instantRecord stop and delete recording: ", entry.name
		import Tools.Trashcan
		trash = Tools.Trashcan.createTrashFolder(entry.Filename)
		from MovieSelection import moveServiceFiles
		moveServiceFiles(entry.Filename, trash, entry.name, allowCopy=False)
	def stopCurrentRecording(self, entry = -1):
		"""Ask for confirmation, then stop (and optionally trash) one recording."""
		def confirm(answer=False):
			if answer:
				self.session.nav.RecordTimer.removeEntry(self.recording[entry])
				if self.deleteRecording:
					self.moveToTrash(self.recording[entry])
				self.recording.remove(self.recording[entry])
		if entry is not None and entry != -1:
			msg = _("Stop recording:")
			if self.deleteRecording:
				msg = _("Stop and delete recording:")
			msg += "\n"
			msg += " - " + self.recording[entry].name + "\n"
			self.session.openWithCallback(confirm, MessageBox, msg, MessageBox.TYPE_YESNO)
	def stopAllCurrentRecordings(self, list):
		"""Ask for confirmation, then stop (and optionally trash) all listed recordings."""
		def confirm(answer=False):
			if answer:
				for entry in list:
					self.session.nav.RecordTimer.removeEntry(entry[0])
					self.recording.remove(entry[0])
					if self.deleteRecording:
						self.moveToTrash(entry[0])
		msg = _("Stop recordings:")
		if self.deleteRecording:
			msg = _("Stop and delete recordings:")
		msg += "\n"
		for entry in list:
			msg += " - " + entry[0].name + "\n"
		self.session.openWithCallback(confirm, MessageBox, msg, MessageBox.TYPE_YESNO)
	def getProgramInfoAndEvent(self, info, name):
		"""Fill dict 'info' with serviceref/event/name/description/eventid
		(and 'end' when an EPG event is found) for the selected service."""
		info["serviceref"] = hasattr(self, "SelectedInstantServiceRef") and self.SelectedInstantServiceRef or self.session.nav.getCurrentlyPlayingServiceOrGroup()
		# try to get event info
		event = None
		try:
			epg = eEPGCache.getInstance()
			event = epg.lookupEventTime(info["serviceref"], -1, 0)
			if event is None:
				if hasattr(self, "SelectedInstantServiceRef") and self.SelectedInstantServiceRef:
					service_info = eServiceCenter.getInstance().info(self.SelectedInstantServiceRef)
					event = service_info and service_info.getEvent(self.SelectedInstantServiceRef)
				else:
					service = self.session.nav.getCurrentService()
					event = service and service.info().getEvent(0)
		except:
			pass
		info["event"] = event
		info["name"] = name
		info["description"] = ""
		info["eventid"] = None
		if event is not None:
			curEvent = parseEvent(event)
			info["name"] = curEvent[2]
			info["description"] = curEvent[3]
			info["eventid"] = curEvent[4]
			info["end"] = curEvent[1]
	def startInstantRecording(self, limitEvent = False):
		"""Create and register an instant RecordTimerEntry; with limitEvent
		the recording stops at the end of the current EPG event."""
		begin = int(time())
		end = begin + 3600 # dummy
		name = "instant record"
		info = { }
		self.getProgramInfoAndEvent(info, name)
		serviceref = info["serviceref"]
		event = info["event"]
		if event is not None:
			if limitEvent:
				end = info["end"]
		else:
			if limitEvent:
				self.session.open(MessageBox, _("No event info found, recording indefinitely."), MessageBox.TYPE_INFO)
		if isinstance(serviceref, eServiceReference):
			serviceref = ServiceReference(serviceref)
		recording = RecordTimerEntry(serviceref, begin, end, info["name"], info["description"], info["eventid"], dirname = preferredInstantRecordPath())
		recording.dontSave = True
		if event is None or limitEvent == False:
			recording.autoincrease = True
			recording.setAutoincreaseEnd()
		simulTimerList = self.session.nav.RecordTimer.record(recording)
		if simulTimerList is None: # no conflict
			recording.autoincrease = False
			self.recording.append(recording)
		else:
			if len(simulTimerList) > 1: # with other recording
				name = simulTimerList[1].name
				name_date = ' '.join((name, strftime('%F %T', localtime(simulTimerList[1].begin))))
				print "[TIMER] conflicts with", name_date
				recording.autoincrease = True # start with max available length, then increment
				if recording.setAutoincreaseEnd():
					self.session.nav.RecordTimer.record(recording)
					self.recording.append(recording)
					self.session.open(MessageBox, _("Record time limited due to conflicting timer %s") % name_date, MessageBox.TYPE_INFO)
				else:
					self.session.open(MessageBox, _("Could not record due to conflicting timer %s") % name, MessageBox.TYPE_INFO)
			else:
				self.session.open(MessageBox, _("Could not record due to invalid service %s") % serviceref, MessageBox.TYPE_INFO)
			recording.autoincrease = False
	def isInstantRecordRunning(self):
		"""True when any tracked instant recording is currently running."""
		print "self.recording:", self.recording
		if self.recording:
			for x in self.recording:
				if x.isRunning():
					return True
		return False
	def recordQuestionCallback(self, answer):
		"""Dispatch the user's choice from the instantRecord ChoiceBox."""
		print "pre:\n", self.recording
		if answer is None or answer[1] == "no":
			return
		list = []
		recording = self.recording[:]
		# drop entries that are no longer in the timer list
		for x in recording:
			if not x in self.session.nav.RecordTimer.timer_list:
				self.recording.remove(x)
			elif x.dontSave and x.isRunning():
				list.append((x, False))
		self.deleteRecording = False
		if answer[1] == "changeduration":
			if len(self.recording) == 1:
				self.changeDuration(0)
			else:
				self.session.openWithCallback(self.changeDuration, TimerSelection, list)
		elif answer[1] == "addrecordingtime":
			if len(self.recording) == 1:
				self.addRecordingTime(0)
			else:
				self.session.openWithCallback(self.addRecordingTime, TimerSelection, list)
		elif answer[1] == "changeendtime":
			if len(self.recording) == 1:
				self.setEndtime(0)
			else:
				self.session.openWithCallback(self.setEndtime, TimerSelection, list)
		elif answer[1] == "timer":
			import TimerEdit
			self.session.open(TimerEdit.TimerEditList)
		elif answer[1] == "stop":
			if len(self.recording) == 1:
				self.stopCurrentRecording(0)
			else:
				self.session.openWithCallback(self.stopCurrentRecording, TimerSelection, list)
		elif answer[1] == "stopdelete":
			self.deleteRecording = True
			if len(self.recording) == 1:
				self.stopCurrentRecording(0)
			else:
				self.session.openWithCallback(self.stopCurrentRecording, TimerSelection, list)
		elif answer[1] == "stopall":
			self.stopAllCurrentRecordings(list)
		elif answer[1] == "stopdeleteall":
			self.deleteRecording = True
			self.stopAllCurrentRecordings(list)
		elif answer[1] in ( "indefinitely" , "manualduration", "manualendtime", "event"):
			self.startInstantRecording(limitEvent = answer[1] in ("event", "manualendtime") or False)
			if answer[1] == "manualduration":
				self.changeDuration(len(self.recording)-1)
			elif answer[1] == "manualendtime":
				self.setEndtime(len(self.recording)-1)
		elif "timeshift" in answer[1]:
			# timeshift save options provided by InfoBarTimeshift
			ts = self.getTimeshift()
			if ts:
				ts.saveTimeshiftFile()
				self.save_timeshift_file = True
				if "movie" in answer[1]:
					self.save_timeshift_in_movie_dir = True
				if "event" in answer[1]:
					remaining = self.currentEventTime()
					if remaining > 0:
						self.setCurrentEventTimer(remaining-15)
		print "after:\n", self.recording
	def setEndtime(self, entry):
		"""Open a date/time input to change one recording's end time."""
		if entry is not None and entry >= 0:
			self.selectedEntry = entry
			self.endtime=ConfigClock(default = self.recording[self.selectedEntry].end)
			dlg = self.session.openWithCallback(self.TimeDateInputClosed, TimeDateInput, self.endtime)
			dlg.setTitle(_("Please change recording endtime"))
	def TimeDateInputClosed(self, ret):
		"""Apply the end time chosen in TimeDateInput (ret = (ok, timestamp))."""
		if len(ret) > 1:
			if ret[0]:
				print "stopping recording at", strftime("%F %T", localtime(ret[1]))
				if self.recording[self.selectedEntry].end != ret[1]:
					self.recording[self.selectedEntry].autoincrease = False
				self.recording[self.selectedEntry].end = ret[1]
				self.session.nav.RecordTimer.timeChanged(self.recording[self.selectedEntry])
	def changeDuration(self, entry):
		"""Ask for a new total duration (minutes) for one recording."""
		if entry is not None and entry >= 0:
			self.selectedEntry = entry
			self.session.openWithCallback(self.inputCallback, InputBox, title=_("How many minutes do you want to record?"), text="5", maxSize=False, type=Input.NUMBER)
	def addRecordingTime(self, entry):
		"""Ask how many minutes to add to one recording."""
		if entry is not None and entry >= 0:
			self.selectedEntry = entry
			self.session.openWithCallback(self.inputAddRecordingTime, InputBox, title=_("How many minutes do you want add to record?"), text="5", maxSize=False, type=Input.NUMBER)
	def inputAddRecordingTime(self, value):
		"""InputBox callback: extend the selected recording by 'value' minutes."""
		if value:
			print "added", int(value), "minutes for recording."
			entry = self.recording[self.selectedEntry]
			if int(value) != 0:
				entry.autoincrease = False
			entry.end += 60 * int(value)
			self.session.nav.RecordTimer.timeChanged(entry)
	def inputCallback(self, value):
		"""InputBox callback: stop the selected recording 'value' minutes from now."""
		if value:
			print "stopping recording after", int(value), "minutes."
			entry = self.recording[self.selectedEntry]
			if int(value) != 0:
				entry.autoincrease = False
			entry.end = int(time()) + 60 * int(value)
			self.session.nav.RecordTimer.timeChanged(entry)
	def isTimerRecordRunning(self):
		"""True when a scheduled (non-instant) timer recording is running."""
		identical = timers = 0
		for timer in self.session.nav.RecordTimer.timer_list:
			if timer.isRunning() and not timer.justplay:
				timers += 1
				if self.recording:
					for x in self.recording:
						if x.isRunning() and x == timer:
							identical += 1
		return timers > identical
	def instantRecord(self, serviceRef=None):
		"""Entry point for the record key: build and show the options ChoiceBox."""
		self.SelectedInstantServiceRef = serviceRef
		pirr = preferredInstantRecordPath()
		if not findSafeRecordPath(pirr) and not findSafeRecordPath(defaultMoviePath()):
			if not pirr:
				pirr = ""
			self.session.open(MessageBox, _("Missing ") + "\n" + pirr +
				"\n" + _("No HDD found or HDD not initialized!"), MessageBox.TYPE_ERROR)
			return
		if isStandardInfoBar(self):
			common = ((_("Add recording (stop after current event)"), "event"),
				(_("Add recording (indefinitely)"), "indefinitely"),
				(_("Add recording (enter recording duration)"), "manualduration"),
				(_("Add recording (enter recording endtime)"), "manualendtime"),)
		else:
			common = ()
		if self.isInstantRecordRunning():
			title =_("A recording is currently running.\nWhat do you want to do?")
			list = common + \
				((_("Change recording (duration)"), "changeduration"),
				(_("Change recording (add time)"), "addrecordingtime"),
				(_("Change recording (endtime)"), "changeendtime"),)
			list += ((_("Stop recording"), "stop"),)
			if config.usage.movielist_trashcan.value:
				list += ((_("Stop and delete recording"), "stopdelete"),)
			if len(self.recording) > 1:
				list += ((_("Stop all current recordings"), "stopall"),)
				if config.usage.movielist_trashcan.value:
					list += ((_("Stop and delete all current recordings"), "stopdeleteall"),)
			if self.isTimerRecordRunning():
				list += ((_("Stop timer recording"), "timer"),)
			list += ((_("Do nothing"), "no"),)
		else:
			title=_("Start recording?")
			list = common
			if self.isTimerRecordRunning():
				list += ((_("Stop timer recording"), "timer"),)
			if isStandardInfoBar(self):
				list += ((_("Do not record"), "no"),)
		if isStandardInfoBar(self) and self.timeshiftEnabled():
			list = list + ((_("Save timeshift file"), "timeshift"),
				(_("Save timeshift file in movie directory"), "timeshift_movie"))
			if self.currentEventTime() > 0:
				list += ((_("Save timeshift only for current event"), "timeshift_event"),)
		if list:
			self.session.openWithCallback(self.recordQuestionCallback, ChoiceBox, title=title, list=list)
		else:
			return 0
from Tools.ISO639 import LanguageCodes
class InfoBarAudioSelection:
    """Infobar mixin that binds the audio key to the audio-options dialog."""

    def __init__(self):
        self["AudioSelectionAction"] = HelpableActionMap(self, "InfobarAudioSelectionActions",
            {
                "audioSelection": (self.audioSelection, _("Audio options...")),
            })

    def audioSelection(self):
        # Imported at call time (screen module).
        from Screens.AudioSelection import AudioSelection
        self.session.openWithCallback(self.audioSelected, AudioSelection, infobar=self)

    def audioSelected(self, ret=None):
        # Debug trace of the dialog result; no further action is taken here.
        print "[infobar::audioSelected]", ret
class InfoBarSubserviceSelection:
    """Infobar mixin for listing and zapping between DVB sub-services."""

    def __init__(self):
        self["SubserviceSelectionAction"] = HelpableActionMap(self, "InfobarSubserviceSelectionActions",
            {
                "subserviceSelection": (self.subserviceSelection, _("Subservice list...")),
            })
        self["SubserviceQuickzapAction"] = HelpableActionMap(self, "InfobarSubserviceQuickzapActions",
            {
                "nextSubservice": (self.nextSubservice, _("Switch to next sub service")),
                "prevSubservice": (self.prevSubservice, _("Switch to previous sub service"))
            }, -1)
        # Quickzap keys stay disabled until a subservice has been selected.
        self["SubserviceQuickzapAction"].setEnabled(False)
        self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
            {
                iPlayableService.evUpdatedEventInfo: self.checkSubservicesAvail
            })
        self.onClose.append(self.__removeNotifications)
        # Currently open BouquetSelector dialog, if any.
        self.bsel = None

    def __removeNotifications(self):
        self.session.nav.event.remove(self.checkSubservicesAvail)

    def checkSubservicesAvail(self):
        # Disable quickzap when the current service carries no subservices.
        service = self.session.nav.getCurrentService()
        subservices = service and service.subServices()
        if not subservices or subservices.getNumberOfSubservices() == 0:
            self["SubserviceQuickzapAction"].setEnabled(False)

    def nextSubservice(self):
        self.changeSubservice(+1)

    def prevSubservice(self):
        self.changeSubservice(-1)

    def changeSubservice(self, direction):
        """Zap to the next/previous subservice; direction is +1 or -1, wraps."""
        service = self.session.nav.getCurrentService()
        subservices = service and service.subServices()
        n = subservices and subservices.getNumberOfSubservices()
        if n and n > 0:
            selection = -1
            ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
            idx = 0
            # Locate the currently playing subservice in the list.
            while idx < n:
                if subservices.getSubservice(idx).toString() == ref.toString():
                    selection = idx
                    break
                idx += 1
            if selection != -1:
                selection += direction
                if selection >= n:
                    selection = 0
                elif selection < 0:
                    selection = n - 1
                newservice = subservices.getSubservice(selection)
                if newservice.valid():
                    del subservices
                    del service
                    self.session.nav.playService(newservice, False)

    def subserviceSelection(self):
        """Open a ChoiceBox with all subservices plus quickzap/bouquet entries."""
        service = self.session.nav.getCurrentService()
        subservices = service and service.subServices()
        self.bouquets = self.servicelist.getBouquetList()
        n = subservices and subservices.getNumberOfSubservices()
        selection = 0
        if n and n > 0:
            ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
            tlist = []
            idx = 0
            while idx < n:
                i = subservices.getSubservice(idx)
                if i.toString() == ref.toString():
                    selection = idx
                tlist.append((i.getName(), i))
                idx += 1
            # Prepend the action entries; the preselected index must be
            # shifted by the number of entries inserted before the list.
            if self.bouquets and len(self.bouquets):
                keys = ["red", "blue", "", "0", "1", "2", "3", "4", "5", "6", "7", "8", "9"] + [""] * n
                if config.usage.multibouquet.value:
                    tlist = [(_("Quick zap"), "quickzap", service.subServices()), (_("Add to bouquet"), "CALLFUNC", self.addSubserviceToBouquetCallback), ("--", "")] + tlist
                else:
                    tlist = [(_("Quick zap"), "quickzap", service.subServices()), (_("Add to favourites"), "CALLFUNC", self.addSubserviceToBouquetCallback), ("--", "")] + tlist
                selection += 3
            else:
                tlist = [(_("Quick zap"), "quickzap", service.subServices()), ("--", "")] + tlist
                keys = ["red", "", "0", "1", "2", "3", "4", "5", "6", "7", "8", "9"] + [""] * n
                selection += 2
            self.session.openWithCallback(self.subserviceSelected, ChoiceBox, title=_("Please select a sub service..."), list = tlist, selection = selection, keys = keys, skin_name = "SubserviceSelection")

    def subserviceSelected(self, service):
        """ChoiceBox callback: start quickzap or play the chosen subservice."""
        del self.bouquets
        if not service is None:
            if isinstance(service[1], str):
                if service[1] == "quickzap":
                    from Screens.SubservicesQuickzap import SubservicesQuickzap
                    self.session.open(SubservicesQuickzap, service[2])
            else:
                self["SubserviceQuickzapAction"].setEnabled(True)
                self.session.nav.playService(service[1], False)

    def addSubserviceToBouquetCallback(self, service):
        """CALLFUNC entry: ask for a target bouquet, or add directly if only one."""
        if len(service) > 1 and isinstance(service[1], eServiceReference):
            self.selectedSubservice = service
            if self.bouquets is None:
                cnt = 0
            else:
                cnt = len(self.bouquets)
            if cnt > 1: # show bouquet list
                self.bsel = self.session.openWithCallback(self.bouquetSelClosed, BouquetSelector, self.bouquets, self.addSubserviceToBouquet)
            elif cnt == 1: # add to only one existing bouquet
                self.addSubserviceToBouquet(self.bouquets[0][1])
                self.session.open(MessageBox, _("Service has been added to the favourites."), MessageBox.TYPE_INFO)

    def bouquetSelClosed(self, confirmed):
        self.bsel = None
        del self.selectedSubservice
        if confirmed:
            self.session.open(MessageBox, _("Service has been added to the selected bouquet."), MessageBox.TYPE_INFO)

    def addSubserviceToBouquet(self, dest):
        """Add the remembered subservice to bouquet 'dest' and close the selector."""
        self.servicelist.addServiceToBouquet(dest, self.selectedSubservice[1])
        if self.bsel:
            self.bsel.close(True)
        else:
            del self.selectedSubservice
class InfoBarRedButton:
    """Infobar mixin handling the red button (HbbTV applications)."""

    def __init__(self):
        self["RedButtonActions"] = HelpableActionMap(self, "InfobarRedButtonActions",
            {
                "activateRedButton": (self.activateRedButton, _("Red button...")),
            })
        # Callback lists other components may append handlers to.
        self.onHBBTVActivation = [ ]
        self.onRedButtonActivation = [ ]

    def activateRedButton(self):
        service = self.session.nav.getCurrentService()
        info = service and service.info()
        # Fire the HbbTV callbacks only when the service announces an HbbTV URL.
        if info and info.getInfoString(iServiceInformation.sHBBTVUrl) != "":
            for x in self.onHBBTVActivation:
                x()
        elif False: # TODO: other red button services
            for x in self.onRedButtonActivation:
                x()
class InfoBarTimerButton:
    """Infobar mixin: bind the timer key to the timer overview screen."""

    def __init__(self):
        actions = {
            "timerSelection": (self.timerSelection, _("Timer selection...")),
        }
        self["TimerButtonActions"] = HelpableActionMap(self, "InfobarTimerButtonActions", actions)

    def timerSelection(self):
        # Imported at call time (screen module).
        from Screens.TimerEdit import TimerEditList
        self.session.open(TimerEditList)
class InfoBarVmodeButton:
    """Infobar mixin: bind the vmode key to the letterbox-zoom OSD."""

    def __init__(self):
        actions = {
            "vmodeSelection": (self.vmodeSelection, _("Letterbox zoom")),
        }
        self["VmodeButtonActions"] = HelpableActionMap(self, "InfobarVmodeButtonActions", actions)

    def vmodeSelection(self):
        self.session.open(VideoMode)
class VideoMode(Screen):
    """Small OSD screen that cycles the 4:3/16:9 display policy.

    Every press of the bound key advances the relevant policy setting
    (policy_169 for widescreen content, policy_43 otherwise) to its next
    choice and shows the new value; the screen closes one second after
    the last change.
    """

    def __init__(self, session):
        Screen.__init__(self, session)
        self["videomode"] = Label()
        self["actions"] = NumberActionMap( [ "InfobarVmodeButtonActions" ],
            {
                "vmodeSelection": self.selectVMode
            })
        # Auto-close timer, (re)started after every policy change.
        self.Timer = eTimer()
        self.Timer.callback.append(self.quit)
        self.selectVMode()

    def selectVMode(self):
        """Advance the active policy to its next choice and display it."""
        policy = config.av.policy_43
        if self.isWideScreen():
            policy = config.av.policy_169
        idx = policy.choices.index(policy.value)
        idx = (idx + 1) % len(policy.choices)
        policy.value = policy.choices[idx]
        self["videomode"].setText(policy.value)
        self.Timer.start(1000, True)

    def isWideScreen(self):
        """Return a truthy value when the current service plays widescreen content."""
        from Components.Converter.ServiceInfo import WIDESCREEN
        service = self.session.nav.getCurrentService()
        info = service and service.info()
        # Fix: guard against a missing service/info object instead of
        # raising AttributeError; "no info" is treated as not widescreen.
        return info and info.getInfo(iServiceInformation.sAspect) in WIDESCREEN

    def quit(self):
        self.Timer.stop()
        self.close()
class InfoBarAdditionalInfo:
    """Expose fixed capability flags as Boolean sources for the skin."""

    def __init__(self):
        # Recording (and thus timeshift) is only possible with storage attached.
        can_record = harddiskmanager.HDDCount() > 0
        self["RecordingPossible"] = Boolean(fixed=can_record)
        self["TimeshiftPossible"] = self["RecordingPossible"]
        self["ExtensionsAvailable"] = Boolean(fixed=1)
        # TODO: these properties should be queried from the input device keymap
        self["ShowTimeshiftOnYellow"] = Boolean(fixed=0)
        self["ShowAudioOnYellow"] = Boolean(fixed=0)
        self["ShowRecordOnRed"] = Boolean(fixed=0)
class InfoBarNotifications:
    """Infobar mixin that pops queued Notifications while the screen executes.

    Notification entries n are tuples:
    (callback, screen class, args, kwargs, id) — see the Notifications module.
    """

    def __init__(self):
        self.onExecBegin.append(self.checkNotifications)
        Notifications.notificationAdded.append(self.checkNotificationsIfExecing)
        self.onClose.append(self.__removeNotification)

    def __removeNotification(self):
        Notifications.notificationAdded.remove(self.checkNotificationsIfExecing)

    def checkNotificationsIfExecing(self):
        # Only pop notifications while this screen is actually executing.
        if self.execing:
            self.checkNotifications()

    def checkNotifications(self):
        """Pop and display the oldest queued notification, if any."""
        notifications = Notifications.notifications
        if notifications:
            n = notifications[0]
            del notifications[0]
            cb = n[0]
            # Fix: dict.has_key() replaced with the "in" operator (the
            # idiomatic form; has_key was removed in Python 3).
            if "onSessionOpenCallback" in n[3]:
                n[3]["onSessionOpenCallback"]()
                del n[3]["onSessionOpenCallback"]
            if cb:
                dlg = self.session.openWithCallback(cb, n[1], *n[2], **n[3])
            elif not Notifications.current_notifications and n[4] == "ZapError":
                # ZapError popups are shown as a borderless instantiated
                # dialog that any keypress dismisses.
                if "timeout" in n[3]:
                    del n[3]["timeout"]
                n[3]["enable_input"] = False
                dlg = self.session.instantiateDialog(n[1], *n[2], **n[3])
                self.hide()
                dlg.show()
                self.notificationDialog = dlg
                eActionMap.getInstance().bindAction('', -maxint - 1, self.keypressNotification)
            else:
                dlg = self.session.open(n[1], *n[2], **n[3])
            # remember that this notification is currently active
            d = (n[4], dlg)
            Notifications.current_notifications.append(d)
            dlg.onClose.append(boundFunction(self.__notificationClosed, d))

    def closeNotificationInstantiateDialog(self):
        if hasattr(self, "notificationDialog"):
            self.session.deleteDialog(self.notificationDialog)
            del self.notificationDialog
            eActionMap.getInstance().unbindAction('', self.keypressNotification)

    def keypressNotification(self, key, flag):
        # Any completed keypress dismisses the instantiated notification.
        if flag:
            self.closeNotificationInstantiateDialog()

    def __notificationClosed(self, d):
        Notifications.current_notifications.remove(d)
class InfoBarServiceNotifications:
def __init__(self):
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evEnd: self.serviceHasEnded
})
def serviceHasEnded(self):
print "service end!"
try:
self.setSeekState(self.SEEK_STATE_PLAY)
except:
pass
class InfoBarCueSheetSupport:
    """Infobar mixin managing the cuesheet of the current service.

    The cut list is a list of (pts, type) pairs; positions are in PTS
    units (90000 per second).  Also implements optional "resume from last
    position" support when ENABLE_RESUME_SUPPORT is set by a subclass.
    """
    CUT_TYPE_IN = 0     # start of a kept section
    CUT_TYPE_OUT = 1    # start of a cut-out section
    CUT_TYPE_MARK = 2   # plain bookmark
    CUT_TYPE_LAST = 3   # last playback position

    ENABLE_RESUME_SUPPORT = False

    def __init__(self, actionmap = "InfobarCueSheetActions"):
        self["CueSheetActions"] = HelpableActionMap(self, actionmap,
            {
                "jumpPreviousMark": (self.jumpPreviousMark, _("Jump to previous marked position")),
                "jumpNextMark": (self.jumpNextMark, _("Jump to next marked position")),
                "toggleMark": (self.toggleMark, _("Toggle a cut mark at the current position"))
            }, prio=1)
        self.cut_list = [ ]
        self.is_closing = False
        self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
            {
                iPlayableService.evStart: self.__serviceStarted,
                iPlayableService.evCuesheetChanged: self.downloadCuesheet,
            })

    def __serviceStarted(self):
        if self.is_closing:
            return
        print "new service started! trying to download cuts!"
        self.downloadCuesheet()
        if self.ENABLE_RESUME_SUPPORT:
            # Prefer the LAST mark from the cuesheet; fall back to the
            # resume-point store when the cut list has none (for/else).
            for (pts, what) in self.cut_list:
                if what == self.CUT_TYPE_LAST:
                    last = pts
                    break
            else:
                last = getResumePoint(self.session)
            if last is None:
                return
            # only resume if at least 10 seconds ahead, or <10 seconds before the end.
            seekable = self.__getSeekable()
            if seekable is None:
                return # Should not happen?
            length = seekable.getLength() or (None, 0)
            print "seekable.getLength() returns:", length
            # Hmm, this implies we don't resume if the length is unknown...
            if (last > 900000) and (not length[1] or (last < length[1] - 900000)):
                self.resume_point = last
                l = last / 90000
                if "ask" in config.usage.on_movie_start.value or not length[1]:
                    Notifications.AddNotificationWithCallback(self.playLastCB, MessageBox, _("Do you want to resume this playback?") + "\n" + (_("Resume position at %s") % ("%d:%02d:%02d" % (l/3600, l%3600/60, l%60))), timeout=10, default="yes" in config.usage.on_movie_start.value)
                elif config.usage.on_movie_start.value == "resume":
                    # TRANSLATORS: The string "Resuming playback" flashes for a moment
                    # TRANSLATORS: at the start of a movie, when the user has selected
                    # TRANSLATORS: "Resume from last position" as start behavior.
                    # TRANSLATORS: The purpose is to notify the user that the movie starts
                    # TRANSLATORS: in the middle somewhere and not from the beginning.
                    # TRANSLATORS: (Some translators seem to have interpreted it as a
                    # TRANSLATORS: question or a choice, but it is a statement.)
                    Notifications.AddNotificationWithCallback(self.playLastCB, MessageBox, _("Resuming playback"), timeout=2, type=MessageBox.TYPE_INFO)

    def playLastCB(self, answer):
        # Callback of the resume question; seek only on explicit "yes".
        if answer == True:
            self.doSeek(self.resume_point)
        self.hideAfterResume()

    def hideAfterResume(self):
        if isinstance(self, InfoBarShowHide):
            self.hide()

    def __getSeekable(self):
        service = self.session.nav.getCurrentService()
        if service is None:
            return None
        return service.seek()

    def cueGetCurrentPosition(self):
        """Return the current play position in PTS, or None if not seekable."""
        seek = self.__getSeekable()
        if seek is None:
            return None
        r = seek.getPlayPosition()
        if r[0]:
            return None
        return long(r[1])

    def cueGetEndCutPosition(self):
        """Return the PTS of the last OUT cut not followed by an IN, else False."""
        ret = False
        isin = True
        for cp in self.cut_list:
            if cp[1] == self.CUT_TYPE_OUT:
                if isin:
                    isin = False
                    ret = cp[0]
            elif cp[1] == self.CUT_TYPE_IN:
                isin = True
        return ret

    def jumpPreviousNextMark(self, cmp, start=False):
        """Seek to the nearest mark selected by comparator 'cmp'; True on success."""
        current_pos = self.cueGetCurrentPosition()
        if current_pos is None:
            return False
        mark = self.getNearestCutPoint(current_pos, cmp=cmp, start=start)
        if mark is not None:
            pts = mark[0]
        else:
            return False
        self.doSeek(pts)
        return True

    def jumpPreviousMark(self):
        # we add 5 seconds, so if the play position is <5s after
        # the mark, the mark before will be used
        self.jumpPreviousNextMark(lambda x: -x-5*90000, start=True)

    def jumpNextMark(self):
        # No next mark: jump to the end of the recording instead.
        if not self.jumpPreviousNextMark(lambda x: x-90000):
            self.doSeek(-1)

    def getNearestCutPoint(self, pts, cmp=abs, start=False):
        """Return the cut point nearest to 'pts' under comparator 'cmp'.

        Only candidates with cmp(distance) >= 0 are considered; marks inside
        cut-out sections are skipped.  With start=True, position 0 and IN
        points are also candidates.
        """
        # can be optimized
        beforecut = True
        nearest = None
        bestdiff = -1
        instate = True
        if start:
            bestdiff = cmp(0 - pts)
            if bestdiff >= 0:
                nearest = [0, False]
        for cp in self.cut_list:
            if beforecut and cp[1] in (self.CUT_TYPE_IN, self.CUT_TYPE_OUT):
                beforecut = False
                if cp[1] == self.CUT_TYPE_IN:  # Start is here, disregard previous marks
                    diff = cmp(cp[0] - pts)
                    if start and diff >= 0:
                        nearest = cp
                        bestdiff = diff
                    else:
                        nearest = None
                        bestdiff = -1
            if cp[1] == self.CUT_TYPE_IN:
                instate = True
            elif cp[1] == self.CUT_TYPE_OUT:
                instate = False
            elif cp[1] in (self.CUT_TYPE_MARK, self.CUT_TYPE_LAST):
                diff = cmp(cp[0] - pts)
                if instate and diff >= 0 and (nearest is None or bestdiff > diff):
                    nearest = cp
                    bestdiff = diff
        return nearest

    def toggleMark(self, onlyremove=False, onlyadd=False, tolerance=5*90000, onlyreturn=False):
        """Remove the mark near the current position, or add one if none is near.

        With onlyreturn=True no change is made; the nearby cut point (or
        None) is returned instead.
        """
        current_pos = self.cueGetCurrentPosition()
        if current_pos is None:
            print "not seekable"
            return
        nearest_cutpoint = self.getNearestCutPoint(current_pos)
        if nearest_cutpoint is not None and abs(nearest_cutpoint[0] - current_pos) < tolerance:
            if onlyreturn:
                return nearest_cutpoint
            if not onlyadd:
                self.removeMark(nearest_cutpoint)
        elif not onlyremove and not onlyreturn:
            self.addMark((current_pos, self.CUT_TYPE_MARK))
        if onlyreturn:
            return None

    def addMark(self, point):
        # Keep the cut list sorted by PTS and push it to the service.
        insort(self.cut_list, point)
        self.uploadCuesheet()
        self.showAfterCuesheetOperation()

    def removeMark(self, point):
        self.cut_list.remove(point)
        self.uploadCuesheet()
        self.showAfterCuesheetOperation()

    def showAfterCuesheetOperation(self):
        if isinstance(self, InfoBarShowHide):
            self.doShow()

    def __getCuesheet(self):
        service = self.session.nav.getCurrentService()
        if service is None:
            return None
        return service.cueSheet()

    def uploadCuesheet(self):
        """Write self.cut_list to the current service's cuesheet interface."""
        cue = self.__getCuesheet()
        if cue is None:
            print "upload failed, no cuesheet interface"
            return
        cue.setCutList(self.cut_list)

    def downloadCuesheet(self):
        """Fill self.cut_list from the current service's cuesheet interface."""
        cue = self.__getCuesheet()
        if cue is None:
            print "download failed, no cuesheet interface"
            self.cut_list = [ ]
        else:
            self.cut_list = cue.getCutList()
class InfoBarSummary(Screen):
    # LCD/front-display summary for the infobar: current time (blinking
    # while recording, when configured), service name and event progress.
    skin = """
<screen position="0,0" size="132,64">
<widget source="global.CurrentTime" render="Label" position="62,46" size="82,18" font="Regular;16" >
<convert type="ClockToText">WithSeconds</convert>
</widget>
<widget source="session.RecordState" render="FixedLabel" text=" " position="62,46" size="82,18" zPosition="1" >
<convert type="ConfigEntryTest">config.usage.blinking_display_clock_during_recording,True,CheckSourceBoolean</convert>
<convert type="ConditionalShowHide">Blink</convert>
</widget>
<widget source="session.CurrentService" render="Label" position="6,4" size="120,42" font="Regular;18" >
<convert type="ServiceName">Name</convert>
</widget>
<widget source="session.Event_Now" render="Progress" position="6,46" size="46,18" borderWidth="1" >
<convert type="EventTime">Progress</convert>
</widget>
</screen>"""

# for picon: (path="piconlcd" will use LCD picons)
#		<widget source="session.CurrentService" render="Picon" position="6,0" size="120,64" path="piconlcd" >
#			<convert type="ServiceName">Reference</convert>
#		</widget>
class InfoBarSummarySupport:
    """Mixin advertising InfoBarSummary as this screen's summary screen."""

    def __init__(self):
        pass

    def createSummary(self):
        # Called by the Screen framework to obtain the summary screen class.
        return InfoBarSummary
class InfoBarMoviePlayerSummary(Screen):
    # LCD/front-display summary for the movie player: clock, service name
    # and playback position instead of the event progress bar.
    skin = """
<screen position="0,0" size="132,64">
<widget source="global.CurrentTime" render="Label" position="62,46" size="64,18" font="Regular;16" halign="right" >
<convert type="ClockToText">WithSeconds</convert>
</widget>
<widget source="session.RecordState" render="FixedLabel" text=" " position="62,46" size="64,18" zPosition="1" >
<convert type="ConfigEntryTest">config.usage.blinking_display_clock_during_recording,True,CheckSourceBoolean</convert>
<convert type="ConditionalShowHide">Blink</convert>
</widget>
<widget source="session.CurrentService" render="Label" position="6,4" size="120,42" font="Regular;18" >
<convert type="ServiceName">Name</convert>
</widget>
<widget source="session.CurrentService" render="Progress" position="6,46" size="56,18" borderWidth="1" >
<convert type="ServicePosition">Position</convert>
</widget>
</screen>"""
class InfoBarMoviePlayerSummarySupport:
    """Mixin advertising InfoBarMoviePlayerSummary as the summary screen."""

    def __init__(self):
        pass

    def createSummary(self):
        # Called by the Screen framework to obtain the summary screen class.
        return InfoBarMoviePlayerSummary
class InfoBarTeletextPlugin:
    """Bind the teletext key to an installed teletext plugin, if any."""

    def __init__(self):
        self.teletext_plugin = None
        # When several teletext plugins are installed, the last one wins.
        for p in plugins.getPlugins(PluginDescriptor.WHERE_TELETEXT):
            self.teletext_plugin = p
        if self.teletext_plugin is not None:
            self["TeletextActions"] = HelpableActionMap(self, "InfobarTeletextActions",
                {
                    "startTeletext": (self.startTeletext, _("View teletext..."))
                })
        else:
            print "no teletext plugin found!"

    def startTeletext(self):
        self.teletext_plugin and self.teletext_plugin(session=self.session, service=self.session.nav.getCurrentService())
class InfoBarSubtitleSupport(object):
    """Infobar mixin managing subtitle selection and the subtitle window."""

    def __init__(self):
        object.__init__(self)
        self["SubtitleSelectionAction"] = HelpableActionMap(self, "InfobarSubtitleSelectionActions",
            {
                "subtitleSelection": (self.subtitleSelection, _("Subtitle selection...")),
            })
        self.selected_subtitle = None
        # Only the standard infobar instantiates the subtitle window;
        # other infobars reuse the InfoBar instance's window.
        if isStandardInfoBar(self):
            self.subtitle_window = self.session.instantiateDialog(SubtitleDisplay)
        else:
            from Screens.InfoBar import InfoBar
            self.subtitle_window = InfoBar.instance.subtitle_window
        self.subtitle_window.hide()
        self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
            {
                iPlayableService.evStart: self.__serviceChanged,
                iPlayableService.evEnd: self.__serviceChanged,
                iPlayableService.evUpdatedInfo: self.__updatedInfo
            })

    def getCurrentServiceSubtitle(self):
        service = self.session.nav.getCurrentService()
        return service and service.subtitle()

    def subtitleSelection(self):
        subtitle = self.getCurrentServiceSubtitle()
        subtitlelist = subtitle and subtitle.getSubtitleList()
        if self.selected_subtitle or subtitlelist and len(subtitlelist)>0:
            from Screens.AudioSelection import SubtitleSelection
            self.session.open(SubtitleSelection, self)
        else:
            # Nothing to select; returning 0 leaves the keypress unhandled.
            return 0

    def __serviceChanged(self):
        # Drop the selection whenever the service starts or ends.
        if self.selected_subtitle:
            self.selected_subtitle = None
            self.subtitle_window.hide()

    def __updatedInfo(self):
        # Re-enable a cached subtitle track when none is selected yet.
        if not self.selected_subtitle:
            subtitle = self.getCurrentServiceSubtitle()
            cachedsubtitle = subtitle.getCachedSubtitle()
            if cachedsubtitle:
                self.enableSubtitle(cachedsubtitle)

    def enableSubtitle(self, selectedSubtitle):
        """Enable the given subtitle track, or disable subtitles if falsy."""
        subtitle = self.getCurrentServiceSubtitle()
        self.selected_subtitle = selectedSubtitle
        if subtitle and self.selected_subtitle:
            subtitle.enableSubtitles(self.subtitle_window.instance, self.selected_subtitle)
            self.subtitle_window.show()
        else:
            if subtitle:
                subtitle.disableSubtitles(self.subtitle_window.instance)
            self.subtitle_window.hide()

    def restartSubtitle(self):
        if self.selected_subtitle:
            self.enableSubtitle(self.selected_subtitle)
class InfoBarServiceErrorPopupSupport:
    """Show a popup describing tune failures; clear it when a service starts."""

    def __init__(self):
        self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
            {
                iPlayableService.evTuneFailed: self.__tuneFailed,
                iPlayableService.evTunedIn: self.__serviceStarted,
                iPlayableService.evStart: self.__serviceStarted
            })
        self.__serviceStarted()

    def __serviceStarted(self):
        self.closeNotificationInstantiateDialog()
        self.last_error = None
        Notifications.RemovePopup(id = "ZapError")

    def __tuneFailed(self):
        if not config.usage.hide_zap_errors.value or not config.usage.remote_fallback_enabled.value:
            service = self.session.nav.getCurrentService()
            info = service and service.info()
            error = info and info.getInfo(iServiceInformation.sDVBState)
            if not config.usage.remote_fallback_enabled.value and (error == eDVBServicePMTHandler.eventMisconfiguration or error == eDVBServicePMTHandler.eventNoResources):
                self.session.nav.currentlyPlayingServiceReference = None
                self.session.nav.currentlyPlayingServiceOrGroup = None
            # Avoid repeating the same error popup back to back.
            if error == self.last_error:
                error = None
            else:
                self.last_error = error
            # Map the DVB state code to a user message; unknown/benign
            # states map to None and produce no popup.
            error = {
                eDVBServicePMTHandler.eventNoResources: _("No free tuner!"),
                eDVBServicePMTHandler.eventTuneFailed: _("Tune failed!"),
                eDVBServicePMTHandler.eventNoPAT: _("No data on transponder!\n(Timeout reading PAT)"),
                eDVBServicePMTHandler.eventNoPATEntry: _("Service not found!\n(SID not found in PAT)"),
                eDVBServicePMTHandler.eventNoPMT: _("Service invalid!\n(Timeout reading PMT)"),
                eDVBServicePMTHandler.eventNewProgramInfo: None,
                eDVBServicePMTHandler.eventTuned: None,
                eDVBServicePMTHandler.eventSOF: None,
                eDVBServicePMTHandler.eventEOF: None,
                eDVBServicePMTHandler.eventMisconfiguration: _("Service unavailable!\nCheck tuner configuration!"),
            }.get(error) #this returns None when the key not exist in the dict
            if error and not config.usage.hide_zap_errors.value:
                self.closeNotificationInstantiateDialog()
                if hasattr(self, "dishDialog") and not self.dishDialog.dishState():
                    Notifications.AddPopup(text = error, type = MessageBox.TYPE_ERROR, timeout = 5, id = "ZapError")
class InfoBarPowersaver:
def __init__(self):
self.inactivityTimer = eTimer()
self.inactivityTimer.callback.append(self.inactivityTimeout)
self.restartInactiveTimer()
self.sleepTimer = eTimer()
self.sleepStartTime = 0
self.sleepTimer.callback.append(self.sleepTimerTimeout)
eActionMap.getInstance().bindAction('', -maxint - 1, self.keypress)
def keypress(self, key, flag):
if flag:
self.restartInactiveTimer()
def restartInactiveTimer(self):
time = abs(int(config.usage.inactivity_timer.value))
if time:
self.inactivityTimer.startLongTimer(time)
else:
self.inactivityTimer.stop()
def inactivityTimeout(self):
if config.usage.inactivity_timer_blocktime.value:
curtime = localtime(time())
if curtime.tm_year > 1970: #check if the current time is valid
curtime = (curtime.tm_hour, curtime.tm_min, curtime.tm_sec)
begintime = tuple(config.usage.inactivity_timer_blocktime_begin.value)
endtime = tuple(config.usage.inactivity_timer_blocktime_end.value)
begintime_extra = tuple(config.usage.inactivity_timer_blocktime_extra_begin.value)
endtime_extra = tuple(config.usage.inactivity_timer_blocktime_extra_end.value)
if begintime <= endtime and (curtime >= begintime and curtime < endtime) or begintime > endtime and (curtime >= begintime or curtime < endtime) or config.usage.inactivity_timer_blocktime_extra.value and\
(begintime_extra <= endtime_extra and (curtime >= begintime_extra and curtime < endtime_extra) or begintime_extra > endtime_extra and (curtime >= begintime_extra or curtime < endtime_extra)):
duration = (endtime[0]*3600 + endtime[1]*60) - (curtime[0]*3600 + curtime[1]*60 + curtime[2])
if duration:
if duration < 0:
duration += 24*3600
self.inactivityTimer.startLongTimer(duration)
return
if Screens.Standby.inStandby:
self.inactivityTimeoutCallback(True)
else:
message = _("Your receiver will got to standby due to inactivity.") + "\n" + _("Do you want this?")
self.session.openWithCallback(self.inactivityTimeoutCallback, MessageBox, message, timeout=60, simple=True, default=False, timeout_default=True)
def inactivityTimeoutCallback(self, answer):
if answer:
self.goStandby()
else:
print "[InfoBarPowersaver] abort"
def sleepTimerState(self):
if self.sleepTimer.isActive():
return (self.sleepStartTime - time()) / 60
return 0
def setSleepTimer(self, sleepTime):
print "[InfoBarPowersaver] set sleeptimer", sleepTime
if sleepTime:
m = abs(sleepTime / 60)
message = _("The sleep timer has been activated.") + "\n" + _("And will put your receiver in standby over ") + ngettext("%d minute", "%d minutes", m) % m
self.sleepTimer.startLongTimer(sleepTime)
self.sleepStartTime = time() + sleepTime
else:
message = _("The sleep timer has been disabled.")
self.sleepTimer.stop()
Notifications.AddPopup(message, type = MessageBox.TYPE_INFO, timeout = 5)
def sleepTimerTimeout(self):
if not Screens.Standby.inStandby:
list = [ (_("Yes"), True), (_("Extend sleeptimer 15 minutes"), "extend"), (_("No"), False) ]
message = _("Your receiver will got to stand by due to the sleeptimer.")
message += "\n" + _("Do you want this?")
self.session.openWithCallback(self.sleepTimerTimeoutCallback, MessageBox, message, timeout=60, simple=True, list=list, default=False, timeout_default=True)
def sleepTimerTimeoutCallback(self, answer):
if answer == "extend":
print "[InfoBarPowersaver] extend sleeptimer"
self.setSleepTimer(900)
elif answer:
self.goStandby()
else:
print "[InfoBarPowersaver] abort"
self.setSleepTimer(0)
def goStandby(self):
if not Screens.Standby.inStandby:
print "[InfoBarPowersaver] goto standby"
self.session.open(Screens.Standby.Standby)
class InfoBarHDMI:
    """Toggle between HDMI-IN (service type 8192) and the regular service."""

    def HDMIIn(self):
        slist = self.servicelist
        if slist.dopipzap:
            # Toggle HDMI-IN inside the picture-in-picture window.
            curref = self.session.pip.getCurrentService()
            if curref and curref.type != 8192:
                self.session.pip.playService(eServiceReference('8192:0:1:0:0:0:0:0:0:0:'))
            else:
                self.session.pip.playService(slist.servicelist.getCurrent())
        else:
            curref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
            if curref and curref.type != 8192:
                # Remember the playback position of file-based media before
                # switching away to HDMI-IN.
                if curref and curref.type != -1 and os.path.splitext(curref.toString().split(":")[10])[1].lower() in AUDIO_EXTENSIONS.union(MOVIE_EXTENSIONS, DVD_EXTENSIONS):
                    setResumePoint(self.session)
                self.session.nav.playService(eServiceReference('8192:0:1:0:0:0:0:0:0:0:'))
            elif isStandardInfoBar(self):
                self.session.nav.playService(slist.servicelist.getCurrent())
            else:
                self.session.nav.playService(self.cur_service)
| ssh1/stbgui | lib/python/Screens/InfoBarGenerics.py | Python | gpl-2.0 | 119,400 |
from django.contrib import admin

from .models import EssNutr, FoodCategory

# Expose the nutrition models in the Django admin with the default ModelAdmin.
admin.site.register(EssNutr)
admin.site.register(FoodCategory) | GalinaDimitrova/Essential-nutrients-in-Bulgarian-foods | ess_nutr_site/essential_nutritions/admin.py | Python | gpl-2.0 | 166 |
import models
def borradoPlan(sender, **kwargs):
    """Deletion signal handler: remove the subtype row backing a Plan.

    A plan with tipo "pr" is prepaid, anything else is postpaid; the
    matching PlanPrepago/PlanPostpago rows are deleted along with it.
    """
    plan = kwargs['instance']
    modelo = models.PlanPrepago if plan.tipo == "pr" else models.PlanPostpago
    modelo.objects.filter(codplan=plan).delete()
def insertadoServicio(sender, **kwargs):
insertado = kwargs['instance']
print insertado
if insertado.id is not None:
nuevoPaq = models.Paquete(codpaq=insertado.codserv,nombrepaq=insertado.nombreserv + ' Paquete',precio=insertado.costo)
nuevoPaq.save()
nuevoContiene = models.Contiene(codpaq=nuevoPaq,codserv=insertado,cantidad=1)
nuevoContiene.save()
def borradoServicio(sender, **kwargs):
    """Deletion signal handler: drop the mirror Paquete of a Servicio."""
    servicio = kwargs['instance']
    if servicio.id is None:
        return
    # Remove the link rows first, then the package itself.
    models.Contiene.objects.filter(codpaq=servicio.codserv).delete()
    models.Paquete.objects.filter(codpaq=servicio.codserv).delete()
| gres147679/IngSoftwareRectaFinal | Tarea5/ServiSoft/ServiSoft/WebAccess/signalActions.py | Python | gpl-2.0 | 948 |
# ===========================================================================
# eXe
# Copyright 2004-2005, University of Auckland
# Copyright 2004-2007 eXe Project, New Zealand Tertiary Education Commission
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# ===========================================================================
"""
This class transforms an eXe node into a page on a self-contained website
"""
import logging
import re
from cgi import escape
from urllib import quote
from exe.webui.blockfactory import g_blockFactory
from exe.engine.error import Error
from exe.engine.path import Path
from exe.export.pages import Page, uniquifyNames
from exe.webui import common
from exe import globals as G
log = logging.getLogger(__name__)
# ===========================================================================
class WebsitePage(Page):
"""
This class transforms an eXe node into a page on a self-contained website
"""
def save(self, outputDir, prevPage, nextPage, pages):
    """
    This is the main function. It will render the page and save it to a
    file. 'outputDir' is the directory where the filenames will be saved
    (a 'path' instance)
    """
    outfile = open(outputDir / self.name+".html", "wb")
    try:
        outfile.write(self.render(prevPage, nextPage, pages))
    finally:
        # Fix: close the file even when render() raises, so the handle
        # is not leaked on error.
        outfile.close()
def render(self, prevPage, nextPage, pages):
    """
    Returns an XHTML string rendering this page.
    """
    # XHTML 1.0 Transitional document shell.
    html = u"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
    html += u'<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 '
    html += u'Transitional//EN" '
    html += u'"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">\n'
    #html += u"<html xmlns=\"http://www.w3.org/1999/xhtml\">\n"
    lenguaje = G.application.config.locale
    html += u"<html lang=\"" + lenguaje + "\" xml:lang=\"" + lenguaje + "\" xmlns=\"http://www.w3.org/1999/xhtml\">\n"
    html += u"<!-- Created using eXe: http://exelearning.org -->\n"
    html += u"<head>\n"
    html += u"<link rel=\"stylesheet\" type=\"text/css\" href=\"base.css\" />"
    # The lightbox stylesheet is only needed when the node has a gallery.
    if common.hasGalleryIdevice(self.node):
        html += u"<link rel=\"stylesheet\" type=\"text/css\" href=\"exe_lightbox.css\" />"
    html += u"<link rel=\"stylesheet\" type=\"text/css\" href=\"content.css\" />"
    html += u"<link rel=\"stylesheet\" type=\"text/css\" href=\"nav.css\" />"
    html += u"<title>"
    # Home node (id '0') uses the package title; other nodes render
    # "node title | package title" when a package title exists.
    if self.node.id=='0':
        if self.node.package.title!='':
            html += escape(self.node.package.title)
        else:
            html += escape(self.node.titleLong)
    else:
        if self.node.package.title!='':
            html += escape(self.node.titleLong)+" | "+escape(self.node.package.title)
        else:
            html += escape(self.node.titleLong)
    html += u" </title>\n"
    html += u"<link rel=\"shortcut icon\" href=\"favicon.ico\" type=\"image/x-icon\" />\n"
    html += u"<meta http-equiv=\"Content-Type\" content=\"text/html; "
    html += u" charset=utf-8\" />\n";
    html += u'<script type="text/javascript" src="common.js"></script>\n'
    #modification by lernmodule.net
    html += u'<script type="text/javascript" src="lernmodule_net.js"></script>\n'
    #end modification
    html += u"</head>\n"
    html += u"<body>\n"
    html += u"<div id=\"content\">\n"
    # Header band with optional background image; empty placeholder otherwise.
    if self.node.package.backgroundImg or self.node.package.title:
        html += u"<div id=\"header\" "
        if self.node.package.backgroundImg:
            html += u" style=\"background-image: url("
            html += quote(self.node.package.backgroundImg.basename())
            html += u"); "
            if self.node.package.backgroundImgTile:
                html += "background-repeat: repeat-x;"
            else:
                html += "background-repeat: no-repeat;"
            html += u"\""
        html += u">\n"
        html += escape(self.node.package.title)
        html += u"</div>\n"
    else:
        html += "<div id=\"emptyHeader\"></div>"
    # add left navigation html
    html += u"<div id=\"siteNav\">\n"
    html += self.leftNavigationBar(pages)
    html += u"</div>\n"
    html += "<div id='topPagination'>"
    html += self.getNavigationLink(prevPage, nextPage)
    html += "</div>"
    html += u"<div id=\"main\">\n"
    style = self.node.package.style
    html += '<div id=\"nodeDecoration\">'
    html += '<h1 id=\"nodeTitle\">'
    html += escape(self.node.titleLong)
    html += '</h1></div>\n'
    # Render every iDevice of this node through the block factory.
    for idevice in self.node.idevices:
        html += u'<div class="%s" id="id%s">\n' % (idevice.klass,
                idevice.id)
        block = g_blockFactory.createBlock(None, idevice)
        if not block:
            log.critical("Unable to render iDevice.")
            raise Error("Unable to render iDevice.")
        if hasattr(idevice, "isQuiz"):
            html += block.renderJavascriptForWeb()
        if idevice.title != "Forum Discussion":
            html += self.processInternalLinks(self.node.package,
                block.renderView(style))
        html += u'</div>\n' # iDevice div
    html += "<div id='bottomPagination'>"
    #modification by lernmodule.net
    html += u"<div id=\"lmsubmit\"></div><script type=\"text/javascript\" language=\"javascript\">doStart();</script></body></html>\n"
    #end modification
    html += self.getNavigationLink(prevPage, nextPage)
    html += "</div>"
    # writes the footer for each page
    html += self.renderLicense()
    html += self.renderFooter()
    html += u"</div>\n"
    html += u"</div>\n"
    html += u"</body></html>\n"
    html = html.encode('utf8')
    # JR: Strip the eXe-internal equation attributes from the output.
    aux = re.compile("exe_math_latex=\"[^\"]*\"")
    html = aux.sub("", html)
    aux = re.compile("exe_math_size=\"[^\"]*\"")
    html = aux.sub("", html)
    # JR: Unescape the ampersand in glossary links.
    html = html.replace("&amp;concept", "&concept")
    # Remove "resources/" from data="resources/ and the url param
    html = html.replace("video/quicktime\" data=\"resources/", "video/quicktime\" data=\"")
    html = html.replace("application/x-mplayer2\" data=\"resources/", "application/x-mplayer2\" data=\"")
    html = html.replace("audio/x-pn-realaudio-plugin\" data=\"resources/", "audio/x-pn-realaudio-plugin\" data=\"")
    html = html.replace("<param name=\"url\" value=\"resources/", "<param name=\"url\" value=\"")
    return html
def leftNavigationBar(self, pages, inSameLevelTitle = True, excludeTitle = False):
    """
    Generate the left navigation (site map) HTML for this page.

    pages            -- ordered list of exported pages; each carries a
                        .depth level and a .node reference into the tree.
    inSameLevelTitle -- keep the package-title page on the same <ul> level
                        as its first-level children (depth starts at 1).
    excludeTitle     -- omit the root/title page from the menu entirely.
    Returns a nested <ul> fragment as a string.
    """
    if inSameLevelTitle:
        depth = 1
    else:
        depth = 0
    # Chain of ancestors of the page being rendered; pages on this path get
    # "active"/"current-page-parent" styling, other branches are dimmed.
    nodePath = [None] + list(self.node.ancestors()) + [self.node]
    html = "<ul>\n"
    for page in pages:
        if page.node.parent == None and not inSameLevelTitle:
            page.depth = 0
        if page.node.parent == None and excludeTitle:
            # Skip the title entry itself but remember we consumed a level.
            depth = 1
            continue
        # Open nested <ul> levels until we reach this page's depth.
        while depth < page.depth:
            html += "<ul"
            if page.node.parent not in nodePath:
                # Branch not on the path to the current page.
                html += " class=\"other-section\""
            html += ">\n"
            depth += 1
        # Close levels when the tree walk comes back up.
        while depth > page.depth:
            html += "</ul>\n</li>\n"
            depth -= 1
        if page.node == self.node:
            # Entry for the page currently being rendered.
            html += "<li id=\"active\"><a href=\""+quote(page.name)+".html\" "
            if page.node.children:
                html += "class=\"active daddy"
            else:
                html += "class=\"active no-ch"
        elif page.node in nodePath and page.node.parent != None:
            # An ancestor of the current page.
            html += "<li class=\"current-page-parent\"><a href=\""+quote(page.name)+".html\" "
            if page.node.children:
                html += "class=\"current-page-parent daddy"
            # NOTE(review): no else-branch here — a childless ancestor entry
            # never opens its class="..." attribute; looks like a missing
            # "no-ch" case. Confirm against upstream eXe sources.
        else:
            # Any other page.
            html += "<li><a href=\""+quote(page.name)+".html\" class=\""
            if page.node.children:
                html += "daddy"
            else:
                html += "no-ch"
        if page.node.id=="0":
            # The root node gets an extra marker class.
            html += " main-node"
        html += "\">"
        html += escape(page.node.titleShort)
        html += "</a>"
        if inSameLevelTitle and page.node.id=="0":
            html += "</li>"
        if not page.node.children and page.node.id!="0":
            html += "</li>\n"
    # Close whatever levels remain open.
    if excludeTitle or inSameLevelTitle:
        html += "</ul>\n"
    else:
        html += "</ul></li></ul>\n"
    return html
def getNavigationLink(self, prevPage, nextPage):
    """
    Build the previous/next pagination links for this page.
    Either link is omitted when the corresponding page is None.
    """
    parts = ["<div class=\"pagination noprt\">"]
    if prevPage:
        parts.append("<a href=\"" + quote(prevPage.name) + ".html\" class=\"prev\">")
        parts.append("<span>« </span>%s</a>" % _('Previous'))
    if nextPage:
        if prevPage:
            # Separator only when both links are present.
            parts.append(" | ")
        parts.append("<a href=\"" + quote(nextPage.name) + ".html\" class=\"next\">")
        parts.append(" %s<span> »</span></a>" % _('Next'))
    parts.append("</div>\n")
    return "".join(parts)
def processInternalLinks(self, package, html):
    """
    take care of any internal links which are in the form of:
    href="exe-node:Home:Topic:etc#Anchor"
    For this WebSite Export, go ahead and process the link entirely,
    using the fully exported (and unique) file names for each node.
    """
    # Delegates entirely to the shared helper; exported node filenames are
    # unique, so exe-node: links can be rewritten to plain relative hrefs.
    return common.renderInternalLinkNodeFilenames(package, html)
| kohnle-lernmodule/palama | exe/export/websitepage.py | Python | gpl-2.0 | 10,861 |
from flask_wtf import Form
from flask_babel import gettext
from wtforms import StringField, validators
class ApplicationForm(Form):
    """OAuth client registration form: name, description, homepage and
    authorization callback URL, all with translated validation messages."""
    # Human-readable application name, 3-64 characters, required.
    name = StringField(gettext('Application name'), [
        validators.DataRequired(message=gettext("Application name field is empty.")),
        validators.Length(min=3, message=gettext("Application name needs to be at least 3 characters long.")),
        validators.Length(max=64, message=gettext("Application name needs to be at most 64 characters long."))])
    # Short description shown to end users, 3-512 characters, required.
    desc = StringField(gettext('Description'), [
        validators.DataRequired(message=gettext("Client description field is empty.")),
        validators.Length(min=3, message=gettext("Client description needs to be at least 3 characters long.")),
        validators.Length(max=512, message=gettext("Client description needs to be at most 512 characters long."))])
    # Public web site of the application; must be a well-formed URL.
    website = StringField(gettext('Homepage'), [
        validators.DataRequired(message=gettext("Homepage field is empty.")),
        validators.URL(message=gettext("Homepage is not a valid URI."))])
    # OAuth redirect/callback endpoint; must be a well-formed URL.
    redirect_uri = StringField(gettext('Authorization callback URL'), [
        validators.DataRequired(message=gettext("Authorization callback URL field is empty.")),
        validators.URL(message=gettext("Authorization callback URL is invalid."))])
| dufferzafar/critiquebrainz | critiquebrainz/frontend/profile/applications/forms.py | Python | gpl-2.0 | 1,318 |
from __future__ import print_function
import sys
sys.path.append('..')
from src.sim import Sim
from src.packet import Packet
from networks.network import Network
class BroadcastApp(object):
    """Tiny sink application: logs every broadcast packet it receives."""

    def __init__(self, node):
        # The simulator node this application is attached to.
        self.node = node

    def receive_packet(self, packet):
        # Log arrival time, receiving host, and the packet identifier.
        now = Sim.scheduler.current_time()
        print(now, self.node.hostname, packet.ident)
def main():
    """Drive the five-node broadcast demo: three broadcasts with TTLs chosen
    so that different subsets of the topology receive each one."""
    # parameters
    Sim.scheduler.reset()
    # setup network
    net = Network('../networks/five-nodes.txt')
    # get nodes
    n1 = net.get_node('n1')
    n2 = net.get_node('n2')
    n3 = net.get_node('n3')
    n4 = net.get_node('n4')
    n5 = net.get_node('n5')
    # setup broadcast application (one logging sink per node)
    b1 = BroadcastApp(n1)
    n1.add_protocol(protocol="broadcast", handler=b1)
    b2 = BroadcastApp(n2)
    n2.add_protocol(protocol="broadcast", handler=b2)
    b3 = BroadcastApp(n3)
    n3.add_protocol(protocol="broadcast", handler=b3)
    b4 = BroadcastApp(n4)
    n4.add_protocol(protocol="broadcast", handler=b4)
    b5 = BroadcastApp(n5)
    n5.add_protocol(protocol="broadcast", handler=b5)
    # send a broadcast packet from 1 with TTL 2, so everyone should get it
    # (destination_address=0 is the broadcast address)
    p = Packet(
        source_address=n1.get_address('n2'),
        destination_address=0,
        ident=1, ttl=2, protocol='broadcast', length=100)
    Sim.scheduler.add(delay=0, event=p, handler=n1.send_packet)
    # send a broadcast packet from 1 with TTL 1, so just nodes 2 and 3
    # should get it
    p = Packet(
        source_address=n1.get_address('n2'),
        destination_address=0,
        ident=2, ttl=1, protocol='broadcast', length=100)
    Sim.scheduler.add(delay=1, event=p, handler=n1.send_packet)
    # send a broadcast packet from 3 with TTL 1, so just nodes 1, 4, and 5
    # should get it
    p = Packet(
        source_address=n3.get_address('n1'),
        destination_address=0,
        ident=3, ttl=1, protocol='broadcast', length=100)
    Sim.scheduler.add(delay=2, event=p, handler=n3.send_packet)
    # run the simulation
    Sim.scheduler.run()

if __name__ == '__main__':
    main()
| zappala/bene | examples/broadcast.py | Python | gpl-2.0 | 2,081 |
from .trace_decorator import getLog
from .exception import StateError
class State(object):
    """Stack of named build states with an overall result.

    ``start()``/``finish()`` must be strictly nested: ``finish`` verifies
    that the state being closed is the innermost open one. ``alldone()``
    asserts the stack has been fully unwound. All misuse raises StateError.
    """

    def __init__(self):
        # Stack of currently-open state names; innermost state is last.
        self._state = []
        # Overall outcome: can be "unknown", "success" or "fail".
        self.result = "unknown"
        self.state_log = getLog("mockbuild.Root.state")

    def state(self):
        """Return the innermost open state.

        Raises StateError when no state has been started.
        """
        # Idiomatic truthiness check instead of len() comparison.
        if not self._state:
            raise StateError("state called on empty state stack")
        return self._state[-1]

    def start(self, state):
        """Push *state* onto the stack and log it; None is rejected."""
        if state is None:
            raise StateError("start called with None State")
        self._state.append(state)
        # Lazy %-style logging args: formatting is skipped if not emitted.
        self.state_log.info("Start: %s", state)

    def finish(self, state):
        """Pop *state* from the stack; it must match the innermost state.

        Raises StateError on an empty stack or a nesting mismatch.
        """
        if not self._state:
            raise StateError("finish called on empty state list")
        current = self._state.pop()
        if state != current:
            raise StateError("state finish mismatch: current: %s, state: %s"
                             % (current, state))
        self.state_log.info("Finish: %s", state)

    def alldone(self):
        """Raise StateError if any started state was never finished."""
        if self._state:
            raise StateError("alldone called with pending states: %s"
                             % ",".join(self._state))
| nbartos/mock | py/mockbuild/state.py | Python | gpl-2.0 | 1,124 |
# coding: utf-8
from django.conf.urls import patterns, include, url
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
# URL routes for the LAK 2014 site (legacy Django patterns() syntax).
urlpatterns = patterns('',
    # Landing page.
    url(r'^$', 'LAK_2014.views.index', name='index'),
    # Uncomment the admin/doc line below to enable admin documentation:
    # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
    # Uncomment the next line to enable the admin:
    # url(r'^admin/', include(admin.site.urls)),
)
# Serve static files through the staticfiles app (development helper).
urlpatterns += staticfiles_urlpatterns()
# Custom "page not found" handler.
handler404 = 'LAK_2014.views.view404'
| aitoralmeida/geo-lak | LAK_2014/LAK_2014/urls.py | Python | gpl-2.0 | 657 |
import threading
import time
class Status:
    """Thread-safe integer counter shared between worker threads."""

    # Class-level defaults; __init__ installs a per-instance lock.
    lock = None
    statusno = 0

    def __init__(self):
        self.lock = threading.Lock()

    def update(self, add):
        """Atomically add *add* to the counter."""
        # Using the lock as a context manager guarantees release even if
        # the update raises (the manual acquire/release form did not).
        with self.lock:
            self.statusno = self.statusno + add

    def get(self):
        """Atomically read the current counter value."""
        with self.lock:
            return self.statusno
def md5calc(status, args):
    """Worker body: simulate one second of work per item in *args*,
    bumping the shared *status* counter once per item."""
    for _item in args:
        time.sleep(1)        # stand-in for an expensive hash computation
        status.update(1)     # report one more item processed
def show_status(status):
    """Monitor: print the shared counter once per second until only this
    thread and the main thread remain alive."""
    while threading.active_count() > 2:
        time.sleep(1)
        # print(x) with a single argument emits the same output on
        # Python 2 as the original print statement did.
        print(status.get())
# Shared counter updated by the workers and polled by the monitor thread.
status = Status()
slaves = []
# Launch five workers; each sleeps 1 s per item in its list (three items)
# and increments the counter once per item — the item values themselves
# are not used by md5calc.
for i in range(5):
    t = threading.Thread(target=md5calc, args=(status, [1,2,5]))
    t.start()
    slaves.append(t)
# Monitor thread prints progress until only main + monitor remain.
m = threading.Thread(target=show_status, args=(status,))
m.start()
m.join()
# Reap the worker threads.
for t in slaves:
    t.join()
| RedFoxPi/Playground | threadtest.py | Python | gpl-2.0 | 929 |
from django.views.generic import DetailView
from entries.models import Entry
class EntryView(DetailView):
    """Detail page for a single Entry, looked up by its `sname` slug."""
    model = Entry
    template_name = "entry.html"
    # Name under which the object is exposed to the template context.
    context_object_name = 'entry'
    # Model field matched against the URL slug.
    slug_field = 'sname'
    # URLconf keyword argument carrying the slug value.
    slug_url_kwarg = 'entry_sname'
from django.shortcuts import render_to_response, get_object_or_404
from django.http import HttpResponseRedirect
from django.template import RequestContext
from django.utils.translation import ugettext as _
from apps.mainapp.models import Report, ReportSubscriber
from apps.mainapp.forms import ReportSubscriberForm
def new( request, report_id ):
    """Show/handle the subscription form for one report.

    GET renders an empty form; POST validates it, refuses duplicate
    subscriptions for the same email, and redirects to the confirmation
    page on success.
    """
    report = get_object_or_404(Report, id=report_id)
    error_msg = None
    if request.method == 'POST':
        form = ReportSubscriberForm( request.POST )
        if form.is_valid():
            # Defer saving so the report FK can be attached first.
            subscriber = form.save( commit = False )
            subscriber.report = report;
            if report.is_subscribed(subscriber.email):
                # Do not create a second subscription for the same address.
                error_msg = _("You are already subscribed to this report.")
            else:
                subscriber.save()
                return( HttpResponseRedirect( '/reports/subscribers/create/' ) )
    else:
        form = ReportSubscriberForm()
    # Re-render the form (with errors, if any).
    return render_to_response("reports/subscribers/new.html",
                { "subscriber_form": form,
                  "report": report,
                  "error_msg": error_msg, },
                context_instance=RequestContext(request))
def create( request ):
    """Static 'subscription created — check your email' page."""
    return render_to_response("reports/subscribers/create.html",
                { },
                context_instance=RequestContext(request))
def confirm( request, confirm_token ):
    """Mark a subscription as confirmed via the token emailed to the user."""
    subscriber = get_object_or_404(ReportSubscriber, confirm_token = confirm_token )
    subscriber.is_confirmed = True
    subscriber.save()
    return render_to_response("reports/subscribers/confirm.html",
                { "subscriber": subscriber, },
                context_instance=RequestContext(request))
def unsubscribe(request, confirm_token ):
    """Delete the subscription identified by *confirm_token* and show a
    farewell message naming the report."""
    subscriber = get_object_or_404(ReportSubscriber, confirm_token = confirm_token )
    # Grab the report before deleting the subscriber row.
    report = subscriber.report
    subscriber.delete()
    return render_to_response("reports/subscribers/message.html",
                { "message": _("You have unsubscribed from updates to:") + report.title, },
                context_instance=RequestContext(request))
| tigeorgia/fixmystreet | apps/mainapp/views/reports/subscribers.py | Python | gpl-2.0 | 2,154 |
import os
import sys
import drivecasa
import logging
import shlex
import shutil
import subprocess
import yaml
import glob
# CASA driver with a very generous timeout (10 days) for long tasks.
casa = drivecasa.Casapy(log2term=True, echo_to_stdout=True, timeout=24*3600*10)
# Standard Stimela container mount points, supplied via the environment.
CONFIG = os.environ["CONFIG"]
INPUT = os.environ["INPUT"]
OUTPUT = os.environ["OUTPUT"]
MSDIR = os.environ["MSDIR"]
# Cab definition: task binary name, parameter list, and junk globs to clean.
with open(CONFIG, "r") as _std:
    cab = yaml.safe_load(_std)
junk = cab["junk"]
args = {}
for param in cab['parameters']:
    name = param['name']
    value = param['value']
    if value is None:
        # Unset parameters fall back to the CASA task defaults.
        continue
    args[name] = value
# One-line CASA script invoking the task with keyword arguments.
script = ['{0}(**{1})'.format(cab['binary'], args)]
def log2term(result):
    """Inspect drivecasa's (stdout_lines, stderr_lines) result tuple.

    Raises RuntimeError when CASA reported a task failure; otherwise, if
    stderr output is present, emits a SEVERE-messages warning.
    """
    if result[1]:
        err = '\n'.join(result[1] if result[1] else [''])
        # drivecasa flags task failures with this phrase in stderr.
        failed = err.lower().find('an error occurred running task') >= 0
        if failed:
            raise RuntimeError('CASA Task failed. See error message above')
        sys.stdout.write('WARNING:: SEVERE messages from CASA run')
# Run the task, then always clean junk files — even when the task fails —
# so repeated container runs start from a clean state.
try:
    result = casa.run_script(script, raise_on_severe=False)
    log2term(result)
finally:
    for item in junk:
        for dest in [OUTPUT, MSDIR]: # these are the only writable volumes in the container
            items = glob.glob("{dest}/{item}".format(**locals()))
            for f in items:
                if os.path.isfile(f):
                    os.remove(f)
                elif os.path.isdir(f):
                    shutil.rmtree(f)
| SpheMakh/Stimela | stimela/cargo/cab/casa47_setjy/src/run.py | Python | gpl-2.0 | 1,403 |
import operator
import math
class Machine():
    """Simple stack based machine designed for genetic programming (GP) experiments.
    Easy to use and forgiving with nonfatal errors.
    See README and tests for examples.

    Programs are newline-separated instructions; everything after ';' on a
    line is a comment. Errors (stack underflow, bad operands, unknown
    opcodes) are deliberately non-fatal: they set has_errors and execution
    continues, which suits randomly-generated GP programs.
    NOTE: Python 2 only (print statements, operator.div, long).
    """
    def __init__(self, debug=False):
        self.stack = []
        self.debug = debug
        self.code = ""
        # When True, run() refuses stacks holding non-numeric values.
        self.stack_safety = False
        self.has_errors = False
        self._max_runlines = 1000   # execution budget; guards infinite loops
        self._max_stack = 1000      # stack size cap used by verify_stack()
        # Opcode dispatch table.
        self.instructions = {
            'CLR': self._clr,
            'PUSH': self._push, # takes 1 value
            'POP': self._pop,
            'SWP': self._swp,
            'ROT': self._rot,
            'DUP': self._dup,
            'INC': self._inc,
            'MUL': lambda: self._operator2(operator.mul),
            'DIV': lambda: self._operator2(operator.div),
            'MOD': lambda: self._operator2(operator.mod),
            'ADD': lambda: self._operator2(operator.add),
            'SUB': lambda: self._operator2(operator.sub),
            'EXP': lambda: self._operator2(operator.pow),
            'MIN': lambda: self._operator2(min),
            'MAX': lambda: self._operator2(max),
            'LOG': lambda: self._operator1(math.log),
            'TRUNC':lambda: self._operator1(math.trunc),
            'JMP': self._jmp, # all jumps take an offset value
            'JZ': self._jz,
            'JE': self._je,
            'JNE': self._jne,
            'JLT': self._jlt,
            'JGT': self._jgt,
            'END': None
        }
    def _operator1(self, operator):
        # Unary op: replace top-of-stack with operator(top); no-op if empty.
        if self.stack:
            self.stack.append(operator(self.stack.pop()))
    def _operator2(self, operator):
        # Binary op applied as operator(top, second). On underflow the
        # stack is reset to [0] rather than raising (forgiving semantics).
        if len(self.stack) < 2:
            self.stack = [0]
        else:
            val = operator(self.stack[-1], self.stack[-2])
            self.stack = self.stack[:-2]
            self.stack.append(val)
    def _clr(self):
        self.stack = []
    def _push(self, a):
        # Silently ignore operands that do not parse as numbers.
        try:
            a = float(a)
            self.stack.append(a)
        except:
            pass
    def _pop(self):
        if self.stack:
            self.stack.pop()
    def _inc(self):
        if self.stack:
            self.stack[-1] += 1
    def _swp(self):
        # Swap the two topmost values.
        if len(self.stack) > 1:
            self.stack[-2], self.stack[-1] = self.stack[-1], self.stack[-2]
    def _rot(self):
        # Rotate: bottom element moves to the top.
        if len(self.stack) > 1:
            self.stack = self.stack[1:] + self.stack[:1]
    def _dup(self):
        if self.stack:
            self.stack.append(self.stack[-1])
    def _jmp(self, a):
        # Relative jump; out-of-range or zero offsets are ignored. The -1
        # compensates for the increment evaluate() applies afterwards.
        n = self._curline + int(a)
        if n == self._curline or n < 0 or n > len(self.lines) - 1:
            return
        self._curline = n-1
    def _jz(self, a):
        # Jump if popped top == 0.
        if self.stack:
            if self.stack.pop() == 0:
                self._jmp(a)
    def _je(self, a):
        # Jump if the two popped values are equal.
        if len(self.stack) > 1:
            if self.stack.pop() == self.stack.pop():
                self._jmp(a)
    def _jne(self, a):
        if len(self.stack) > 1:
            if self.stack.pop() != self.stack.pop():
                self._jmp(a)
    def _jlt(self, a):
        # Jump if top < second (pop order: top first).
        if len(self.stack) > 1:
            if self.stack.pop() < self.stack.pop():
                self._jmp(a)
    def _jgt(self, a):
        if len(self.stack) > 1:
            if self.stack.pop() > self.stack.pop():
                self._jmp(a)
    def verify_stack(self):
        """Return True if the stack is within size limits and holds only
        numeric values (int/float/long)."""
        if len(self.stack) > self._max_stack:
            return False
        allowed_types = [int, float, long]
        return all([type(v) in allowed_types for v in self.stack])
    def code_listing(self):
        """Print the program with line numbers (normalized to upper case)."""
        self.lines = self.code.split('\n')
        for num, line in enumerate(self.lines):
            line = line.strip().upper()
            print num, '\t', line
    def evaluate(self, line):
        """Execute one instruction line; return False only on END."""
        if line:
            debug = self.debug
            if debug: print self._curline, '> ', line
            tokens = line.split()
            instr = tokens[0]
            if instr == 'END': return False
            if len(tokens) > 1:
                values = tokens [1:]
            else: values = []
            try:
                self.instructions[instr](*values)
            except Exception as e:
                # Unknown opcode / bad operand count: record and continue.
                if debug: print "Error:", e
                self.has_errors = True
            if debug: print self.stack, '\n'
        self._curline += 1
        return True
    def run(self):
        """Execute self.code from the top; returns has_errors."""
        # Note: some class members are duplicated with locals for faster comparisons in the main loop
        self._curline = 0
        self.has_errors = False
        self._lines_executed = 0
        lines_exec = 0
        max_exec = self._max_runlines
        # Strip ';' comments, whitespace, and upper-case each line.
        lines = [line.split(';')[0].strip().upper() for line in self.code.split('\n')]
        self.lines = lines
        if self.stack_safety and not self.verify_stack():
            if self.debug: print "Invalid stack, must only contain ints, longs, and floats"
            return
        while(self.evaluate(self.lines[self._curline])):
            lines_exec += 1
            if lines_exec > max_exec:
                # Runaway program (likely a jump loop): abort with error.
                if self.debug: print "Reached maximum runlines:", self._max_runlines
                self.has_errors = True
                break
            if self._curline >= len(self.lines):
                break
        self._lines_executed = lines_exec
        return self.has_errors
| sangelone/gstack | stackmachine.py | Python | gpl-2.0 | 5,419 |
__author__ = 'dani882'
# lecture 3.2, slide 6
# Find the cube root of a perfect cube
# Exhaustive enumeration: count ans upward until ans**3 reaches |x|.
x = int(raw_input('Enter an integer: '))
ans = 0
while ans**3 < abs(x):
    ans = ans + 1
if ans**3 != abs(x):
    # Overshot |x| without hitting it exactly: not a perfect cube.
    print(str(x) + ' is not a perfect cube')
else:
    if x < 0:
        # The cube root of a negative cube is negative.
        ans = -ans
    print('Cube root of ' + str(x) + ' is ' + str(ans))
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# mdfjr - admin.py
#
# Created by PyCharm.
# User: fajar
# Date: 5/13/17
# Time: 3:50 PM
#
#
from django.contrib import admin
from . import models
@admin.register(models.OAuthUser)
class OAuthUserAdmin(admin.ModelAdmin):
    """Admin change list for OAuth accounts linked to local users."""
    # Columns shown in the change-list view.
    list_display = ['__str__', 'provider', 'uid', 'email', 'user']
| heyost/django-skeleton | oauth/admin.py | Python | gpl-2.0 | 345 |
#
# BitBake Graphical GTK based Dependency Explorer
#
# Copyright (C) 2007 Ross Burton
# Copyright (C) 2007 - 2008 Richard Purdie
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import sys
import gi
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk, Gdk, GObject
from multiprocessing import Queue
import threading
from xmlrpc import client
import time
import bb
import bb.event
# Package Model: column index into the package ListStore (task name).
(COL_PKG_NAME) = (0)
# Dependency Model: row-type tags (forward vs. reverse dependency) and
# column indices into the depends ListStore (type, parent task, dependency).
(TYPE_DEP, TYPE_RDEP) = (0, 1)
(COL_DEP_TYPE, COL_DEP_PARENT, COL_DEP_PACKAGE) = (0, 1, 2)
class PackageDepView(Gtk.TreeView):
    """Tree view showing dependencies of one type for the selected task."""

    def __init__(self, model, dep_type, label):
        Gtk.TreeView.__init__(self)
        self.current = None
        self.dep_type = dep_type
        self.filter_model = model.filter_new()
        self.filter_model.set_visible_func(self._filter, data=None)
        self.set_model(self.filter_model)
        column = Gtk.TreeViewColumn(label, Gtk.CellRendererText(), text=COL_DEP_PACKAGE)
        self.append_column(column)

    def _filter(self, model, iter, data):
        # Keep only rows of our dependency type belonging to the
        # currently selected task.
        row = model[iter]
        return (row[COL_DEP_TYPE] == self.dep_type
                and row[COL_DEP_PARENT] == self.current)

    def set_current_package(self, package):
        # Remember the selection and re-run the visibility filter.
        self.current = package
        self.filter_model.refilter()
class PackageReverseDepView(Gtk.TreeView):
    """Tree view showing the tasks that depend on the selected task."""

    def __init__(self, model, label):
        Gtk.TreeView.__init__(self)
        self.current = None
        self.filter_model = model.filter_new()
        self.filter_model.set_visible_func(self._filter)
        self.set_model(self.filter_model)
        column = Gtk.TreeViewColumn(label, Gtk.CellRendererText(), text=COL_DEP_PARENT)
        self.append_column(column)

    def _filter(self, model, iter, data):
        # Keep rows whose dependency target is the selected task.
        return model[iter][COL_DEP_PACKAGE] == self.current

    def set_current_package(self, package):
        # Remember the selection and re-run the visibility filter.
        self.current = package
        self.filter_model.refilter()
class DepExplorer(Gtk.Window):
    """Main window: master task list on the left, forward- and reverse-
    dependency panes on the right, all backed by two shared ListStores."""
    def __init__(self):
        Gtk.Window.__init__(self)
        self.set_title("Task Dependency Explorer")
        self.set_default_size(500, 500)
        self.connect("delete-event", Gtk.main_quit)
        # Create the data models:
        # pkg_model holds one row per task; depends_model holds
        # (type, parent task, dependency) triples shared by both dep panes.
        self.pkg_model = Gtk.ListStore(GObject.TYPE_STRING)
        self.pkg_model.set_sort_column_id(COL_PKG_NAME, Gtk.SortType.ASCENDING)
        self.depends_model = Gtk.ListStore(GObject.TYPE_INT, GObject.TYPE_STRING, GObject.TYPE_STRING)
        self.depends_model.set_sort_column_id(COL_DEP_PACKAGE, Gtk.SortType.ASCENDING)
        pane = Gtk.HPaned()
        pane.set_position(250)
        self.add(pane)
        # The master list of packages
        scrolled = Gtk.ScrolledWindow()
        scrolled.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)
        scrolled.set_shadow_type(Gtk.ShadowType.IN)
        self.pkg_treeview = Gtk.TreeView(self.pkg_model)
        self.pkg_treeview.get_selection().connect("changed", self.on_cursor_changed)
        column = Gtk.TreeViewColumn("Package", Gtk.CellRendererText(), text=COL_PKG_NAME)
        self.pkg_treeview.append_column(column)
        pane.add1(scrolled)
        scrolled.add(self.pkg_treeview)
        box = Gtk.VBox(homogeneous=True, spacing=4)
        # Task Depends (forward dependencies of the selection)
        scrolled = Gtk.ScrolledWindow()
        scrolled.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)
        scrolled.set_shadow_type(Gtk.ShadowType.IN)
        self.dep_treeview = PackageDepView(self.depends_model, TYPE_DEP, "Dependencies")
        self.dep_treeview.connect("row-activated", self.on_package_activated, COL_DEP_PACKAGE)
        scrolled.add(self.dep_treeview)
        box.add(scrolled)
        pane.add2(box)
        # Reverse Task Depends (what depends on the selection)
        scrolled = Gtk.ScrolledWindow()
        scrolled.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)
        scrolled.set_shadow_type(Gtk.ShadowType.IN)
        self.revdep_treeview = PackageReverseDepView(self.depends_model, "Dependent Tasks")
        self.revdep_treeview.connect("row-activated", self.on_package_activated, COL_DEP_PARENT)
        scrolled.add(self.revdep_treeview)
        box.add(scrolled)
        # NOTE(review): pane.add2(box) is called a second time here after the
        # first call above — looks redundant; confirm against upstream.
        pane.add2(box)
        self.show_all()
    def on_package_activated(self, treeview, path, column, data_col):
        """Double-click in a dependency pane: jump the master list to that
        package (data_col picks which model column holds its name)."""
        model = treeview.get_model()
        package = model.get_value(model.get_iter(path), data_col)
        pkg_path = []
        def finder(model, path, iter, needle):
            # Returning True stops foreach() at the first match.
            package = model.get_value(iter, COL_PKG_NAME)
            if package == needle:
                pkg_path.append(path)
                return True
            else:
                return False
        self.pkg_model.foreach(finder, package)
        if pkg_path:
            self.pkg_treeview.get_selection().select_path(pkg_path[0])
            self.pkg_treeview.scroll_to_cell(pkg_path[0])
    def on_cursor_changed(self, selection):
        """Master-list selection changed: refilter both dependency panes."""
        (model, it) = selection.get_selected()
        if it is None:
            current_package = None
        else:
            current_package = model.get_value(it, COL_PKG_NAME)
        self.dep_treeview.set_current_package(current_package)
        self.revdep_treeview.set_current_package(current_package)
    def parse(self, depgraph):
        """Load bitbake's task-dependency graph into the two models."""
        for task in depgraph["tdepends"]:
            self.pkg_model.insert(0, (task,))
            for depend in depgraph["tdepends"][task]:
                self.depends_model.insert (0, (TYPE_DEP, task, depend))
class gtkthread(threading.Thread):
    """Daemon thread running the Gtk main loop; the class-level `quit`
    event is set when the loop exits (or when Gtk cannot initialize)."""
    quit = threading.Event()
    def __init__(self, shutdown):
        threading.Thread.__init__(self)
        self.setDaemon(True)
        self.shutdown = shutdown
        if not Gtk.init_check()[0]:
            # No usable display: signal immediate shutdown to main().
            sys.stderr.write("Gtk+ init failed. Make sure DISPLAY variable is set.\n")
            gtkthread.quit.set()
    def run(self):
        GObject.threads_init()
        Gdk.threads_init()
        Gtk.main()
        gtkthread.quit.set()
def main(server, eventHandler, params):
    """Entry point for the dependency-explorer UI.

    Starts the Gtk loop on a background thread, asks the bitbake server to
    generate the dependency tree, then pumps server events: progress bars
    during cache load/parsing, the explorer window once the tree arrives.
    Ctrl-C escalates through soft shutdown, forced stop, and hard exit.
    """
    # Counts successive KeyboardInterrupts (0 = none yet).
    shutdown = 0
    gtkgui = gtkthread(shutdown)
    gtkgui.start()
    try:
        params.updateFromServer(server)
        cmdline = params.parseActions()
        if not cmdline:
            print("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
            return 1
        if 'msg' in cmdline and cmdline['msg']:
            print(cmdline['msg'])
            return 1
        cmdline = cmdline['action']
        if not cmdline or cmdline[0] != "generateDotGraph":
            print("This UI requires the -g option")
            return 1
        # Kick off dependency-tree generation; results arrive as events.
        ret, error = server.runCommand(["generateDepTreeEvent", cmdline[1], cmdline[2]])
        if error:
            print("Error running command '%s': %s" % (cmdline, error))
            return 1
        elif ret != True:
            print("Error running command '%s': returned %s" % (cmdline, ret))
            return 1
    except client.Fault as x:
        print("XMLRPC Fault getting commandline:\n %s" % x)
        return
    if gtkthread.quit.isSet():
        # Gtk failed to initialize (no display).
        return
    # Build the window and the modal progress dialog on the Gtk thread.
    Gdk.threads_enter()
    dep = DepExplorer()
    bardialog = Gtk.Dialog(parent=dep,
                           flags=Gtk.DialogFlags.MODAL|Gtk.DialogFlags.DESTROY_WITH_PARENT)
    bardialog.set_default_size(400, 50)
    box = bardialog.get_content_area()
    pbar = Gtk.ProgressBar()
    box.pack_start(pbar, True, True, 0)
    bardialog.show_all()
    bardialog.connect("delete-event", Gtk.main_quit)
    Gdk.threads_leave()
    progress_total = 0
    # Main event-pump loop: translate server events into UI updates.
    while True:
        try:
            event = eventHandler.waitEvent(0.25)
            if gtkthread.quit.isSet():
                # Window closed: force the server to stop.
                _, error = server.runCommand(["stateForceShutdown"])
                if error:
                    print('Unable to cleanly stop: %s' % error)
                break
            if event is None:
                continue
            if isinstance(event, bb.event.CacheLoadStarted):
                progress_total = event.total
                Gdk.threads_enter()
                bardialog.set_title("Loading Cache")
                pbar.set_fraction(0.0)
                Gdk.threads_leave()
            if isinstance(event, bb.event.CacheLoadProgress):
                x = event.current
                Gdk.threads_enter()
                pbar.set_fraction(x * 1.0 / progress_total)
                Gdk.threads_leave()
                continue
            if isinstance(event, bb.event.CacheLoadCompleted):
                continue
            if isinstance(event, bb.event.ParseStarted):
                progress_total = event.total
                if progress_total == 0:
                    continue
                Gdk.threads_enter()
                pbar.set_fraction(0.0)
                bardialog.set_title("Processing recipes")
                Gdk.threads_leave()
            if isinstance(event, bb.event.ParseProgress):
                x = event.current
                Gdk.threads_enter()
                pbar.set_fraction(x * 1.0 / progress_total)
                Gdk.threads_leave()
                continue
            if isinstance(event, bb.event.ParseCompleted):
                Gdk.threads_enter()
                bardialog.set_title("Generating dependency tree")
                Gdk.threads_leave()
                continue
            if isinstance(event, bb.event.DepTreeGenerated):
                # The payload we asked for: hide progress, fill the views.
                Gdk.threads_enter()
                bardialog.hide()
                dep.parse(event._depgraph)
                Gdk.threads_leave()
            if isinstance(event, bb.command.CommandCompleted):
                continue
            if isinstance(event, bb.event.NoProvider):
                # Report missing (R)PROVIDES and shut the server down.
                if event._runtime:
                    r = "R"
                else:
                    r = ""
                extra = ''
                if not event._reasons:
                    if event._close_matches:
                        extra = ". Close matches:\n %s" % '\n '.join(event._close_matches)
                if event._dependees:
                    print("Nothing %sPROVIDES '%s' (but %s %sDEPENDS on or otherwise requires it)%s" % (r, event._item, ", ".join(event._dependees), r, extra))
                else:
                    print("Nothing %sPROVIDES '%s'%s" % (r, event._item, extra))
                if event._reasons:
                    for reason in event._reasons:
                        print(reason)
                _, error = server.runCommand(["stateShutdown"])
                if error:
                    print('Unable to cleanly shutdown: %s' % error)
                break
            if isinstance(event, bb.command.CommandFailed):
                print("Command execution failed: %s" % event.error)
                return event.exitcode
            if isinstance(event, bb.command.CommandExit):
                return event.exitcode
            if isinstance(event, bb.cooker.CookerExit):
                break
            continue
        except EnvironmentError as ioerror:
            # ignore interrupted io (EINTR)
            if ioerror.args[0] == 4:
                pass
        except KeyboardInterrupt:
            # Escalating Ctrl-C handling: shutdown -> force stop -> exit.
            if shutdown == 2:
                print("\nThird Keyboard Interrupt, exit.\n")
                break
            if shutdown == 1:
                print("\nSecond Keyboard Interrupt, stopping...\n")
                _, error = server.runCommand(["stateForceShutdown"])
                if error:
                    print('Unable to cleanly stop: %s' % error)
            if shutdown == 0:
                print("\nKeyboard Interrupt, closing down...\n")
                _, error = server.runCommand(["stateShutdown"])
                if error:
                    print('Unable to cleanly shutdown: %s' % error)
            shutdown = shutdown + 1
            pass
| schleichdi2/OPENNFR-6.1-CORE | bitbake/lib/bb/ui/taskexp.py | Python | gpl-2.0 | 12,394 |
__author__= "barun"
__date__ = "$20 May, 2011 12:19:25 PM$"
## Defines a collection of metrics that can be used to analyze the performance
# of a network.
## Defines a collection of metrics that can be used to analyze the performance
# of a network.
#
# All methods are static; packet lists are iterables of (timestamp, size)
# or (seq_num, timestamp) tuples of strings, as parsed from ns-2 traces.
# NOTE: Python 2 only (uses the `long` builtin).
class Metrics(object):
    ## Calculate average throughput as: total_bytes_rcvd / duration.
    #
    # @param pkts_list An iterator object in the format [(timestamp, size),]
    # @param duration Time duration (in s) over which thruput is to be computed. Typically it is the simulation period.
    # @return Average throughput in Kbps; return -1 if duration is not positive
    @staticmethod
    def average_throughput(pkts_list, duration):
        # NOTE(review): despite the doc above, no -1 is ever returned; a
        # non-positive duration is replaced by the observed trace span.
        avg_thruput = 0
        start = -1
        stop = 0
        if pkts_list:
            for record in pkts_list:
                try:
                    avg_thruput += long(record[1])
                    if start == -1:
                        start = float(record[0])
                    stop = float(record[0])
                except IndexError:
                    # Malformed record: skip it.
                    pass
        if duration <= 0:
            # Fall back to first..last timestamp (epsilon avoids div-by-0).
            duration = stop - start + 0.00000001
        # Bytes -> Kbps.
        avg_thruput = 8 * float(avg_thruput) / (1024 * duration) # Since pkt len is in bytes
        return avg_thruput
    @staticmethod
    ## Calculate instantaneous throughput as total bytes_rcvd at each time instant.
    #
    # <b>Logic</b>: To determine total bytes received at any instant, say, at t = 5, sum
    # up sizes of all packets received in the interval 5.00000... to 5.99999...
    #
    # This procedure is repeated for all the time instances.
    # @param pkts_list An iterator object in the format [(timestamp, size),]
    # @return A list in the form [(time_instance, total_Kbytes),]
    def instantaneous_throughput(pkts_list=None):
        result = []
        start_time = -1 # Anything less than 0
        this_instance = 0        # current 1-second bucket (integer time)
        bytes_this_instance = 0  # bytes accumulated in that bucket
        if pkts_list:
            for record in pkts_list:
                try:
                    if start_time < 0: # This is the first record encountered
                        start_time = float(record[0])
                        this_instance = int(start_time)
                        bytes_this_instance = long(record[1])
                        continue
                    cur_time = float(record[0])
                    if this_instance < cur_time and\
                        cur_time < (this_instance + 1):
                        # Still inside the current 1-second bucket.
                        bytes_this_instance += long(record[1])
                    else:
                        # Bucket boundary crossed: emit and start the next.
                        result.append( (this_instance, bytes_this_instance * 8 / 1024) )
                        this_instance += 1
                        bytes_this_instance = long(record[1])
                except IndexError:
                    pass
        # Append the last record
        result.append( (this_instance, bytes_this_instance * 8 / 1024) )
        return result
    @staticmethod
    ## Cumulative Kbits received, sampled once per second.
    # @param pkts_list An iterator object in the format [(timestamp, size),]
    # @return A list in the form [(time_instance, cumulative_Kbits),]
    def cumulative_bytes_received(pkts_list=None):
        result = []
        start_time = -1 # Anything less than 0
        this_instance = 0
        bytes_this_instance = 0  # running total since the first packet
        if pkts_list:
            for record in pkts_list:
                try:
                    if start_time < 0:
                        start_time = float(record[0])
                        this_instance = int(start_time)
                        bytes_this_instance = long(record[1])
                        continue
                    cur_time = float(record[0])
                    bytes_this_instance += long(record[1])
                    if this_instance < cur_time and\
                        cur_time < (this_instance + 1):
                        continue
                    else:
                        # Sample the running total at each 1-second tick.
                        result.append( (this_instance, ( float(bytes_this_instance / 1024) ) * 8 ) )
                        this_instance += 1
                except IndexError:
                    pass
        # Append the last record
        result.append( (this_instance, ( float(bytes_this_instance / 1024) ) * 8 ) )
        return result
    @staticmethod
    ## Calculate throughput as total bytes_rcvd upto current instance of time / total duration upto current instance
    # @param pkts_list An iterator object in the format [(timestamp, size),]
    # @return A list in the form [(time_instance, total_bytes),]
    def cumulative_throughput(pkts_list=None):
        result = []
        start_time = -1 # Anything less than 0
        this_instance = 0
        bytes_this_instance = 0  # running byte total since the first packet
        if pkts_list:
            for record in pkts_list:
                try:
                    if start_time < 0:
                        start_time = float(record[0])
                        this_instance = int(start_time)
                        bytes_this_instance = long(record[1])
                        continue
                    cur_time = float(record[0])
                    bytes_this_instance += long(record[1])
                    if this_instance < cur_time and\
                        cur_time < (this_instance + 1):
                        continue
                    else:
                        # Running average: total Kbits / elapsed seconds.
                        result.append( (this_instance, ( float(bytes_this_instance / 1024) / ( this_instance - int(start_time) + 1) ) * 8 ) )
                        this_instance += 1
                except IndexError:
                    pass
        # Append the last record
        result.append( (this_instance, ( float(bytes_this_instance / 1024) / ( this_instance - int(start_time) + 1) ) * 8 ) )
        return result
    ## Return the end to end delay for each packet moving between a source and
    # destination node, and identified by a flow ID. The delay is computed as
    # the difference between sending time of the packet at source node and
    # receiving time of the packet at the destination node.
    # @param send_pkts_list An iterator object in the format [(seq_num, timestamp)]
    # @param rcvd_pkts_list An iterator object in the format [(seq_num, timestamp)]
    # @return A list in the form [(seq_num, delay),]
    @staticmethod
    def end2end_delay(send_pkts_list=None, rcvd_pkts_list=None):
        # Index both traces by sequence number for O(1) matching.
        send_pkts = {}
        rcvd_pkts = {}
        for pkt in send_pkts_list:
            send_pkts[pkt[0]] = float(pkt[1])
        for pkt in rcvd_pkts_list:
            rcvd_pkts[pkt[0]] = float(pkt[1])
        pkt_delay = []
        for seq_num in send_pkts:
            if seq_num in rcvd_pkts:
                # Guard against clock skew / malformed traces.
                if rcvd_pkts[seq_num] >= send_pkts[seq_num]:
                    delay = rcvd_pkts[seq_num] - send_pkts[seq_num]
                    pkt_delay.append( (seq_num, delay) )
        # Sort pkt_delay in integer order of seq_num -- otherwise displayed
        # graph would be garbage
        pkt_delay = [ ( int(e[0]), e[1], ) for e in pkt_delay ]
        pkt_delay.sort()
        return pkt_delay
    ## Count retransmissions per sequence number (extra sends beyond the first).
    # @param send_pkts_list An iterator object in the format [seq_num]
    # @return A sorted list in the form [(seq_num, retransmit_count),]
    @staticmethod
    def packet_retransmissions(send_pkts_list=None):
        send_pkts = {}
        send_pkts_list = [ int(item) for item in send_pkts_list ]
        for seq_num in send_pkts_list:
            if seq_num in send_pkts:
                send_pkts[seq_num] += 1
            else:
                # First transmission does not count as a retransmit.
                send_pkts[seq_num] = 0
        pkt_retransmits = []
        for (seq_num, retransmits) in send_pkts.items():
            if retransmits != 0:
                pkt_retransmits.append( (seq_num, retransmits) )
        pkt_retransmits.sort()
        return pkt_retransmits
| barun-saha/ns2web | ns2trace/metrics.py | Python | gpl-2.0 | 8,308 |
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2013, 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Utilities for working with DataCite metadata."""
from __future__ import absolute_import
import re
import urllib2
from invenio_utils.xmlDict import ElementTree, XmlDictConfig
__all__ = (
'DataciteMetadata',
)
class DataciteMetadata(object):
    """Helper class for working with DataCite metadata.

    Fetches the DataCite XML record for a DOI and exposes the common
    fields through small getters.  After construction, ``self.error`` is
    True when the HTTP fetch failed; the getters are only usable when it
    is False (``self.xml`` is not set otherwise).
    """

    def __init__(self, doi):
        """Fetch and parse the DataCite record for ``doi``."""
        self.url = "http://data.datacite.org/application/x-datacite+xml/"
        self.error = False
        try:
            data = urllib2.urlopen(self.url + doi).read()
        except urllib2.HTTPError:
            self.error = True
        if not self.error:
            # Clean the xml for parsing: drop the XML declaration ...
            data = re.sub('<\?xml.*\?>', '', data, count=1)
            # ... and the opening <resource ...> tag.
            data = re.sub('<resource .*xsd">', '', data)
            # Strip the trailing "</resource>" (11 characters) and wrap
            # everything in a synthetic <datacite> root element.
            self.data = '<?xml version="1.0"?><datacite>' + \
                data[0:len(data) - 11] + '</datacite>'
            self.root = ElementTree.XML(self.data)
            self.xml = XmlDictConfig(self.root)

    def get_creators(self, attribute='creatorName'):
        """Get DataCite creators (a list when several, a scalar when one)."""
        if 'creators' in self.xml:
            if isinstance(self.xml['creators']['creator'], list):
                return [c[attribute] for c in self.xml['creators']['creator']]
            else:
                return self.xml['creators']['creator'][attribute]
        return None

    def get_titles(self):
        """Get DataCite titles."""
        if 'titles' in self.xml:
            return self.xml['titles']['title']
        return None

    def get_publisher(self):
        """Get DataCite publisher."""
        if 'publisher' in self.xml:
            return self.xml['publisher']
        return None

    def get_dates(self):
        """Get DataCite dates."""
        if 'dates' in self.xml:
            if isinstance(self.xml['dates']['date'], dict):
                # NOTE(review): ``.values()[0]`` requires Python 2 list
                # semantics; Python 3 would need list(...)[0].
                return self.xml['dates']['date'].values()[0]
            return self.xml['dates']['date']
        return None

    def get_publication_year(self):
        """Get DataCite publication year."""
        if 'publicationYear' in self.xml:
            return self.xml['publicationYear']
        return None

    def get_language(self):
        """Get DataCite language."""
        if 'language' in self.xml:
            return self.xml['language']
        return None

    def get_related_identifiers(self):
        """Get DataCite related identifiers (not implemented)."""
        pass

    def get_description(self, description_type='Abstract'):
        """Get DataCite description of the requested type."""
        if 'descriptions' in self.xml:
            if isinstance(self.xml['descriptions']['description'], list):
                for description in self.xml['descriptions']['description']:
                    if description_type in description:
                        return description[description_type]
            elif isinstance(self.xml['descriptions']['description'], dict):
                description = self.xml['descriptions']['description']
                if description_type in description:
                    return description[description_type]
                elif len(description) == 1:
                    # return the only description
                    return description.values()[0]
        return None

    def get_rights(self):
        """Get DataCite rights.

        Bug fix: this previously tested ``'titles' in self.xml`` before
        reading ``self.xml['rights']``, so rights were returned only when
        a title happened to be present (and raised KeyError when titles
        existed without rights).
        """
        if 'rights' in self.xml:
            return self.xml['rights']
        return None
| jirikuncar/invenio-utils | invenio_utils/datacite.py | Python | gpl-2.0 | 4,248 |
# -*- coding: utf-8 -*-
from distutils.core import setup

# Read the long description up front with a context manager so the file
# handle is closed promptly (the previous inline open() leaked it).
with open('README.rst') as readme:
    long_description = readme.read()

setup(
    name='popy',
    description='Parser for GNU Po files',
    long_description=long_description,
    version='0.3.0',
    packages=['popy'],
    author='Murat Aydos',
    author_email='murataydos@yandex.com',
    url='https://github.com/murataydos/popy',
    license='MIT',
    zip_safe=False,
    include_package_data=True
)
| murataydos/popy | setup.py | Python | gpl-2.0 | 402 |
# -*- coding:utf-8 -*-
from django.shortcuts import render
import models
# Create your views here.
class Pager(object):
    """Pagination helper: slicing offsets plus an HTML page-link strip.

    Pages hold 10 items.  ``page_str`` renders a window of at most 11
    numbered links centered on the current page, bracketed by
    previous/next links (disabled via ``javascript:void(0)`` at the ends).
    """

    def __init__(self, current_page):
        # 1-based number of the page being displayed.
        self.current_page = current_page

    @property
    def start(self):
        """Offset of the first item on this page."""
        return (self.current_page - 1) * 10

    @property
    def end(self):
        """Offset just past the last item on this page."""
        return self.current_page * 10

    def page_str(self, all_item, base_url):
        """Build the HTML link strip for ``all_item`` total items."""
        all_page, remainder = divmod(all_item, 10)
        if remainder > 0:
            all_page += 1
        # Choose the window [start, end) of page numbers to display.
        if all_page <= 11:
            start, end = 1, all_page + 1
        else:
            if self.current_page <= 6:
                start, end = 1, 12
            else:
                start = self.current_page - 5
                end = self.current_page + 6
                if self.current_page + 6 > all_page:
                    # Clamp the window against the last page.
                    start = all_page - 11
                    end = all_page + 1
        links = []
        for page in range(start, end):
            if page == self.current_page:
                links.append('<a style="color:red;font-size:32px" href="%s%d"> %d </a>' % (base_url, page, page))
            else:
                links.append('<a href="%s%d"> %d </a>' % (base_url, page, page))
        # Previous link (inactive on the first page).
        if self.current_page > 1:
            links.insert(0, '<a href="%s%d"> 上一页 </a>' % (base_url, self.current_page - 1))
        else:
            links.insert(0, '<a href="javascript:void(0)"> 上一页 </a>')
        # Next link (inactive on the last page).
        if self.current_page >= all_page:
            links.append('<a href="javascript:void(0)"> 下一页 </a>')
        else:
            links.append('<a href="%s%d"> 下一页 </a>' % (base_url, self.current_page + 1))
        return "".join(links)
def user_list(request):
    """Render one page of ``user_list`` rows with pagination links."""
    page_number = int(request.GET.get("page", 1))
    pager = Pager(page_number)
    # Slice the queryset with the pager's offsets.
    rows = models.user_list.objects.all()[pager.start:pager.end]
    total = models.user_list.objects.all().count()
    page_links = pager.page_str(total, "/user_list/?page=")
    return render(request, "user_list.html", {"result": rows, "page_str": page_links})
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2.
# smallcommitmetadata.py - stores a small amount of metadata associated with a commit
from . import json
from .node import bin, hex
from .util import altsortdict
# Stores a mapping of (node, category) -> data, with a FIFO-limited number of entries
# Stores a mapping of (node, category) -> data, with a FIFO-limited number of entries
class smallcommitmetadata(object):
    """Small per-commit metadata store, persisted as JSON through ``vfs``.

    Holds at most ``entrylimit`` entries keyed by ``(node, category)``;
    storing beyond the limit evicts the oldest entries (FIFO order).
    """

    def __init__(self, vfs, entrylimit):
        self.vfs = vfs
        self.limit = entrylimit
        self.contents = altsortdict()
        self.reload()

    def reload(self):
        """Read the database from disk."""
        if not self.vfs.exists("commit_metadata"):
            self.contents = altsortdict()
            return
        try:
            # Keep only the newest ``limit`` entries from disk.
            entries = json.loads(self.vfs.tryreadutf8("commit_metadata"))[-self.limit :]
        except ValueError:
            # Corrupt/unparseable file: start empty.
            entries = []
        for entry in entries:
            key = (bin(entry["node"]), entry["category"])
            self.contents[key] = entry["data"]

    def write(self):
        """Write the database to disk."""
        entries = []
        for ((node, category), data) in self.contents.items():
            entries.append({"node": hex(node), "category": category, "data": data})
        with self.vfs("commit_metadata", "w", atomictemp=True) as f:
            json.dump(entries, f)

    def store(self, node, category, data):
        """Adds a new entry with the specified node and category, and updates the data on disk. Returns the removed entry, if any."""
        self.contents[(node, category)] = data
        evicted = None
        while len(self.contents) > self.limit:
            # Drop oldest-first until back under the limit.
            evicted = self.contents.popitem(last=False)
        self.write()
        return evicted

    def delete(self, node, category):
        """Removes the entry with matching node and category and returns its value."""
        key = (node, category)
        data = self.contents[key]
        del self.contents[key]
        return data

    def read(self, node, category):
        """Returns the value of the entry with specified node and category."""
        return self.contents[(node, category)]

    def find(self, node=None, category=None):
        """Returns a map of all entries with matching node and/or category. If both are None, returns all entries."""
        matches = [
            (key, data)
            for (key, data) in self.contents.items()
            if (node is None or node == key[0]) and (category is None or category == key[1])
        ]
        return altsortdict(matches)

    def finddelete(self, node=None, category=None):
        """Removes and returns any entries with matching node and/or category."""
        matches = [
            (key, data)
            for (key, data) in self.contents.items()
            if (node is None or node == key[0]) and (category is None or category == key[1])
        ]
        for (key, _data) in matches:
            del self.contents[key]
        return altsortdict(matches)

    def clear(self):
        """Removes and returns all entries."""
        removed = self.contents
        self.contents = altsortdict()
        return removed
# flatspice.py
#-----------------------------------------------------------
# Python script which writes a SPICE-format netlist.
# Replaces the code formerly in "netlist.c" (deprecated).
# Python scripting is now the preferred method for handling
# netlist output formats.
#-----------------------------------------------------------
# Select the device string corresponding to the given prefix.
# Return the body of the string, or an empty string if the prefix doesn't match.
def select(sstr, prefix):
    if not sstr.startswith(prefix):
        return ''
    # Skip the prefix plus its one-character separator.
    return sstr[len(prefix) + 1:]
# Generate an ASCII string from an xcircuit string (list)
def textprint(slist, params):
    """Flatten an xcircuit string-record list into a plain ASCII string.

    ``slist`` mixes plain control strings ('Return', 'Underline',
    'Overline') with one-key dictionaries ({'Font': ...},
    {'Parameter': ...}, or {<other>: text}).  Parameter records are
    expanded recursively from ``params``.

    NOTE(review): ``x.keys()[0]`` relies on Python 2 returning a list
    from dict.keys(); under Python 3 this would raise TypeError.
    """
    ltext = ''
    is_symbol = 0   # inside a Symbol font: translate Greek letters
    is_iso = 0      # set when the current font name ends in 'ISO' (tracked but unused below)
    for x in slist:
        try:
            f = x.keys()[0]
        except AttributeError: # must be a string
            if x == 'Return':
                ltext += '\n'
            elif x == 'Underline':
                ltext += '_'
            elif x == 'Overline':
                ltext += '!'
        else: # is a dictionary; will have only one key
            if f == 'Font':
                # Font switch: only the Symbol/ISO properties are tracked.
                lfont = x[x.keys()[0]]
                if lfont.startswith('Symbol'):
                    is_symbol = 1
                else:
                    is_symbol = 0
                if lfont.endswith('ISO'):
                    is_iso = 1
                else:
                    is_iso = 0
            elif f == 'Parameter':
                # Substitute the named parameter, recursively flattened.
                ltext += textprint(params[x[x.keys()[0]]], [])
            else: # text: SPICE translates "mu" to "u"
                for y in x[x.keys()[0]]:
                    if is_symbol:
                        # Symbol font: 'f' is phi, 'm' is mu.
                        if y == 'f':
                            ltext += 'phi'
                        elif y == 'm':
                            ltext += 'u'
                        else:
                            ltext += y
                    else:
                        # Plain font: 181 is the Latin-1 micro sign; any
                        # other non-ASCII byte is emitted as /<code>.
                        if ord(y) == 181:
                            ltext += 'u'
                        elif ord(y) > 127:
                            ltext += '/' + str(ord(y))
                        else:
                            ltext += y
    return ltext
# Flatten the netlist and write to the output
def recurseflat(outfile, ckt, clist):
    """Recursively flatten circuit ``ckt`` against library ``clist``,
    writing one 'device: ...' line per SPICE device to ``outfile``.

    ``ckt`` is a dict that may contain 'calls' (subcircuit instances)
    and 'devices' (lists of xcircuit string records).
    """
    try:
        v = ckt['calls'] # calls to subcircuits
    except KeyError: # A bottom-level circuit element
        pass
    else:
        for y in v:
            for z in clist:
                if z['name'] == y['name']:
                    # Substitute net names into the subcircuit ports.
                    # NOTE(review): lobj aliases z (no copy is made), so
                    # this mutates the shared library entry's 'ports'.
                    lobj = z
                    lobj['ports'] = y['ports']
                    recurseflat(outfile, lobj, clist)
                    break
    try:
        w = ckt['devices']
    except KeyError:
        pass
    else:
        for y in w:
            for u in y:
                lstr = select(textprint(u, []), 'spice')
                # '!=' replaces the '<>' operator, which was removed in
                # Python 3 (and long deprecated in Python 2).
                if lstr != '':
                    outfile.write('device: ' + lstr + '\n')
# Top of the flattened-circuit writing routine
def writespiceflat():
    """Write a flattened SPICE netlist for the top-level circuit.

    The output file is named after the top circuit with a '.spc' suffix;
    on failure to open it the routine silently returns.
    """
    data = netlist()
    circuits = data['circuit']
    # The last entry of the circuit list is the top-level circuit.
    top = circuits[-1]
    filename = top['name'] + '.spc'
    try:
        outfile = open(filename, 'w')
    except IOError:
        return
    # Header line.
    outfile.write('*SPICE flattened circuit "' + filename + '"')
    outfile.write(' from XCircuit v' + str(xc_version))
    outfile.write(' (Python script "flatspice.py")\n')
    # Global node declarations.
    for glob in data['globals']:
        outfile.write('.GLOBAL ' + textprint(glob, []) + '\n')
    outfile.write('\n')
    recurseflat(outfile, top, circuits)
    outfile.write('.end\n')
    outfile.close()
# Register the netlist writer with the xcircuit UI: a menu button under
# 'Netlist'.  The key binding below is left disabled.
# bind('Alt_F', 'writespiceflat')
newbutton('Netlist', 'Write Flattened Spice', 'writespiceflat')
| snmishra/xcircuit-3.8 | lib/python/flatspice.py | Python | gpl-2.0 | 3,186 |
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
from tests import TestCase, mkstemp
import os
from quodlibet.qltk.cbes import ComboBoxEntrySave, StandaloneEditor
import quodlibet.config
# Tests for ComboBoxEntrySave: a combo box that persists its history in
# two files -- <fname> (recent entries, FIFO-limited by 'count') and
# <fname>.saved (pinned entries).
class TComboBoxEntrySave(TestCase):
    # Initial contents of the history file and the .saved file.
    memory = "pattern 1\npattern 2\n"
    saved = "pattern text\npattern name\n"

    def setUp(self):
        quodlibet.config.init()
        # mkstemp() returns an open fd; close it and reuse only the name.
        h, self.fname = mkstemp()
        os.close(h)
        with open(self.fname, "w") as f:
            f.write(self.memory)
        with open(self.fname + ".saved", "w") as f:
            f.write(self.saved)
        # Two widgets over the same files (they should share one model),
        # plus one with a filter that only accepts the literal "filter".
        self.cbes = ComboBoxEntrySave(self.fname, count=2)
        self.cbes2 = ComboBoxEntrySave(self.fname, count=2)
        self.cbes3 = ComboBoxEntrySave(self.fname, count=2,
            filter=lambda ls, it, *d: ls.get_value(it, 0) == "filter")

    def test_equivalence(self):
        # Widgets created from the same file share the same backing model.
        model1 = self.cbes.model_store
        model2 = self.cbes2.model_store
        self.failUnlessEqual(model1, model2)
        rows1 = list(model1)
        rows2 = list(model2)
        # Compare the first three columns of every row pairwise.
        for row1, row2 in zip(rows1, rows2):
            self.failUnlessEqual(row1[0], row2[0])
            self.failUnlessEqual(row1[1], row2[1])
            self.failUnlessEqual(row1[2], row2[2])

    def test_text_changed_signal(self):
        # Count "text-changed" emissions via a mutable closure cell.
        called = [0]
        def cb(*args):
            called[0] += 1
        def get_count():
            # Read and reset the counter.
            c = called[0]
            called[0] = 0
            return c
        self.cbes.connect("text-changed", cb)
        entry = self.cbes.get_child()
        entry.set_text("foo")
        self.failUnlessEqual(get_count(), 1)
        self.cbes.prepend_text("bar")
        # in case the model got changed but the entry is still the same
        # the text-changed signal should not be triggered
        self.failUnlessEqual(entry.get_text(), "foo")
        self.failUnlessEqual(get_count(), 0)

    def test_shared_model(self):
        # Prepending through one widget must be visible through the other.
        self.cbes.prepend_text("a test")
        self.test_equivalence()

    def test_initial_size(self):
        # 1 saved, Edit, separator, 3 remembered
        self.failUnlessEqual(len(self.cbes.get_model()), 5)

    def test_prepend_text(self):
        # Prepending pushes the oldest remembered entry out (count=2).
        self.cbes.prepend_text("pattern 3")
        self.memory = "pattern 3\npattern 1\n"
        self.test_save()

    def test_save(self):
        # write() must persist both the memory and the saved file.
        self.cbes.write()
        self.failUnlessEqual(self.memory, open(self.fname).read())
        self.failUnlessEqual(self.saved, open(self.fname + ".saved").read())

    def test_set_text_then_prepend(self):
        self.cbes.get_child().set_text("foobar")
        self.cbes.prepend_text("foobar")
        self.memory = "foobar\npattern 1\n"
        self.test_save()

    def test_filter(self):
        # With the filter installed, only the matching row remains visible.
        self.cbes3.prepend_text("filter")
        self.failUnlessEqual(1, len(self.cbes3.get_model()))

    def tearDown(self):
        self.cbes.destroy()
        self.cbes2.destroy()
        self.cbes3.destroy()
        os.unlink(self.fname)
        os.unlink(self.fname + ".saved")
        quodlibet.config.quit()
# Tests for StandaloneEditor, which persists (name, value) pairs in a
# "<fname>.saved" file as alternating value/name lines.
class TStandaloneEditor(TestCase):
    # One sample (name, value) pair used throughout.
    TEST_KV_DATA = [
        ("Search Foo", "https://foo.com/search?q=<artist>-<title>")]

    def setUp(self):
        quodlibet.config.init()
        h, self.fname = mkstemp()
        os.close(h)
        # File format: value line followed by name line.
        with open(self.fname + ".saved", "w") as f:
            f.write(
                "%s\n%s\n" % (self.TEST_KV_DATA[0][1],
                self.TEST_KV_DATA[0][0]))
        self.sae = StandaloneEditor(self.fname, "test", None, None)

    def test_constructor(self):
        # The editor's model must reflect the .saved file contents.
        self.failUnless(self.sae.model)
        data = [(row[1], row[0]) for row in self.sae.model]
        self.failUnlessEqual(data, self.TEST_KV_DATA)

    def test_load_values(self):
        # Static loader parses the same file to the same pairs.
        values = StandaloneEditor.load_values(self.fname + ".saved")
        self.failUnlessEqual(self.TEST_KV_DATA, values)

    def test_defaults(self):
        defaults = [("Dot-com Dream", "http://<artist>.com")]
        try:
            os.unlink(self.fname)
        except OSError:
            pass
        # Now create a new SAE without saved results and use defaults
        self.fname = "foo"
        self.sae.destroy()
        self.sae = StandaloneEditor(self.fname, "test2", defaults, None)
        self.sae.write()
        data = [(row[1], row[0]) for row in self.sae.model]
        self.failUnlessEqual(defaults, data)

    def tearDown(self):
        self.sae.destroy()
        try:
            os.unlink(self.fname)
            os.unlink(self.fname + ".saved")
        except OSError:
            pass
        quodlibet.config.quit()
| elbeardmorez/quodlibet | quodlibet/tests/test_qltk_cbes.py | Python | gpl-2.0 | 4,802 |
#!/usr/bin/env python
"""This does HTTP GET requests given a host:port and path and returns
a subset of the headers plus the body of the result."""
from __future__ import absolute_import, print_function
import json
import os
import sys
from edenscm.mercurial import util
httplib = util.httplib
# On Windows, switch stdout/stderr to binary mode so HTTP bodies are
# written byte-for-byte; msvcrt does not exist on other platforms.
try:
    import msvcrt
    msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
    msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
except ImportError:
    pass
# Ad-hoc flag parsing: each recognized flag is removed from sys.argv so
# the positional arguments (host, path, headers...) keep their indices.
twice = False  # --twice: issue the request twice (second one reuses the ETag)
if "--twice" in sys.argv:
    sys.argv.remove("--twice")
    twice = True
headeronly = False  # --headeronly: print status/headers only, skip the body
if "--headeronly" in sys.argv:
    sys.argv.remove("--headeronly")
    headeronly = True
formatjson = False  # --json: pretty-print (and thereby validate) JSON bodies
if "--json" in sys.argv:
    sys.argv.remove("--json")
    formatjson = True
hgproto = None  # --hgproto <v>: value to send in the X-HgProto-1 header
if "--hgproto" in sys.argv:
    idx = sys.argv.index("--hgproto")
    hgproto = sys.argv[idx + 1]
    # Remove the flag and its value (second pop hits the shifted value).
    sys.argv.pop(idx)
    sys.argv.pop(idx)
# Last ETag seen; sent back as If-None-Match on a subsequent request.
tag = None
def request(host, path, show):
    """Issue one GET to http://<host>/<path> and print a filtered view.

    Prints the status line, the headers selected by ``show`` (if the
    first element is '-', the rest is an exclusion list and all other
    headers are printed sorted), and -- unless --headeronly -- the body.
    Returns the integer HTTP status code.
    """
    assert not path.startswith("/"), path
    global tag

    headers = {}
    if tag:
        # Conditional request using the ETag captured by a previous call.
        headers["If-None-Match"] = tag
    if hgproto:
        headers["X-HgProto-1"] = hgproto

    conn = httplib.HTTPConnection(host)
    conn.request("GET", "/" + path, None, headers)
    response = conn.getresponse()
    print(response.status, response.reason)
    if show[:1] == ["-"]:
        # Exclusion mode: show every header not named in the list.
        show = sorted(h for h, v in response.getheaders() if h.lower() not in show)
    for h in [h.lower() for h in show]:
        if response.getheader(h, None) is not None:
            print("%s: %s" % (h, response.getheader(h)))
    if not headeronly:
        print()
        data = response.read()

        # Pretty print JSON. This also has the beneficial side-effect
        # of verifying emitted JSON is well-formed.
        if formatjson:
            # json.dumps() will print trailing newlines. Eliminate them
            # to make tests easier to write.
            data = json.loads(data)
            lines = json.dumps(data, sort_keys=True, indent=2).splitlines()
            for line in lines:
                print(line.rstrip())
        else:
            # NOTE(review): data is whatever response.read() returned;
            # on Python 3 writing bytes to a text-mode stdout would fail
            # -- presumably handled by the binary-mode setup above or a
            # Python 2 runtime; verify.
            sys.stdout.write(data)

        if twice and response.getheader("ETag", None):
            # Remember the ETag for the second request's If-None-Match.
            tag = response.getheader("ETag")

    return response.status
# argv layout after flag stripping: host, path, then header names.
status = request(sys.argv[1], sys.argv[2], sys.argv[3:])
if twice:
    # Second request; may return 304 thanks to the captured ETag.
    status = request(sys.argv[1], sys.argv[2], sys.argv[3:])

# Exit 0 for any success/redirect-ish status, 1 otherwise.
if 200 <= status <= 305:
    sys.exit(0)
sys.exit(1)
| facebookexperimental/eden | eden/hg-server/tests/get-with-headers.py | Python | gpl-2.0 | 2,434 |
# Constants for the Puzzle and Dragons helper.  All values are unchanged;
# this revision removes stray extraction residue that had been fused onto
# the final line (a syntax error) and a trailing semicolon.
DEBUG = 0

# Cardinal directions, in the order used by the move logic.
directions = ("left","up","right","down")

# Solver tuning.
maxExamined = 75000    # maximum number of tries when solving
maxMoves = 19          # maximum number of moves
cullFrequency = 75000  # number of tries per cull update
cullCutoff = 1.2       # fraction of average to cull

# Grid size: gridRows * gridColumns orbs on the board.
gridRows = 5
gridColumns = 6

# UI text strings.
textCalculateCurrentCombos = "Calculate Damage"
textClose = "Close"
textDamageDisplayAmount = "Total: "
textChoosePaint = "Choose a color to paint:"
textSolve = "Solve"
textTitle = "Puzzle and Dragons Helper"

# Orbs.  orbDefaultConfig holds one entry per board cell (5 x 6 = 30).
orbDefault = "light"
orbDefaultConfig = ("heal","light","wood","wood","fire","light","dark","heal","wood","water","heal","dark","fire","light","light","fire","fire","wood","heal","wood","dark","wood","water","light","light","dark","heal","heal","fire","dark")
orbDefaultStrength = 100
orbList = ("heal","fire","water","wood","light","dark")

# Orb image URLs.
orbImageURL = dict(light="img/light.png",
                   dark="img/dark.png",
                   fire="img/fire.png",
                   water="img/water.png",
                   wood="img/wood.png",
                   heal="img/heal.png",
                   bg="img/bgOrb.png"
                   )

# TKinter styles.
tkButtonInactive = "flat"
tkButtonActive = "groove"
tkButtonBorder = 3
tkOrbStrengthEntryWidth = 7
from ..dataset.arff import ArffFile
from scipy.spatial import kdtree, distance
from ..representations.intervalsProbability import IntervalsProbability
import numpy as np
from math import exp
class IPKNN(object):
    """IPKNN implements a K-nearest neighbour method using lower previsions.
    If data are all precise, it returns
    :class:`~classifip.representations.intervalsProbability.IntervalsProbability`
    equivalent to a linear vacuous model. The method is based on [#destercke2012]_

    :param tree: kdtree structure storing learning data set instances
    :type tree: scipy.spatial.kdtree
    :param truelabels: store the true labels of learning instances
    :type truelabels: list of labels
    :param beta: exponent parameter used in discounting rate
    :type beta: positive float
    :param epsilon: base discounting rate
    :type epsilon: float between 0 and 1
    :param av_dist: average distances of members of a given class
    :type av_dist: float
    :param classes: list of class names

    .. note::

        * Assumes that the class attribute is the last one in samples in the learning method
        * If too many data, average distance approximated by sampling

    .. todo::

        * Make it possible for the class to be in any column (retrieve index)
    """

    def __init__(self):
        """Build an empty IPKNN structure"""
        self.tree = None
        self.truelabels = []
        self.beta = 1.5
        self.epsilon = 0.99
        self.classes = []
        self.av_dist = []

    def learn(self, learndataset):
        """learn the KNN structure required to evaluate new instances

        :param learndataset: learning instances
        :type learndataset: :class:`~classifip.dataset.arff.ArffFile`
        """
        self.__init__()
        self.classes = learndataset.attribute_data['class'][:]
        # Initialize average distance for every possible class.
        for i in learndataset.attribute_data['class']:
            class_set = learndataset.select_class([i])
            # Drop the class attribute (assumed to be the last column).
            values = [row[0:len(row) - 1] for row in class_set.data]
            if len(values) > 1000:
                # Too many instances: approximate the average pairwise
                # distance from a random sample of 1000 instances.
                valred = np.random.permutation(values)[0:1000]
                class_distances = distance.cdist(valred, valred)
            else:
                class_distances = distance.cdist(values, values)
            # Mean over off-diagonal pairs (excludes the zero self-distances).
            average = class_distances.sum() / (len(class_distances) ** 2
                                               - len(class_distances))
            self.av_dist.append(average)
        # Train the KD-tree on the whole learning set.
        learndata = [row[0:len(row) - 1] for row in learndataset.data]
        self.truelabels = [row[-1] for row in learndataset.data]
        self.tree = kdtree.KDTree(learndata)

    def evaluate(self, testdataset, knn_beta=1.5, knn_epsilon=0.99, knn_nb_neigh=3):
        """evaluate the instances and return a list of probability intervals

        :param testdataset: list of input features of instances to evaluate
        :type testdataset: list
        :param knn_beta: value of beta parameter used in evaluation
        :type knn_beta: float
        :param knn_epsilon: value of base discounting rate to use
        :type knn_epsilon: float
        :param knn_nb_neigh: number of neighbours to use
        :type knn_nb_neigh: int
        :returns: a set of probability intervals, one per test instance
        :rtype: list of :class:`~classifip.representations.intervalsProbability.IntervalsProbability`
        """
        self.beta = knn_beta
        self.epsilon = knn_epsilon
        answers = []
        for instance in testdataset:
            resulting_int = np.zeros((2, len(self.classes)))
            query = self.tree.query(instance, knn_nb_neigh)
            # With a single neighbour, query() returns scalars rather than
            # arrays: normalize to lists so the loop below works either way.
            # (isinstance replaces the fragile __class__.__name__ check.)
            if not isinstance(query[0], np.ndarray):
                query = list(query)
                query[0] = [query[0]]
                query[1] = [query[1]]
            for k in range(len(query[0])):
                # Retrieve class index of the k-th neighbour.
                neigh_class = self.classes.index(self.truelabels[query[1][k]])
                # Compute the linear vacuous model of this neighbour.
                # The higher the discount, the more original info is kept;
                # discount ~ reliability of the information, in [0, 1].
                expon = -((query[0][k]) ** (self.beta)) / self.av_dist[neigh_class]
                discount = (self.epsilon) * (exp(expon))
                up = np.zeros(len(self.classes))
                up.fill(1 - discount)
                up[neigh_class] = 1
                down = np.zeros(len(self.classes))
                down[neigh_class] = discount
                resulting_int[0] += up
                resulting_int[1] += down
            # Average the k obtained neighbour models.
            resulting_int[0] = resulting_int[0] / knn_nb_neigh
            resulting_int[1] = resulting_int[1] / knn_nb_neigh
            answers.append(IntervalsProbability(resulting_int))
        return answers
| sdestercke/classifip | classifip/models/knn.py | Python | gpl-2.0 | 5,129 |
from spyparty.ReplayOffsets import ReplayOffsets
class ReplayVersion3Offsets(ReplayOffsets):
    """Byte offsets into a version-3 SpyParty replay-file header.

    Each get_*_offset method returns the fixed byte position of a header
    field; extract_* methods read variable-length data directly.
    """

    def extract_number_offset(self):
        # Magic/extract number at the very start of the file.
        return 0x00

    def get_file_version_offset(self):
        return 0x04

    def get_protocol_version_offset(self):
        return 0x08

    def get_spy_party_version_offset(self):
        return 0x0C

    def get_duration_offset(self):
        return 0x14

    def get_uuid_offset(self):
        return 0x18

    def get_timestamp_offset(self):
        return 0x28

    def get_sequence_number_offset(self):
        return 0x2C

    def extract_spy_username(self, bytes):
        # Spy name length is stored at 0x2E; the name itself starts at 0x50.
        spy_username_length = bytes[0x2E]
        return self._read_bytes(bytes, 0x50, spy_username_length).decode()

    def extract_sniper_username(self, bytes):
        # Sniper name (length at 0x2F) immediately follows the spy name.
        spy_username_length = bytes[0x2E]
        sniper_username_length = bytes[0x2F]
        return self._read_bytes(bytes, 0x50 + spy_username_length, sniper_username_length).decode()

    def contains_display_names(self):
        # Version 3 replays carry no display names, guest count or start clock.
        return False

    def contains_guest_count(self):
        return False

    def contains_start_clock(self):
        return False

    def get_game_result_offset(self):
        return 0x30

    def get_game_type_offset(self):
        return 0x34

    def get_level_offset(self):
        return 0x38

    def get_selected_missions_offset(self):
        return 0x3C

    def get_picked_missions_offset(self):
        return 0x40

    def get_completed_missions_offset(self):
        return 0x44
| LtHummus/SpyPartyParse | spyparty/ReplayVersion3Offsets.py | Python | gpl-2.0 | 1,504 |
# -*- coding: utf-8 -*-
from django.db import models, migrations
import autoslug.fields
import django.core.files.storage
import marking.models
class Migration(migrations.Migration):
    """Initial schema for the ``marking`` app (auto-generated by Django).

    Creates per-component marking scheme tables, per-student / per-group
    activity marks (numeric and letter-grade variants), and common-problem
    deduction records.  Auto-generated code: the operations below are left
    byte-for-byte unmodified; only comments were added.
    """
    dependencies = [
        ('grades', '0001_initial'),
        ('groups', '0001_initial'),
    ]
    operations = [
        # One row per marking component (rubric item) of a numeric activity.
        migrations.CreateModel(
            name='ActivityComponent',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('max_mark', models.DecimalField(max_digits=8, decimal_places=2)),
                ('title', models.CharField(max_length=30)),
                ('description', models.TextField(max_length=200, null=True, blank=True)),
                ('position', models.IntegerField(default=0, null=True, blank=True)),
                ('deleted', models.BooleanField(default=False, db_index=True)),
                ('slug', autoslug.fields.AutoSlugField(editable=False)),
                ('numeric_activity', models.ForeignKey(to='grades.NumericActivity', on_delete=models.CASCADE)),
            ],
            options={
                'ordering': ['numeric_activity', 'deleted', 'position'],
                'verbose_name_plural': 'Activity Marking Components',
            },
            bases=(models.Model,),
        ),
        # The mark awarded for one component within one ActivityMark.
        migrations.CreateModel(
            name='ActivityComponentMark',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('value', models.DecimalField(verbose_name=b'Mark', max_digits=8, decimal_places=2)),
                ('comment', models.TextField(max_length=1000, null=True, blank=True)),
                ('activity_component', models.ForeignKey(to='marking.ActivityComponent', on_delete=models.CASCADE)),
            ],
            options={
                'ordering': ('activity_component',),
            },
            bases=(models.Model,),
        ),
        # Base table for numeric activity marks (concrete parents of the
        # Student/Group multi-table-inheritance children below).
        migrations.CreateModel(
            name='ActivityMark',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('overall_comment', models.TextField(max_length=1000, null=True, blank=True)),
                ('late_penalty', models.DecimalField(decimal_places=2, default=0, max_digits=5, blank=True, help_text=b'Percentage to deduct from the total due to late submission', null=True)),
                ('mark_adjustment', models.DecimalField(decimal_places=2, default=0, max_digits=8, blank=True, help_text=b'Points to deduct for any special reasons (may be negative for bonus)', null=True, verbose_name=b'Mark Penalty')),
                ('mark_adjustment_reason', models.TextField(max_length=1000, null=True, verbose_name=b'Mark Penalty Reason', blank=True)),
                ('file_attachment', models.FileField(storage=django.core.files.storage.FileSystemStorage(base_url=None, location=b'submitted_files'), max_length=500, null=True, upload_to=marking.models.attachment_upload_to, blank=True)),
                ('file_mediatype', models.CharField(max_length=200, null=True, blank=True)),
                ('created_by', models.CharField(help_text=b'Userid who gives the mark', max_length=8)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('mark', models.DecimalField(max_digits=8, decimal_places=2)),
            ],
            options={
                'ordering': ['created_at'],
            },
            bases=(models.Model,),
        ),
        # Letter-grade counterpart of ActivityMark.
        migrations.CreateModel(
            name='ActivityMark_LetterGrade',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('overall_comment', models.TextField(max_length=1000, null=True, blank=True)),
                ('created_by', models.CharField(help_text=b'Userid who gives the mark', max_length=8)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('mark', models.CharField(max_length=2, choices=[(b'A+', b'A+ (Excellent performance)'), (b'A', b'A (Excellent performance)'), (b'A-', b'A- (Excellent performance)'), (b'B+', b'B+ (Good performance)'), (b'B', b'B (Good performance)'), (b'B-', b'B- (Good performance)'), (b'C+', b'C+ (Satisfactory performance)'), (b'C', b'C (Satisfactory performance)'), (b'C-', b'C- (Marginal performance)'), (b'D', b'D (Marginal performance)'), (b'F', b'F (Fail. Unsatisfactory Performance)'), (b'N', b'N (Did not write exam or did not complete course)'), (b'P', b'P (Satisfactory performance or better (pass, ungraded))'), (b'DE', b'DE (Deferred grade)'), (b'GN', b'GN (Grade not reported)'), (b'IP', b'IP (In progress)')])),
            ],
            options={
                'ordering': ['created_at'],
            },
            bases=(models.Model,),
        ),
        # Recurring deduction ("common problem") attached to a component.
        migrations.CreateModel(
            name='CommonProblem',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('title', models.CharField(max_length=30)),
                ('penalty', models.DecimalField(max_digits=8, decimal_places=2)),
                ('description', models.TextField(max_length=200, null=True, blank=True)),
                ('deleted', models.BooleanField(default=False, db_index=True)),
                ('activity_component', models.ForeignKey(to='marking.ActivityComponent', on_delete=models.CASCADE)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='GroupActivityMark',
            fields=[
                ('activitymark_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='marking.ActivityMark', on_delete=models.CASCADE)),
                ('group', models.ForeignKey(to='groups.Group', on_delete=models.CASCADE)),
                ('numeric_activity', models.ForeignKey(to='grades.NumericActivity', on_delete=models.CASCADE)),
            ],
            options={
            },
            bases=('marking.activitymark',),
        ),
        migrations.CreateModel(
            name='GroupActivityMark_LetterGrade',
            fields=[
                ('activitymark_lettergrade_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='marking.ActivityMark_LetterGrade', on_delete=models.CASCADE)),
                ('letter_grade', models.CharField(max_length=2, choices=[(b'A+', b'A+ (Excellent performance)'), (b'A', b'A (Excellent performance)'), (b'A-', b'A- (Excellent performance)'), (b'B+', b'B+ (Good performance)'), (b'B', b'B (Good performance)'), (b'B-', b'B- (Good performance)'), (b'C+', b'C+ (Satisfactory performance)'), (b'C', b'C (Satisfactory performance)'), (b'C-', b'C- (Marginal performance)'), (b'D', b'D (Marginal performance)'), (b'F', b'F (Fail. Unsatisfactory Performance)'), (b'N', b'N (Did not write exam or did not complete course)'), (b'P', b'P (Satisfactory performance or better (pass, ungraded))'), (b'DE', b'DE (Deferred grade)'), (b'GN', b'GN (Grade not reported)'), (b'IP', b'IP (In progress)')])),
                ('group', models.ForeignKey(to='groups.Group', on_delete=models.CASCADE)),
                ('letter_activity', models.ForeignKey(to='grades.LetterActivity', on_delete=models.CASCADE)),
            ],
            options={
            },
            bases=('marking.activitymark_lettergrade',),
        ),
        migrations.CreateModel(
            name='StudentActivityMark',
            fields=[
                ('activitymark_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='marking.ActivityMark', on_delete=models.CASCADE)),
                ('numeric_grade', models.ForeignKey(to='grades.NumericGrade', on_delete=models.CASCADE)),
            ],
            options={
            },
            bases=('marking.activitymark',),
        ),
        migrations.CreateModel(
            name='StudentActivityMark_LetterGrade',
            fields=[
                ('activitymark_lettergrade_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='marking.ActivityMark_LetterGrade', on_delete=models.CASCADE)),
                ('letter_grade', models.ForeignKey(to='grades.LetterGrade', on_delete=models.CASCADE, choices=[(b'A+', b'A+ (Excellent performance)'), (b'A', b'A (Excellent performance)'), (b'A-', b'A- (Excellent performance)'), (b'B+', b'B+ (Good performance)'), (b'B', b'B (Good performance)'), (b'B-', b'B- (Good performance)'), (b'C+', b'C+ (Satisfactory performance)'), (b'C', b'C (Satisfactory performance)'), (b'C-', b'C- (Marginal performance)'), (b'D', b'D (Marginal performance)'), (b'F', b'F (Fail. Unsatisfactory Performance)'), (b'N', b'N (Did not write exam or did not complete course)'), (b'P', b'P (Satisfactory performance or better (pass, ungraded))'), (b'DE', b'DE (Deferred grade)'), (b'GN', b'GN (Grade not reported)'), (b'IP', b'IP (In progress)')])),
            ],
            options={
            },
            bases=('marking.activitymark_lettergrade',),
        ),
        # Nullable back-references from the mark base tables to their
        # activities (added after creation to break a dependency cycle).
        migrations.AddField(
            model_name='activitymark_lettergrade',
            name='activity',
            field=models.ForeignKey(to='grades.LetterActivity', null=True, on_delete=models.CASCADE),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='activitymark',
            name='activity',
            field=models.ForeignKey(to='grades.NumericActivity', null=True, on_delete=models.CASCADE),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='activitycomponentmark',
            name='activity_mark',
            field=models.ForeignKey(to='marking.ActivityMark', on_delete=models.CASCADE),
            preserve_default=True,
        ),
        # Each component may be marked at most once per ActivityMark.
        migrations.AlterUniqueTogether(
            name='activitycomponentmark',
            unique_together=set([('activity_mark', 'activity_component')]),
        ),
    ]
#!/usr/bin/python3
# -*- coding: utf-8 -*-
'''Pychemqt, Chemical Engineering Process simulator
Copyright (C) 2009-2017, Juan José Gómez Romera <jjgomera@gmail.com>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
This library implement a chemical element with several properties
* id: atomic number
* name
* altname
* symbol
* serie
* group
* period
* block
* density_Solid
* density_Liq
* density_Gas
* appearance
* date
* country
* discover
* etymology
* atomic_mass
* atomic_volume
* atomic_radius
* covalent_radius
* vanderWaals_radius
* ionic_radii
* lattice_type
* space_group
* lattice_edges
* lattice_angles
* electron_configuration
* oxidation
* electronegativity
* electron_affinity
* first_ionization
* Tf
* Tb
* Heat_f
* Heat_b
* Cp
* k
* T_debye
* color
* notes
'''
import os
import sqlite3
from numpy import linspace, logspace, log
from PyQt5.QtCore import QLocale
from lib.utilities import colors
# Connection to database with element data.  Both the connection and the
# cursor are module-level singletons shared by everything below.
connection = sqlite3.connect(os.path.join(
    os.environ["pychemqt"], "dat", "elemental.db"))
databank = connection.cursor()
# Load system locale to implement a custom translation system (non qt).
# "es_ES" style names are reduced to the bare language code "ES".
locale = QLocale.system().name().upper()
if "_" in locale:
    locale = locale.split("_")[0]
# Detect which translations exist: the TRANSLATION table has one
# "name_<LANG>" column per available language.
databank.execute("PRAGMA table_info(TRANSLATION)")
translation = []
for i, name, type_, other, other2, primary_key in databank:
    if "name_" in name:
        translation.append(name.split("_")[-1])
# tr_available is read later by Elemental.__init__ to decide whether the
# element name can be translated into the system language.
if locale in translation:
    tr_available = True
else:
    tr_available = False
def cleanFloat(flo):
    """Coerce a raw database field to a number.

    Fields in the elemental database are either plain numbers, empty/None,
    or strings of the form ``"...(value, ...)"`` where the first item inside
    the parentheses is the numeric value.

    Parameters
    ----------
    flo : str, float or None
        Raw field value.

    Returns
    -------
    float or int
        Parsed value; 0 when *flo* is empty or cannot be parsed (the
        original raised IndexError on non-empty unparseable input).
    """
    if not flo:
        return 0
    try:
        return float(flo)
    except (TypeError, ValueError):
        pass
    try:
        # "label(value, uncertainty)" style entry: take the first item
        # inside the parentheses.
        return float(flo.split("(")[1].split(",")[0])
    except (IndexError, ValueError):
        # Unparseable entry: treat like the empty case.
        return 0
# Default color palettes: one color per chemical serie ...
color_serie = ["#DDDDDD", "#795681", "#B92D2D", "#B8873A", "#D7C848",
               "#94738F", "#6186AC", "#88AE62", "#949692", "#BF924E",
               "#C44343"]
# ... and one per phase, indexed as ["", "Solid", "Liquid", "Gas"].
color_phase = ["#DDDDDD", "#BB8F4A", "#7BB245", "#5D82A8"]
# Column names of ELEMENTS that hold numeric values; used by _configValues
# to decide whether a property can be mapped onto a continuous color scale.
NUMERIC_VALUES = ["density_Solid", "density_Liq", "density_Gas", "date",
                  "atomic_mass", "atomic_volume", "atomic_radius",
                  "covalent_radius", "vanderWaals_radius", "electronegativity",
                  "electron_affinity", "first_ionization", "Tf", "Tb",
                  "Heat_f", "Heat_b", "Cp", "k", "T_debye"]
def _configValues(Preferences):
    """Derive the color-mapping configuration for the periodic-table view.

    Reads the "elementalColorby" property, category count and log-scale flag
    from *Preferences* and builds the category boundaries and color list.

    Returns
    -------
    tuple
        (CATEGORIES, PROP, COLORS, PMAX) where CATEGORIES is either a list
        of discrete category names or an array of numeric bin edges, PROP is
        the property name, COLORS the matching palette and PMAX the maximum
        numeric value found (None for discrete properties).
    """
    PROP = Preferences.get("Applications", "elementalColorby")
    NUM = Preferences.getint("Applications", "elementalDefinition")
    LOG = Preferences.getboolean("Applications", "elementalLog")
    PMIN = None
    PMAX = None
    if PROP == "phase":
        # Fixed discrete categories matching Elemental.phase values.
        CATEGORIES = ["", "Solid", "Liquid", "Gas"]
        COLORS = color_phase
    elif PROP in NUMERIC_VALUES:
        # Continuous property: scan the whole column for its maximum.
        # NOTE(review): PROP is interpolated into the SQL text; it comes
        # from the application preferences (whitelisted by NUMERIC_VALUES
        # membership above), not from user-typed input.
        databank.execute("SELECT %s FROM ELEMENTS" % PROP)
        PMAX = 0
        for st, in databank:
            value = cleanFloat(st)
            if value > PMAX:
                PMAX = value
        if LOG:
            # Logarithmic binning needs a strictly positive lower bound.
            PMIN = 1
            CATEGORIES = logspace(log(PMIN), log(PMAX), NUM)
        else:
            PMIN = 0
            CATEGORIES = linspace(PMIN, PMAX, NUM)
        COLORS = colors(NUM, scale=True)
    elif PROP == "Element":
        # Per-element coloring: no categories needed.
        CATEGORIES = []
        COLORS = []
    else:
        # Any other discrete column: one category per distinct value.
        q = "SELECT %s, COUNT(*) c FROM ELEMENTS GROUP BY %s HAVING c > 0" % (
            PROP, PROP)
        databank.execute(q)
        CATEGORIES = []
        for category, count in databank:
            CATEGORIES.append(category)
        if PROP == "serie":
            COLORS = color_serie
        else:
            COLORS = colors(len(CATEGORIES))
    return CATEGORIES, PROP, COLORS, PMAX
class Elemental(object):
    """Chemical element class, populated from the bundled elemental.db.

    All properties listed in the module docstring are loaded eagerly in
    ``__init__`` from the ELEMENTS / TRANSLATION / ISOTOPES tables via the
    module-level ``databank`` cursor.
    """

    class _Estimated(float):
        """Float subclass able to carry a ``code`` attribute.

        Plain floats cannot take attributes, so the original
        ``value.code = "stimated"`` always raised AttributeError; this
        subclass preserves the intent (marking estimated values) while
        remaining a float for all callers.
        """
        pass

    def __init__(self, id):
        """
        Parameters
        ------------
        id : int
            atomic number of element, [-]
        """
        # Clamp to the heaviest known element present in the database.
        if id > 118:
            id = 118
        databank.execute("SELECT * FROM ELEMENTS WHERE id=='%i'" % id)
        data = databank.fetchone()
        self.id = int(data[0])
        self.altname = data[2]
        self.symbol = data[3]
        self.serie = data[4]
        self.group = int(data[5])
        self.period = int(data[6])
        self.block = data[7]
        self.density_Solid = self._unit(data[8])
        self.density_Liq = self._unit(data[9])
        self.density_Gas = self._unit(data[10])
        self.appearance = data[11]
        self.date = data[12]
        self.country = data[13]
        self.discover = data[14]
        self.etymology = data[15]
        self.atomic_mass = self._unit(data[16])
        self.atomic_volume = self._unit(data[17])
        self.atomic_radius = self._unit(data[18])
        self.covalent_radius = self._unit(data[19])
        self.vanderWaals_radius = self._unit(data[20])
        self.ionic_radii = data[21]
        self.lattice_type = data[22]
        self.space_group = data[23]
        # NOTE: eval() on trusted, bundled database content (tuple literals
        # of lattice parameters); not exposed to external input.
        self.lattice_edges = eval(data[24])
        self.lattice_volume = self.lattice_edges[0]*self.lattice_edges[1] * \
            self.lattice_edges[2] / 1e9
        self.lattice_angles = eval(data[25])
        self.electron_configuration = data[26]
        self.oxidation = data[27]
        self.electronegativity = self._unit(data[28])
        self.electron_affinity = self._unit(data[29])
        self.first_ionization = self._unit(data[30])
        self.Tf = self._unit(data[31])
        self.Tb = self._unit(data[32])
        # Phase at ambient temperature, judged against 273.15 K.
        if not self.Tf or not self.Tb:
            self.phase = ""
        elif self.Tf > 273.15:
            self.phase = "Solid"
        elif self.Tb < 273.15:
            self.phase = "Gas"
        else:
            self.phase = "Liquid"
        self.Heat_f = self._unit(data[33])
        self.Heat_b = self._unit(data[34])
        self.Cp = self._unit(data[35])
        self.k = self._unit(data[36])
        self.T_debye = self._unit(data[37])
        self.color = data[38]
        self.notes = data[39]
        # Translation: replace the English name if a localized one exists.
        self.name = data[1]
        if tr_available:
            qu = "SELECT name_%s FROM TRANSLATION WHERE id==%i" % (locale, id)
            databank.execute(qu)
            tr_name = databank.fetchone()[0]
            if tr_name:
                self.name = tr_name
        # Isotopes, sorted by mass number.
        # Fix: the original concatenation produced "...==?ORDER BY..."
        # with no separating space before the ORDER BY clause.
        query = "SELECT * FROM ISOTOPES WHERE atomic_number==?" + \
                " ORDER BY mass_number"
        databank.execute(query, (self.id, ))
        self.isotopes = []
        for data in databank:
            self.isotopes.append((int(data[4]), data[2], data[3]))

    def _unit(self, raw):
        """Parse a raw database field into a float.

        Returns None for empty fields and for non-empty fields that cannot
        be parsed (the original code hit UnboundLocalError there).  Entries
        of the form ``"...(value, ...)"`` are estimated values: the
        parenthesised number is returned as an ``Elemental._Estimated``
        instance whose ``code`` attribute is set to "estimated".
        """
        try:
            return float(raw)
        except (TypeError, ValueError):
            pass
        if not raw:
            return None
        if isinstance(raw, str) and raw.endswith(")"):
            try:
                value = self._Estimated(raw.split("(")[1].split(",")[0])
            except (IndexError, ValueError):
                return None
            value.code = "estimated"
            return value
        return None
| jjgomera/pychemqt | lib/elemental.py | Python | gpl-3.0 | 7,567 |
import psycopg2
from db.enums import *
# NOTE(review): database credentials are hard-coded in source; consider
# loading them from the environment or a config file.
base = psycopg2.connect("dbname='cardkeepersample' user='andrew' host='localhost' password='1234'")
# Single module-wide connection and shared cursor: every query helper below
# goes through this cursor, so concurrent callers would interleave state —
# presumably the bot runs single-threaded (TODO confirm).
cursor = base.cursor()
# Wrapped queries in alphabetic order
def active_packs(user_id, start=0, count=10):
    """Return (pack_id, name) tuples of the user's active packs.

    Results are ordered by pack_id and paginated with OFFSET *start* /
    LIMIT *count*.
    """
    query = """SELECT packs.pack_id, packs.name FROM user_packs, packs WHERE packs.pack_id = user_packs.pack_id
           AND user_packs.status = %s AND user_id = %s ORDER BY pack_id
           OFFSET %s LIMIT %s;"""
    cursor.execute(query, (CardStatusType.ACTIVE.value, user_id, start, count))
    return cursor.fetchall()
def add_pack(user_id, pack_id):
    """Attach an existing pack (and all of its cards) to a user.

    Inserts a ``user_packs`` row with status 'Active', then one fresh
    ``user_cards`` row (zero reviews/answers) per card of the pack.  All
    inserts are committed together at the end.
    """
    query = """INSERT INTO user_packs (pack_id, user_id, status) VALUES (%s, %s, 'Active');"""
    cursor.execute(query, (pack_id, user_id))
    query = """SELECT card_id FROM cards WHERE cards.pack_id = %s"""
    cursor.execute(query, (pack_id,))
    cards = cursor.fetchall()
    query = """INSERT INTO user_cards (user_id, card_id, times_reviewed, correct_answers, status) VALUES (%s, %s, 0, 0, 'Active');"""
    # executemany replaces the original per-row execute() loop; same rows,
    # one driver call.
    cursor.executemany(query, [(user_id, card_id) for (card_id,) in cards])
    base.commit()
def add_user(user):
    """Insert a new user row; ``joined`` is set to the current date.

    *user* must be a sequence of 6 values matching the column order:
    (user_id, name, general_goal, weekly_goal, notifications_learn,
    notifications_stats).
    """
    query = """INSERT INTO users (user_id, name, general_goal, weekly_goal, notifications_learn, notifications_stats, joined)
           VALUES (%s, %s, %s, %s, %s, %s, current_date);"""
    cursor.execute(query, tuple(user))
    base.commit()
def available_packs(user_id):
    """Return (pack_id, name) of up to 105 public packs.

    NOTE(review): *user_id* is currently unused — the query returns the
    same public packs for every caller; verify whether per-user filtering
    was intended.  The LIMIT 105 cap looks like a UI page-size constant.
    """
    query = """SELECT packs.pack_id, packs.name FROM packs
            WHERE packs.privacy = 'public' LIMIT 105;"""
    cursor.execute(query)
    return cursor.fetchall()
def available_groups(user_id, rights=RightsType.USER, include_higher=False):
    """Return (group_id, name) of groups where the user holds *rights*.

    With include_higher=True the comparison becomes ``<=`` so groups where
    the user holds a lower rights value are included as well (presumably a
    lower value means higher privilege — TODO confirm against RightsType).
    """
    # Normalize enum members to their stored value, consistent with
    # new_pack() and the CardStatusType usage elsewhere; psycopg2 cannot
    # adapt a raw Enum member.
    if isinstance(rights, RightsType):
        rights = rights.value
    query = """SELECT groups.group_id, groups.name FROM groups, user_groups
           WHERE groups.group_id = user_groups.group_id
           AND user_groups.user_id = %s
           AND user_groups.rights """ + ("<" if include_higher else "") + "= %s;"
    cursor.execute(query, (user_id, rights))
    return cursor.fetchall()
def delete_pack(pack_id):
    """Delete a pack and every row referencing it, in dependency order.

    Deletes user_cards -> cards -> user_packs -> packs so that no
    foreign-key reference is left dangling; everything is committed as one
    transaction at the end.
    """
    # NOTE(review): the assigned owner_id is unused; the get_pack() call
    # effectively acts as an existence check (it fails on a missing pack).
    owner_id = get_pack(pack_id)['owner_id']
    cursor.execute('''
    DELETE FROM user_cards
    USING cards
    WHERE
    user_cards.card_id = cards.card_id AND
    cards.pack_id = %s;
    ''', (pack_id,))
    cursor.execute(
        'DELETE FROM cards WHERE pack_id = %s;',
        (pack_id,)
    )
    cursor.execute(
        'DELETE FROM user_packs WHERE pack_id = %s;',
        (pack_id,)
    )
    cursor.execute(
        'DELETE FROM packs WHERE pack_id = %s;',
        (pack_id,)
    )
    base.commit()
def get_all_cards_in_pack(pack_id):
    """Return every card of a pack as a list of dicts.

    Each dict carries the keys card_id, front, back, comment and type.
    """
    cursor.execute('''
    SELECT card_id, front, back, comment, type
    FROM cards
    WHERE pack_id = %s;
    ''', (pack_id,))
    cards = []
    for row in cursor.fetchall():
        card_id, front, back, comment, tp = row
        cards.append({'card_id': card_id, 'front': front, 'back': back,
                      'comment': comment, 'type': tp})
    return cards
def get_pack(pack_id, user_id=None):
    """Fetch pack metadata as a dict.

    When *user_id* is given, the dict's 'status' key carries that user's
    status for the pack (None when the user has not added the pack —
    previously this crashed subscripting fetchone()'s None result).

    Raises
    ------
    ValueError
        If no pack with *pack_id* exists (previously an opaque TypeError
        was raised while unpacking None).
    """
    cursor.execute(
        'SELECT name, owner_id, privacy FROM packs WHERE pack_id = %s;',
        (pack_id,)
    )
    row = cursor.fetchone()
    if row is None:
        raise ValueError('pack %s does not exist' % pack_id)
    name, owner_id, privacy = row
    status = None
    if user_id is not None:
        cursor.execute('''
            SELECT status FROM user_packs
            WHERE user_id = %s AND pack_id = %s;
        ''', (user_id, pack_id))
        status_row = cursor.fetchone()
        if status_row is not None:
            status = status_row[0]
    return {
        'pack_id': pack_id,
        'name': name,
        'owner_id': owner_id,
        'privacy': privacy,
        'status': status
    }
def if_added(user_id, pack_id):
    """Return the user_packs rows linking the user to the pack.

    The list is empty (falsy) when the user has not added the pack, so the
    return value doubles as a boolean.
    """
    query = "SELECT * FROM user_packs WHERE user_id = %s AND pack_id = %s;"
    cursor.execute(query, (user_id, pack_id))
    return list(cursor.fetchall())
# TODO: Take permissions lists into account
def has_pack_read_access(pack_id, user_id):
    """True when the user owns the pack or the pack is public."""
    pack = get_pack(pack_id)
    if user_id == pack['owner_id']:
        return True
    return pack['privacy'] == 'public'
def if_registered(user_id):
    """Return True when the user already has a row in the users table."""
    query = "SELECT * FROM users WHERE users.user_id = %s;"
    cursor.execute(query, (user_id,))
    return bool(cursor.fetchall())
def cards_for_learning(user_id):
    """Return (front, back, comment) of the user's short-type cards."""
    query = """SELECT cards.front, cards.back, cards.comment FROM user_cards, cards
           WHERE user_cards.card_id = cards.card_id AND
           user_id = %s AND cards.type = %s"""
    # Fix: pass the enum's stored value, not the member itself — psycopg2
    # cannot adapt a raw Enum, and every other call site uses .value.
    cursor.execute(query, (user_id, CardType.SHORT.value))
    return cursor.fetchall()
def new_card(front, back):
    """Insert a standalone card with only front/back text.

    NOTE(review): this insert supplies no pack_id/comment/type — verify the
    cards schema allows those columns to be NULL, since new_pack() always
    fills them.
    """
    query = "INSERT INTO cards (front, back) VALUES (%s, %s);"
    cursor.execute(query, (front, back))
    base.commit()
def new_group(name, owner, privacy="public"):
    """Create a group owned by *owner* (privacy defaults to 'public')."""
    query = "INSERT INTO groups (name, privacy, owner_id) VALUES (%s, %s, %s);"
    cursor.execute(query, (name, privacy, owner))
    base.commit()
def new_pack(name, owner, privacy=PrivacyType.PUBLIC, status=CardStatusType.ACTIVE, cards=None):
    """Create a pack with its cards and attach it to the owner.

    Parameters
    ----------
    name, owner : pack title and owning user's id.
    privacy, status : PrivacyType / CardStatusType members or their raw
        string values (enum members are normalized below).
    cards : iterable of dicts with 'front', 'back' and 'comment' keys.

    Returns the new pack_id.  Everything is committed in one transaction.
    """
    # Avoid the original mutable default argument ([]); behavior for
    # callers that omit *cards* is unchanged.
    if cards is None:
        cards = []
    if isinstance(privacy, PrivacyType):
        privacy = privacy.value
    if isinstance(status, CardStatusType):
        status = status.value
    query = "INSERT INTO packs (name, owner_id, privacy) VALUES (%s, %s, %s);"
    cursor.execute(query, (name, owner, privacy))
    # Recover the generated pack_id by (name, owner) lookup.
    query = "SELECT pack_id FROM packs WHERE name = %s AND owner_id = %s;"
    cursor.execute(query, (name, owner))
    pack_id = cursor.fetchone()[0]
    query = "INSERT INTO user_packs (user_id, pack_id, status) VALUES (%s, %s, %s);"
    cursor.execute(query, (owner, pack_id, status))
    insert_query = "INSERT INTO cards (pack_id, front, back, comment, type) VALUES (%s, %s, %s, %s, %s) RETURNING card_id;"
    insert2_query = "INSERT INTO user_cards (user_id, card_id, times_reviewed, correct_answers, status)" \
                    "VALUES (%s, %s, 0, 0, 'Active');"
    for card in cards:
        front = card['front']
        back = card['back']
        comment = card['comment']
        cursor.execute(insert_query, (pack_id, front, back, comment, CardType.SHORT.value))
        card_id = cursor.fetchone()[0]
        cursor.execute(insert2_query, (owner, card_id))
    base.commit()
    return pack_id
def select_cards(user_id, pack_id):
    """Return (card_id, front, back, comment) of the user's active cards
    in the given pack.

    Fix: removed a leftover debug print of the arguments.
    """
    query = """SELECT cards.card_id, cards.front, cards.back, cards.comment
           FROM cards, user_cards
           WHERE cards.card_id = user_cards.card_id
           AND user_cards.status = %s
           AND cards.pack_id = %s
           AND user_cards.user_id = %s"""
    cursor.execute(query, (CardStatusType.ACTIVE.value, pack_id, user_id))
    return cursor.fetchall()
def update_card_data(user_id, card_id, answer):
    """Record one review of a card for a user.

    Increments times_reviewed by 1 and adds *answer* to correct_answers —
    presumably *answer* is 0 (wrong) or 1 (correct); TODO confirm callers.
    """
    query = """UPDATE user_cards SET times_reviewed = times_reviewed+1, correct_answers = correct_answers+%s
            WHERE user_id = %s AND card_id = %s"""
    cursor.execute(query, (answer, user_id, card_id))
    base.commit()
def update_card_status(user_id, card_id, status):
    """Set the per-user status of a single card."""
    # Accept CardStatusType members as well as raw values, consistent with
    # new_pack() and update_pack_privacy().
    if isinstance(status, CardStatusType):
        status = status.value
    query = """UPDATE user_cards SET status = %s
            WHERE user_id = %s AND card_id = %s"""
    cursor.execute(query, (status, user_id, card_id))
    base.commit()
def update_pack_name(pack_id, new_name):
    """Rename a pack."""
    query = 'UPDATE packs SET name = %s WHERE pack_id = %s;'
    cursor.execute(query, (new_name, pack_id))
    base.commit()
def update_pack_privacy(pack_id, new_privacy):
    """Change a pack's privacy; accepts a PrivacyType member or raw value."""
    if isinstance(new_privacy, PrivacyType):
        new_privacy = new_privacy.value
    query = 'UPDATE packs SET privacy = %s WHERE pack_id = %s;'
    cursor.execute(query, (new_privacy, pack_id))
    base.commit()
def update_pack_status(user_id, pack_id, status):
    """Set the user's status for a whole pack.

    Bug fix: the original updated ``user_cards`` filtered by ``card_id``
    while being passed a *pack_id*, so it silently touched the wrong table
    (and at best a coincidentally matching card).  The pack-level status
    lives in ``user_packs`` (see add_pack/new_pack), keyed by pack_id.
    """
    if isinstance(status, CardStatusType):
        status = status.value
    query = """UPDATE user_packs SET status = %s
            WHERE user_id = %s AND pack_id = %s"""
    cursor.execute(query, (status, user_id, pack_id))
    base.commit()
| andrewgolman/Learning_Cards | bot/db/queries.py | Python | gpl-3.0 | 7,726 |
import shesha.config as conf
simul_name = "bench_scao_sh_16x16_8pix"
# loop: 100 iterations of a 500 Hz loop
p_loop = conf.Param_loop()
p_loop.set_niter(100)
p_loop.set_ittime(0.002)  # =1/500
# geom
p_geom = conf.Param_geom()
p_geom.set_zenithangle(0.)
# tel: 4 m telescope with 12 % central obstruction
p_tel = conf.Param_tel()
p_tel.set_diam(4.0)
p_tel.set_cobs(0.12)
# atmos: single turbulent layer at the ground
p_atmos = conf.Param_atmos()
p_atmos.set_r0(0.16)  # Fried parameter — presumably metres @500nm; confirm convention
p_atmos.set_nscreens(1)
p_atmos.set_frac([1.0])
p_atmos.set_alt([0.0])
p_atmos.set_windspeed([20.0])
p_atmos.set_winddir([45.])
p_atmos.set_L0([1.e5])  # effectively infinite outer scale
# target: single on-axis H-band (1.65 um) target
p_target = conf.Param_target()
p_targets = [p_target]
# p_target.set_ntargets(1)
p_target.set_xpos(0.)
p_target.set_ypos(0.)
p_target.set_Lambda(1.65)
p_target.set_mag(10.)
# wfs: one 8x8 Shack-Hartmann, 8 pixels per subaperture
p_wfs0 = conf.Param_wfs()
p_wfss = [p_wfs0]
p_wfs0.set_type("sh")
p_wfs0.set_nxsub(8)
p_wfs0.set_npix(8)
p_wfs0.set_pixsize(0.3)
p_wfs0.set_fracsub(0.8)
p_wfs0.set_xpos(0.)
p_wfs0.set_ypos(0.)
p_wfs0.set_Lambda(0.5)
p_wfs0.set_gsmag(8.)
p_wfs0.set_optthroughput(0.5)
p_wfs0.set_zerop(1.e11)
p_wfs0.set_noise(3.)  # readout noise, e-/pixel — presumably; confirm units
p_wfs0.set_atmos_seen(1)
# lgs parameters (disabled: natural guide star configuration)
# p_wfs0.set_gsalt(90*1.e3)
# p_wfs0.set_lltx(0)
# p_wfs0.set_llty(0)
# p_wfs0.set_laserpower(10)
# p_wfs0.set_lgsreturnperwatt(1.e3)
# p_wfs0.set_proftype("Exp")
# p_wfs0.set_beamsize(0.8)
# dm: piezo-stack DM (nxsub+1 actuators across) plus tip-tilt mirror
p_dm0 = conf.Param_dm()
p_dm1 = conf.Param_dm()
p_dms = [p_dm0, p_dm1]
p_dm0.set_type("pzt")
nact = p_wfs0.nxsub + 1
p_dm0.set_nact(nact)
p_dm0.set_alt(0.)
p_dm0.set_thresh(0.3)
p_dm0.set_coupling(0.2)
p_dm0.set_unitpervolt(0.01)
p_dm0.set_push4imat(100.)
p_dm1.set_type("tt")
p_dm1.set_alt(0.)
p_dm1.set_unitpervolt(0.0005)
p_dm1.set_push4imat(10.)
# centroiders: plain center-of-gravity on the single WFS
p_centroider0 = conf.Param_centroider()
p_centroiders = [p_centroider0]
p_centroider0.set_nwfs(0)
p_centroider0.set_type("cog")
# p_centroider0.set_type("corr")
# p_centroider0.set_type_fct("model")
# controllers: least-squares reconstructor driving both DMs
p_controller0 = conf.Param_controller()
p_controllers = [p_controller0]
p_controller0.set_type("ls")
p_controller0.set_nwfs([0])
p_controller0.set_ndm([0, 1])
p_controller0.set_maxcond(1500.)  # conditioning cutoff for the LS inversion
p_controller0.set_delay(1.)
p_controller0.set_gain(0.4)
# modal optimization settings (modopti=0 disables them)
p_controller0.set_modopti(0)
p_controller0.set_nrec(2048)
p_controller0.set_nmodes(216)
p_controller0.set_gmin(0.001)
p_controller0.set_gmax(0.5)
p_controller0.set_ngain(500)
| ANR-COMPASS/shesha | data/par/par4tests/test_sh_base.py | Python | gpl-3.0 | 2,260 |
# -*- coding: utf-8 -*-
"""
Setup script for the WDmodel package
"""
import sys
import os
import re
import glob
from setuptools import find_packages, setup
# Runtime dependencies are pinned in a standalone requirements file.
dep_file = 'dependencies_py36.txt'
with open(dep_file, 'r') as f:
    required = f.read().splitlines()

# Parse package metadata out of WDmodel/__init__.py without importing it
# (avoids importing heavy dependencies at install time).
dir_path = os.path.dirname(os.path.realpath(__file__))
with open(os.path.join(dir_path, 'WDmodel', '__init__.py')) as init_file:
    init_string = init_file.read()

VERS = r"^__version__\s+=\s+[\'\"]([0-9\.]*)[\'\"]$"
mo = re.search(VERS, init_string, re.M)
__version__ = mo.group(1)

AUTH = r"^__author__\s+=\s+[\'\"]([A-za-z\s]*)[\'\"]$"
mo = re.search(AUTH, init_string, re.M)
__author__ = mo.group(1)

# Fix: the original pattern required a literal space before \s+
# ("^__license__ \s+"), which cannot match the common single-space form
# "__license__ = ..."; plain \s+ matches both.
# NOTE(review): the [A-za-z\s0-9] class ([A-z] spans punctuation too) is
# kept as-is for compatibility; [A-Za-z] was probably intended.
LICE = r"^__license__\s+=\s+[\'\"]([A-za-z\s0-9]*)[\'\"]$"
mo = re.search(LICE, init_string, re.M)
__license__ = mo.group(1)

with open('README.rst') as readme_file:
    long_description = readme_file.read()

# Everything under bin/ is installed as a script (debug print removed).
scripts = glob.glob('bin/*')

setup(
    name='WDmodel',
    packages=find_packages(),
    entry_points={'console_scripts': [
        'WDmodel = WDmodel.main:main'
    ]},
    include_package_data=True,
    version=__version__,  # noqa
    description=('Bayesian inference of '
                 'faint DA white dwarf spectral energy distributions'
                 'from ground-based spectroscopy and HST photometry'
                 'to establish faint CALSPEC spectrophotometric standards.'),
    scripts = scripts,
    license=__license__,  # noqa
    author=__author__,  # noqa
    author_email='gsnarayan@gmail.com',
    install_requires=required,
    url='https://github.com/gnarayan/WDmodel',
    keywords=['astronomy', 'fitting', 'monte carlo', 'modeling', 'calibration'],
    long_description=long_description,
    classifiers=[
        'Development Status :: 4 - Beta',
        'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)',
        'Natural Language :: English',
        'Programming Language :: Python :: 3',
        'Topic :: Scientific/Engineering :: Astronomy',
        'Topic :: Scientific/Engineering :: Physics'
    ])
| gnarayan/WDmodel | setup.py | Python | gpl-3.0 | 1,976 |
#!/usr/bin/python
# (c) 2019, NetApp Inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
"""AWS Cloud Volumes Services - Manage fileSystem"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
module: aws_netapp_cvs_FileSystems
short_description: NetApp AWS Cloud Volumes Service Manage FileSystem.
extends_documentation_fragment:
- netapp.awscvs
version_added: '2.9'
author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com>
description:
- Create, Update, Delete fileSystem on AWS Cloud Volumes Service.
options:
state:
description:
- Whether the specified fileSystem should exist or not.
required: true
choices: ['present', 'absent']
type: str
region:
description:
- The region to which the filesystem belongs to.
required: true
type: str
creationToken:
description:
- Name of the filesystem
required: true
type: str
quotaInBytes:
description:
- Size of the filesystem
- Required for create
type: int
serviceLevel:
description:
- Service Level of a filesystem.
choices: ['standard', 'premium', 'extreme']
type: str
exportPolicy:
description:
- The policy rules to export the filesystem
type: dict
suboptions:
rules:
description:
- Set of rules to export the filesystem
- Requires allowedClients, access and protocol
type: list
suboptions:
allowedClients:
description:
- Comma separated list of ip address blocks of the clients to access the fileSystem
- Each address block contains the starting IP address and size for the block
type: str
cifs:
description:
- Enable or disable cifs filesystem
type: bool
nfsv3:
description:
- Enable or disable nfsv3 fileSystem
type: bool
nfsv4:
description:
- Enable or disable nfsv4 filesystem
type: bool
ruleIndex:
description:
- Index number of the rule
type: int
unixReadOnly:
description:
- Should fileSystem have read only permission or not
type: bool
unixReadWrite:
description:
- Should fileSystem have read write permission or not
type: bool
'''
# Fix: the "Delete FileSystem" example previously showed ``state: present``,
# which would (re)create the filesystem instead of deleting it.
EXAMPLES = """
- name: Create FileSystem
  aws_netapp_cvs_FileSystems:
    state: present
    region: us-east-1
    creationToken: newVolume-1
    exportPolicy:
        rules:
          - allowedClients: 172.16.0.4
            cifs: False
            nfsv3: True
            nfsv4: True
            ruleIndex: 1
            unixReadOnly: True
            unixReadWrite: False
    quotaInBytes: 100000000000
    api_url : cds-aws-bundles.netapp.com
    api_key: Q1ZRR0p0VGNuZ3VhMnJBYk5zczM1RkZ3Z0lCbUE3
    secret_key : U1FwdHdKSGRQQUhIdkIwMktMU1ZCV2x6WUowZWRD
- name: Update FileSystem
  aws_netapp_cvs_FileSystems:
    state: present
    region: us-east-1
    creationToken: newVolume-1
    exportPolicy:
        rules:
          - allowedClients: 172.16.0.4
            cifs: False
            nfsv3: True
            nfsv4: True
            ruleIndex: 1
            unixReadOnly: True
            unixReadWrite: False
    quotaInBytes: 200000000000
    api_url : cds-aws-bundles.netapp.com
    api_key: Q1ZRR0p0VGNuZ3VhMnJBYk5zczM1RkZ3Z0lCbUE3
    secret_key : U1FwdHdKSGRQQUhIdkIwMktMU1ZCV2x6WUowZWRD
- name: Delete FileSystem
  aws_netapp_cvs_FileSystems:
    state: absent
    region: us-east-1
    creationToken: newVolume-1
    quotaInBytes: 100000000000
    api_url : cds-aws-bundles.netapp.com
    api_key: Q1ZRR0p0VGNuZ3VhMnJBYk5zczM1RkZ3Z0lCbUE3
    secret_key : U1FwdHdKSGRQQUhIdkIwMktMU1ZCV2x6WUowZWRD
"""
RETURN = """
"""
import ansible.module_utils.netapp as netapp_utils
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.netapp_module import NetAppModule
from ansible.module_utils.netapp import AwsCvsRestAPI
class AwsCvsNetappFileSystem(object):
"""
Contains methods to parse arguments,
derive details of AWS_CVS objects
and send requests to AWS CVS via
the restApi
"""
def __init__(self):
"""
Parse arguments, setup state variables,
check paramenters and ensure request module is installed
"""
self.argument_spec = netapp_utils.aws_cvs_host_argument_spec()
self.argument_spec.update(dict(
state=dict(required=True, choices=['present', 'absent']),
region=dict(required=True, type='str'),
creationToken=dict(required=True, type='str'),
quotaInBytes=dict(required=False, type='int'),
serviceLevel=dict(required=False, choices=['standard', 'premium', 'extreme']),
exportPolicy=dict(
type='dict',
options=dict(
rules=dict(
type='list',
options=dict(
allowedClients=dict(required=False, type='str'),
cifs=dict(required=False, type='bool'),
nfsv3=dict(required=False, type='bool'),
nfsv4=dict(required=False, type='bool'),
ruleIndex=dict(required=False, type='int'),
unixReadOnly=dict(required=False, type='bool'),
unixReadWrite=dict(required=False, type='bool')
)
)
)
),
))
self.module = AnsibleModule(
argument_spec=self.argument_spec,
required_if=[
('state', 'present', ['region', 'creationToken', 'quotaInBytes']),
],
supports_check_mode=True
)
self.na_helper = NetAppModule()
# set up state variables
self.parameters = self.na_helper.set_parameters(self.module.params)
# Calling generic AWSCVS restApi class
self.restApi = AwsCvsRestAPI(self.module)
self.data = {}
for key in self.parameters.keys():
self.data[key] = self.parameters[key]
def get_filesystemId(self):
# Check given FileSystem is exists
# Return fileSystemId is found, None otherwise
list_filesystem, error = self.restApi.get('FileSystems')
if error:
self.module.fail_json(msg=error)
for FileSystem in list_filesystem:
if FileSystem['creationToken'] == self.parameters['creationToken']:
return FileSystem['fileSystemId']
return None
def get_filesystem(self, fileSystemId):
# Get FileSystem information by fileSystemId
# Return fileSystem Information
filesystemInfo, error = self.restApi.get('FileSystems/%s' % fileSystemId)
if error:
self.module.fail_json(msg=error)
else:
return filesystemInfo
return None
def is_job_done(self, response):
# check jobId is present and equal to 'done'
# return True on success, False otherwise
try:
job_id = response['jobs'][0]['jobId']
except TypeError:
job_id = None
if job_id is not None and self.restApi.get_state(job_id) == 'done':
return True
return False
def create_fileSystem(self):
# Create fileSystem
api = 'FileSystems'
response, error = self.restApi.post(api, self.data)
if not error:
if self.is_job_done(response):
return
error = "Error: unexpected response on FileSystems create: %s" % str(response)
self.module.fail_json(msg=error)
def delete_fileSystem(self, fileSystemId):
# Delete FileSystem
api = 'FileSystems/' + fileSystemId
self.data = None
response, error = self.restApi.delete(api, self.data)
if not error:
if self.is_job_done(response):
return
error = "Error: unexpected response on FileSystems delete: %s" % str(response)
self.module.fail_json(msg=error)
def update_fileSystem(self, fileSystemId):
# Update FileSystem
api = 'FileSystems/' + fileSystemId
response, error = self.restApi.put(api, self.data)
if not error:
if self.is_job_done(response):
return
error = "Error: unexpected response on FileSystems update: %s" % str(response)
self.module.fail_json(msg=error)
    def apply(self):
        """
        Idempotent entry point: determine whether the file system must be
        created, deleted or updated, perform the change (unless in check
        mode), then exit the module via exit_json.
        """
        fileSystem = None
        fileSystemId = self.get_filesystemId()
        if fileSystemId:
            # Getting the FileSystem details
            fileSystem = self.get_filesystem(fileSystemId)
        # na_helper decides "create"/"delete"/None from current state vs. desired state.
        cd_action = self.na_helper.get_cd_action(fileSystem, self.parameters)
        if cd_action is None and self.parameters['state'] == 'present':
            # Check if we need to update the fileSystem
            update_fileSystem = False
            if fileSystem['quotaInBytes'] is not None and 'quotaInBytes' in self.parameters \
                    and fileSystem['quotaInBytes'] != self.parameters['quotaInBytes']:
                update_fileSystem = True
            elif fileSystem['creationToken'] is not None and 'creationToken' in self.parameters \
                    and fileSystem['creationToken'] != self.parameters['creationToken']:
                update_fileSystem = True
            elif fileSystem['serviceLevel'] is not None and 'serviceLevel' in self.parameters \
                    and fileSystem['serviceLevel'] != self.parameters['serviceLevel']:
                update_fileSystem = True
            elif fileSystem['exportPolicy']['rules'] is not None and 'exportPolicy' in self.parameters:
                # NOTE(review): this compares every existing rule against every
                # requested rule (a cross product), so ANY pair mismatch marks
                # the file system for update -- confirm this is the intended
                # change-detection semantics rather than a positional compare.
                for rule_org in fileSystem['exportPolicy']['rules']:
                    for rule in self.parameters['exportPolicy']['rules']:
                        if rule_org['allowedClients'] != rule['allowedClients']:
                            update_fileSystem = True
                        elif rule_org['unixReadOnly'] != rule['unixReadOnly']:
                            update_fileSystem = True
                        elif rule_org['unixReadWrite'] != rule['unixReadWrite']:
                            update_fileSystem = True
            if update_fileSystem:
                self.na_helper.changed = True
        result_message = ""
        if self.na_helper.changed:
            if self.module.check_mode:
                # Skip changes
                result_message = "Check mode, skipping changes"
            else:
                if cd_action == "create":
                    self.create_fileSystem()
                    result_message = "FileSystem Created"
                elif cd_action == "delete":
                    self.delete_fileSystem(fileSystemId)
                    result_message = "FileSystem Deleted"
                else:   # modify
                    self.update_fileSystem(fileSystemId)
                    result_message = "FileSystem Updated"
        self.module.exit_json(changed=self.na_helper.changed, msg=result_message)
def main():
    """
    Module entry point: construct the AwsCvsNetappFileSystem handler and
    apply the requested state.  apply() exits the process through
    AnsibleModule.exit_json / fail_json.
    """
    aws_cvs_netapp_filesystem = AwsCvsNetappFileSystem()
    aws_cvs_netapp_filesystem.apply()
if __name__ == '__main__':
    main()
| hyperized/ansible | lib/ansible/modules/cloud/amazon/aws_netapp_cvs_FileSystems.py | Python | gpl-3.0 | 12,011 |
# service.py is part of Panopticon.
# Panopticon is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Panopticon is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Panopticon. If not, see <http://www.gnu.org/licenses/>.
from panopticon.core.base import ServiceAttribute
from panopticon.core.actions.base import (ManagerDependantActionLauncher,
DependantAction, DecoratorAction, ActionManager)
class ServiceAction(ServiceAttribute, DependantAction):
    """An action bound to a service, ordered by the service's dependencies."""

    _excluded_values_names = ["manager"]

    def __init__(self, name=None, service=None, launcher=None):
        super(ServiceAction, self).__init__(name=name, service=service)
        DependantAction.__init__(self, launcher=launcher)

    def check_required(self, action):
        """Another action is required when its service is one of ours' prerequisites."""
        return action.service in self.service.required_services

    def set_running_env(self, running_env):
        """Expose this action and its owning service on the running environment."""
        running_env.action = self
        running_env.service = self.service

    def __repr__(self):
        fields = [template % value
                  for template, value in (("service:'%s'", self.service),
                                          ("name:'%s'", self.name))
                  if value is not None]
        return "<%s %s>" % (type(self).__name__, " ".join(fields))

    def __str__(self):
        if self.service is None:
            return self.name
        return ".".join((str(self.service), self.name))
class ServiceDecoratorAction(DecoratorAction, ServiceAction):
    """Service action built by decorating a plain function: the wrapped
    callable supplies the behavior, ServiceAction the service binding."""
    def __init__(self, function, name=None, service=None, launcher=None):
        super(ServiceDecoratorAction, self).__init__(function)
        ServiceAction.__init__(self, name=name, service=service,
                               launcher=launcher)
# Decorator alias: use @service_action to mark a function as a service action.
service_action = ServiceDecoratorAction
class ServiceActionLauncher(ManagerDependantActionLauncher, ServiceAction):
    """Launcher that fans a service-level action out across the service's roles."""
    def __init__(self, name=None, service=None, launcher=None):
        # NOTE(review): the manager launcher is initialized with (name,
        # service.roles) positionally -- confirm that matches
        # ManagerDependantActionLauncher.__init__'s signature.
        super(ServiceActionLauncher, self).__init__(name, service.roles)
        ServiceAction.__init__(self, name=name, service=service,
                               launcher=launcher)
    def launch(self, *args, **kwargs):
        # Pure delegation to ManagerDependantActionLauncher.launch.
        super(ServiceActionLauncher, self).launch(*args, **kwargs)
class ServiceActionManager(ActionManager):
    """Action manager for a service: merges actions declared on the service
    itself with those contributed by its roles."""

    action_launcher_class = ServiceActionLauncher
    _managed_obj_name = "service"
    _manager_attribute_class = ServiceAction

    def _get_base_dict(self):
        """Build the initial name -> action mapping.

        Actions declared directly on the service win; for each role action
        whose name is not already taken, a fresh launcher is created bound to
        this service.  Uses a set (and `not in`) instead of the original
        O(n) list membership with the `not x in` anti-idiom.
        """
        service_action_class = self.action_launcher_class
        actions = {}
        defined_action_names = set()
        for aname, action in self.service._meta["actions"]:
            defined_action_names.add(aname)
            actions[aname] = action
        for rname, role in self.service.roles:
            for raname, action in role.actions:
                if raname not in defined_action_names:
                    actions[raname] = service_action_class(name=raname,
                                                           service=self.service)
                    defined_action_names.add(raname)
        return actions
| llou/panopticon | panopticon/core/actions/service.py | Python | gpl-3.0 | 3,447 |
#############################################################################
##
## Copyright (c) 2011 Riverbank Computing Limited <info@riverbankcomputing.com>
##
## This file is part of PyQt.
##
## This file may be used under the terms of the GNU General Public
## License versions 2.0 or 3.0 as published by the Free Software
## Foundation and appearing in the files LICENSE.GPL2 and LICENSE.GPL3
## included in the packaging of this file. Alternatively you may (at
## your option) use any later version of the GNU General Public
## License if such license has been publicly approved by Riverbank
## Computing Limited (or its successors, if any) and the KDE Free Qt
## Foundation. In addition, as a special exception, Riverbank gives you
## certain additional rights. These rights are described in the Riverbank
## GPL Exception version 1.1, which can be found in the file
## GPL_EXCEPTION.txt in this package.
##
## Please review the following information to ensure GNU General
## Public Licensing requirements will be met:
## http://trolltech.com/products/qt/licenses/licensing/opensource/. If
## you are unsure which license is appropriate for your use, please
## review the following information:
## http://trolltech.com/products/qt/licenses/licensing/licensingoverview
## or contact the sales department at sales@riverbankcomputing.com.
##
## This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
## WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
##
#############################################################################
# If pluginType is MODULE, the plugin loader will call moduleInformation. The
# variable MODULE is inserted into the local namespace by the plugin loader.
# (This file is executed by uic's plugin loader only; MODULE does not exist
# when the file is imported or run standalone.)
pluginType = MODULE
# moduleInformation() must return a tuple (module, widget_list). If "module"
# is "A" and any widget from this module is used, the code generator will write
# "import A". If "module" is "A[.B].C", the code generator will write
# "from A[.B] import C". Each entry in "widget_list" must be unique.
def moduleInformation():
    """Tell the uic code generator to emit
    'from PyQt4.Qsci import QsciScintilla' whenever any widget from this
    module is used.  Each widget name in the returned tuple must be unique."""
    widget_list = ("QsciScintilla",)
    return "PyQt4.Qsci", widget_list
| pyqt/maya2012-qt4 | PyQt4/uic/widget-plugins/qscintilla.py | Python | gpl-3.0 | 2,135 |
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
class ModuleDocFragment(object):
    """Shared documentation fragment for dnos9 network modules.

    Fix: the user-facing DOCUMENTATION text contained the misspellings
    'priviledged' and 'excecute'; corrected to 'privileged' / 'execute'.
    """

    # Standard files documentation fragment
    DOCUMENTATION = """
options:
  host:
    description:
      - Specifies the DNS host name or address for connecting to the remote
        device over the specified transport. The value of host is used as
        the destination address for the transport.
    required: true
  port:
    description:
      - Specifies the port to use when building the connection to the remote
        device.
    required: false
    default: 22
  username:
    description:
      - User to authenticate the SSH session to the remote device. If the
        value is not specified in the task, the value of environment variable
        ANSIBLE_NET_USERNAME will be used instead.
    required: false
  password:
    description:
      - Password to authenticate the SSH session to the remote device. If the
        value is not specified in the task, the value of environment variable
        ANSIBLE_NET_PASSWORD will be used instead.
    required: false
    default: null
  ssh_keyfile:
    description:
      - Path to an ssh key used to authenticate the SSH session to the remote
        device. If the value is not specified in the task, the value of
        environment variable ANSIBLE_NET_SSH_KEYFILE will be used instead.
    required: false
  authorize:
    description:
      - Instructs the module to enter privileged mode on the remote device
        before sending any commands. If not specified, the device will
        attempt to execute all commands in non-privileged mode. If the value
        is not specified in the task, the value of environment variable
        ANSIBLE_NET_AUTHORIZE will be used instead.
    required: false
    default: no
    choices: ['yes', 'no']
  auth_pass:
    description:
      - Specifies the password to use if required to enter privileged mode
        on the remote device. If I(authorize) is false, then this argument
        does nothing. If the value is not specified in the task, the value of
        environment variable ANSIBLE_NET_AUTH_PASS will be used instead.
    required: false
    default: none
  timeout:
    description:
      - Specifies idle timeout (in seconds) for the connection. Useful if the
        console freezes before continuing. For example when saving
        configurations.
    required: false
    default: 10
  provider:
    description:
      - Convenience method that allows all M(dnos9) arguments to be passed as
        a dict object. All constraints (required, choices, etc) must be
        met either by individual arguments or values in this dict.
    required: false
    default: null
"""
| wkeeling/ansible | lib/ansible/utils/module_docs_fragments/dnos9.py | Python | gpl-3.0 | 3,305 |
# Mustache template for the generated iptables rules file.  The static part
# drops all INPUT by default while allowing loopback, ssh (22), established
# connections and ICMP; the {{#rule}} section renders per-source tcp rules
# ({{#tcprule}}) and open-to-the-world rules ({{#allrule}}).
template = """# Generated on {{dt}}
*filter
:INPUT DROP
:FORWARD ACCEPT
:OUTPUT ACCEPT
-A INPUT -i lo -j ACCEPT
-A INPUT -p tcp -m tcp --dport 22 -j ACCEPT
-A INPUT -i eth0 -m state --state RELATED,ESTABLISHED -j ACCEPT
-A INPUT -i eth1 -m state --state RELATED,ESTABLISHED -j ACCEPT
-A INPUT -p icmp -j ACCEPT
{{#rule}}{{#tcprule}}
-A INPUT -s {{source}}/32 -p tcp -m tcp --dport {{dport}} -m state --state NEW,ESTABLISHED -j ACCEPT
{{/tcprule}}{{#allrule}}
-A INPUT -p {{protocol}} -m {{protocol}} --dport {{dport}} -j ACCEPT
{{/allrule}}{{/rule}}
COMMIT
"""
import pystache
import datetime
# securityGroups are a hash of "security groups" and a list of boxes in each
# group
securityGroups = {'Database': ['aerolith-pg'],
                  'Web': ['aerolith-web'],
                  'Wordpress': ['AerolithWP'],
                  'Dev': ['ubuntu-512mb-sfo1-01']
                  }
# groupRules tell you for each security groups, which security groups
# can connect to it and what ports
# note all of these have port 22 (ssh) open by default (see template above)
# Each entry is (source security group | 'all', port-or-port-range).
groupRules = {'Web': [('all', 80), ('all', 443), ('all', 21), ('all', 20),
                      ('all', '61052:61057'), ('all', 8080)],
              'Redis': [('Web', 6379), ('all', 80)],
              'Database': [('Web', 5432)],
              'Dev': [('all', 80), ('all', 443)]
              }
def gen_firewall(securityGroup, servers):
    """Render the iptables rules file for *securityGroup*.

    securityGroup -- key into the module-level groupRules dict.
    servers -- iterable of server dicts; each is expected to have a 'name'
               and a 'networks' entry (DigitalOcean droplet shape -- the
               first v4 address is used as the allowed source).

    Writes 'iptables.<securityGroup>.rules' and returns the rendered text.
    """
    context = {'rule': {'tcprule': [], 'allrule': []},
               'dt': str(datetime.datetime.now())}
    rule = groupRules[securityGroup]
    for subrule in rule:
        if subrule[0] == 'all':
            # Open to the world on this port.
            port = subrule[1]
            context['rule']['allrule'].append({'dport': port,
                                               'protocol': 'tcp'})
        else:
            for server in servers:
                # for each server in the security group in question
                # add its private ip to the firewall
                if server['name'] in securityGroups[subrule[0]]:
                    port = subrule[1]
                    context['rule']['tcprule'].append(
                        {'source': server['networks']['v4'][0]['ip_address'],
                         'dport': port
                         })
    res = pystache.render(template, context)
    # 'with' guarantees the handle is closed even when the write fails; text
    # mode because pystache.render returns str (the original 'wb' handle plus
    # a str write breaks under Python 3).
    with open('iptables.' + securityGroup + '.rules', 'w') as f:
        f.write(res)
    return res
| domino14/Webolith | scripts/gen_firewall.py | Python | gpl-3.0 | 2,398 |
import os
import sys
import time
import numpy
import logging
from stoqs import models as m
from django.conf import settings
from django.db.models import Avg
from django.http import HttpResponse, HttpResponseBadRequest
import pprint
logger = logging.getLogger(__name__)
class InvalidLimits(Exception):
    """Raised when user-supplied color limits (cmin/cmax) cannot be used."""
    pass
def readCLT(fileName):
    '''
    Read the color lookup table from disk and return a python list of rgb tuples.

    Each line of fileName must contain exactly three whitespace-separated
    color components; they are returned as [r, g, b] lists of floats.
    '''
    # 'with' closes the file deterministically; the original left the handle
    # open until garbage collection.
    with open(fileName, 'r') as cltFile:
        rows = [line.strip().split() for line in cltFile]
    return [[float(r), float(g), float(b)] for (r, g, b) in rows]
class KML(object):
    '''
    Manage the construction of KML files from stoqs. Several options may be set on initialization and
    clients can get KML output with the kmlResponse() method.
    '''
    def __init__(self, request, qs_mp, qparams, stoqs_object_name, **kwargs):
        '''
        Possible kwargs and their default values:
        @withTimeStamps: True
        @withLineStrings: True
        @withFullIconURL: True
        @stride: explicit value; otherwise taken from request.GET (default 1)
        '''
        self.request = request
        self.qs_mp = qs_mp
        self.qparams = qparams
        self.stoqs_object_name = stoqs_object_name
        # Option flags default to True unless the caller overrides them;
        # kwargs.get replaces the original repetitive if/else ladders.
        self.withTimeStampsFlag = kwargs.get('withTimeStamps', True)
        self.withLineStringsFlag = kwargs.get('withLineStrings', True)
        self.withFullIconURLFlag = kwargs.get('withFullIconURL', True)
        if 'stride' in kwargs:
            # If passed in as an argument
            self.stride = kwargs['stride']
        else:
            # Check if in request, otherwise set it to 1
            self.stride = int(self.request.GET.get('stride', 1))
    def kmlResponse(self):
        '''
        Return a response that is a KML representation of the existing MeasuredParameter query that is in self.qs_mp.
        pName is either the parameter__name or parameter__standard_name string; parameter__id takes precedence
        over both. self.stride subsamples the query set.
        '''
        response = HttpResponse()
        if self.qs_mp is None:
            raise Exception('self.qs_mp is None.')
        # If both selected parameter__name takes priority over parameter__standard_name. If parameter__id supplied that takes overall precedence.
        pName = None
        if 'parameter__standard_name' in self.qparams:
            pName = self.qparams['parameter__standard_name']
        if 'parameter__name' in self.qparams:
            pName = self.qparams['parameter__name']
        if 'parameter__id' in self.qparams:
            logger.debug('parameter__id = %s', self.qparams['parameter__id'])
            pName = m.Parameter.objects.using(self.request.META['dbAlias']).get(id=int(self.qparams['parameter__id'])).name
            logger.debug('pName = %s', pName)
        if not pName:
            # NOTE(review): NoDataForKML is not defined or imported in this
            # module as far as this file shows -- this raise would itself fail
            # with NameError; confirm where NoDataForKML lives.
            raise NoDataForKML('parameter__name, parameter__standard_name, nor parameter__id specified')
        logger.debug('type(self.qs_mp) = %s', type(self.qs_mp))
        logger.debug('self.stride = %d', self.stride)
        logger.debug('self.stoqs_object_name = %s', self.stoqs_object_name)
        if self.stoqs_object_name == 'measured_parameter':
            try:
                # Expect the query set self.qs_mp to be a collection of value lists
                data = [(mp['measurement__instantpoint__timevalue'], mp['measurement__geom'].x, mp['measurement__geom'].y,
                         mp['measurement__depth'], mp['parameter__name'], mp['datavalue'], mp['measurement__instantpoint__activity__platform__name'])
                        for mp in self.qs_mp[::self.stride]]
            except TypeError:
                # Otherwise expect self.qs_mp to be a collection of model instances
                data = [(mp.measurement.instantpoint.timevalue, mp.measurement.geom.x, mp.measurement.geom.y,
                         mp.measurement.depth, mp.parameter.name, mp.datavalue, mp.measurement.instantpoint.activity.platform.name)
                        for mp in self.qs_mp[::self.stride]]
            try:
                folderName = "%s_%.1f_%.1f" % (pName, float(self.qparams['measurement__depth__gte']), float(self.qparams['measurement__depth__lte']))
            except KeyError:
                folderName = "%s_" % (pName,)
        elif self.stoqs_object_name == 'sampled_parameter':
            try:
                # Expect the query set self.qs_mp to be a collection of value lists
                data = [(mp['sample__instantpoint__timevalue'], mp['sample__geom'].x, mp['sample__geom'].y,
                         mp['sample__depth'], mp['parameter__name'], mp['datavalue'], mp['sample__instantpoint__activity__platform__name'])
                        for mp in self.qs_mp[::self.stride]]
            except TypeError:
                # Otherwise expect self.qs_mp to be a collection of model instances
                data = [(mp.sample.instantpoint.timevalue, mp.sample.geom.x, mp.sample.geom.y,
                         mp.sample.depth, mp.parameter.name, mp.datavalue, mp.sample.instantpoint.activity.platform.name)
                        for mp in self.qs_mp[::self.stride]]
            try:
                folderName = "%s_%.1f_%.1f" % (pName, float(self.qparams['sample__depth__gte']), float(self.qparams['sample__depth__lte']))
            except KeyError:
                folderName = "%s_" % (pName,)
        # Group the rows by platform name (row index 6).
        dataHash = {}
        for d in data:
            try:
                dataHash[d[6]].append(d)
            except KeyError:
                dataHash[d[6]] = []
                dataHash[d[6]].append(d)
        if not dataHash:
            logger.exception('No data collected for making KML within the constraints provided')
            return response
        # Escape '&' so the query string is valid inside the KML <description>.
        descr = self.request.get_full_path().replace('&', '&amp;')
        logger.debug(descr)
        try:
            kml = self.makeKML(self.request.META['dbAlias'], dataHash, pName, folderName, descr, self.request.GET.get('cmin', None), self.request.GET.get('cmax', None))
        except InvalidLimits as e:
            logger.exception(e)
            return response
        response['Content-Type'] = 'application/vnd.google-earth.kml+xml'
        response.write(kml)
        return response
def makeKML(self, dbAlias, dataHash, pName, title, desc, cmin=None, cmax=None):
'''
Generate the KML for the point in mpList
cmin and cmax are the color min and max
'''
#
# Define the color lookup table and the color limits from 2.5 and 97.5 percentiles for each variable
#
clt = readCLT(os.path.join(settings.STATICFILES_DIRS[0], 'colormaps', 'jetplus.txt'))
climHash = {}
for p in m.Parameter.objects.using(dbAlias).all().values_list('name'):
pn = p[0]
qs = m.ActivityParameter.objects.using(dbAlias).filter(parameter__name=pn).aggregate(Avg('p025'), Avg('p975'))
climHash[pn] = (qs['p025__avg'], qs['p975__avg'],)
##logger.debug('Color lookup min, max values:\n' + pprint.pformat(climHash))
pointKMLHash = {}
lineKMLHash = {}
if cmin and cmax:
try:
clim = (float(cmin), float(cmax),)
except ValueError:
raise InvalidLimits('Cannot make KML with specified cmin, cmax of %s, %s' % (cmin, cmax))
else:
try:
clim = climHash[pName]
except KeyError as e:
logger.warn('Parameter "%s" not in Parameter table in database %s' % (pName, dbAlias))
logger.warn('Setting clim to (-1, 1)')
clim = (-1, 1)
##logger.debug('clim = %s', clim)
for k in dataHash.keys():
(pointStyleKML, pointKMLHash[k]) = self._buildKMLpoints(k, dataHash[k], clt, clim)
if self.withLineStringsFlag:
(lineStyleKML, lineKMLHash[k]) = self._buildKMLlines(k, dataHash[k], clt, clim)
else:
logger.debug('Not drawing LineStrings for platform = %s', k)
#
# KML header
#
kml = '''<?xml version="1.0" encoding="UTF-8"?>
<kml xmlns="http://www.opengis.net/kml/2.2" xmlns:gx="http://www.google.com/kml/ext/2.2" xmlns:kml="http://www.opengis.net/kml/2.2" xmlns:atom="http://www.w3.org/2005/Atom">
<!-- %s -->
<!-- Mike McCann MBARI 28 October 2010 -->
<Document>
<name>%s</name>
<description>%s</description>
''' % ('Automatically generated by STOQS', title, desc)
kml += pointStyleKML
if self.withLineStringsFlag:
kml += lineStyleKML
#
# See that the platforms are alphabetized in the KML. (The point and line KMLHashes will have the same keys.)
#
platList = pointKMLHash.keys()
platList.sort()
for plat in platList:
kml += '''<Folder>
<name>%s Points</name>
%s
</Folder>''' % (plat, pointKMLHash[plat])
if self.withLineStringsFlag:
kml += '''<Folder>
<name>%s Lines</name>
%s
</Folder>''' % (plat, lineKMLHash[plat])
#
# Footer
#
kml += '''</Document>
</kml>'''
return kml
    def _buildKMLlines(self, plat, data, clt, clim):
        '''
        Build KML placemark LineStrings of all the point data in `list`
        Use distinctive line colors for each platform.
        the same way as is done in the auvctd dorado science data processing.
        `data` are the results of a query, say from xySlice()
        `clt` is a Color Lookup Table equivalent to a jetplus clt as used in Matlab
        `clim` is a 2 element list equivalent to clim in Matlab
        Return strings of style and point KML that can be included in a master KML file.
        '''
        # Fixed per-platform line styles (clt/clim are unused here; colors come
        # from these named styles).
        styleKml = '''
<Style id="Tethys">
    <LineStyle>
        <color>ff0055ff</color>
        <width>2</width>
    </LineStyle>
</Style>
<Style id="Gulper_AUV">
    <LineStyle>
        <color>ff00ffff</color>
        <width>2</width>
    </LineStyle>
</Style>
<Style id="John Martin">
    <LineStyle>
        <color>ffffffff</color>
        <width>1</width>
    </LineStyle>
</Style>
'''
        #
        # Build the LineString for the points
        #
        # One two-point LineString placemark per consecutive pair of rows.
        lineKml = ''
        lastCoordStr = ''
        for row in data:
            (dt, lon, lat, depth, parm, datavalue, platform) = row
            if lat < -90 or lat > 90:
                # HACK warning: Fix any accidentally swapped lat & lons
                foo = lon
                lon = lat
                lat = foo
            # KML wants lon,lat,alt; depth is negated into altitude.
            coordStr = "%.6f,%.6f,-%.1f" % (lon, lat, depth)
            if lastCoordStr:
                if self.withTimeStampsFlag:
                    placemark = """
<Placemark>
<TimeStamp>
<when>%s</when>
</TimeStamp>
<LineString>
<altitudeMode>absolute</altitudeMode>
<coordinates>
%s
</coordinates>
</LineString>
</Placemark> """ % (time.strftime("%Y-%m-%dT%H:%M:%SZ", dt.timetuple()), lastCoordStr + ' ' + coordStr)
                else:
                    placemark = """
<Placemark>
<LineString>
<altitudeMode>absolute</altitudeMode>
<coordinates>
%s
</coordinates>
</LineString>
</Placemark> """ % (lastCoordStr + ' ' + coordStr)
                lineKml += placemark
            lastCoordStr = coordStr
        return (styleKml, lineKml)
    def _buildKMLpoints(self, plat, data, clt, clim):
        '''
        Build KML Placemarks of all the point data in `list` and use colored styles
        the same way as is done in the auvctd dorado science data processing.
        `data` are the results of a query, say from xySlice()
        `clt` is a Color Lookup Table equivalent to a jetplus clt as used in Matlab
        `clim` is a 2 element list equivalent to clim in Matlab
        Return strings of style and point KML that can be included in a master KML file.
        '''
        _debug = False
        #
        # Build the styles for the colors in clt using clim
        #
        if self.withFullIconURLFlag:
            try:
                baseURL = self.request.build_absolute_uri('/')[:-1] + '/' + settings.STATIC_URL
            except KeyError:
                baseURL = 'http://odss.mbari.org' + '/' + settings.STATIC_URL
        else:
            baseURL = settings.STATIC_URL
        # One <Style> per color table entry, named by its aabbggrr hex value.
        styleKml = ''
        for c in clt:
            ge_color = "ff%02x%02x%02x" % ((round(c[2] * 255), round(c[1] * 255), round(c[0] * 255)))
            if _debug:
                logger.debug("c = %s", c)
                logger.debug("ge_color = %s", ge_color)
            style = '''<Style id="%s">
    <IconStyle>
        <color>%s</color>
        <scale>0.3</scale>
        <Icon>
            <href>%s.png</href>
        </Icon>
    </IconStyle>
</Style>
''' % (ge_color, ge_color, os.path.join(baseURL, 'colormaps', 'jetplus_dots', ge_color))
            styleKml += style
        #
        # Build the placemarks for the points
        #
        pointKml = ''
        for row in data:
            (dt, lon, lat, depth, parm, datavalue, platform) = row
            if lat < -90 or lat > 90:
                # HACK Warning: Fix any accidentally swapped lat & lons
                foo = lon
                lon = lat
                lat = foo
            # NOTE(review): this format has a space after the first comma,
            # unlike _buildKMLlines -- confirm Google Earth tolerates it.
            coordStr = "%.6f, %.6f,-%.1f" % (lon, lat, depth)
            if _debug:
                logger.debug("datavalue = %f", float(datavalue))
                logger.debug("clim = %s", clim)
            # Map datavalue linearly onto a clt index, clamped to [0, len-1].
            try:
                clt_index = int(round((float(datavalue) - clim[0]) * ((len(clt) - 1) / float(numpy.diff(clim)))))
            except ZeroDivisionError:
                raise InvalidLimits('cmin and cmax are the same value')
            except ValueError as e:
                # Likely: 'cannot convert float NaN to integer' e.g. for altitude outside of terrain coverage
                continue
            if clt_index < 0:
                clt_index = 0;
            if clt_index > (len(clt) - 1):
                clt_index = int(len(clt) - 1);
            if _debug:
                logger.debug("clt_index = %d", clt_index)
            ge_color_val = "ff%02x%02x%02x" % ((round(clt[clt_index][2] * 255), round(clt[clt_index][1] * 255), round(clt[clt_index][0] * 255)))
            if self.withTimeStampsFlag:
                placemark = """
<Placemark>
<styleUrl>#%s</styleUrl>
<TimeStamp>
<when>%s</when>
</TimeStamp>
<Point>
<altitudeMode>absolute</altitudeMode>
<coordinates>
%s
</coordinates>
</Point>
</Placemark> """ % (ge_color_val, time.strftime("%Y-%m-%dT%H:%M:%SZ", dt.timetuple()), coordStr)
            else:
                placemark = """
<Placemark>
<styleUrl>#%s</styleUrl>
<Point>
<altitudeMode>absolute</altitudeMode>
<coordinates>
%s
</coordinates>
</Point>
</Placemark> """ % (ge_color_val, coordStr)
            pointKml += placemark
        return (styleKml, pointKml)
    def _buildKMLlabels(self, plat, data, clt, clim):
        '''
        Build KML Placemarks of the last point of the data and give it a label
        Return strings of style and point KML that can be included in a master KML file.
        '''
        # Not implemented: returns None; callers must not rely on its output yet.
        pass
| josephmfaulkner/stoqs | stoqs/utils/Viz/KML.py | Python | gpl-3.0 | 15,397 |
# vDial-up client
# Copyright (C) 2015 - 2017 Nathaniel Olsen
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from time import sleep
import socket
import libs.vDialupcore as core
from multiprocessing import Process
import sys
import struct
# Interactive prompt shown when the server's MD5SUM differs from the one on file.
# NOTE(review): several latent NameErrors here --
#  * `received` is never defined in this function or module scope;
#  * `MD5SUM_on_file` is unbound when the else branch is taken;
#  * `init` is presumably main.init (there is no module-level init).
# Confirm against the server protocol before relying on this path.
def MD5SUM_mismatch(vNumber_to_connect, sock):
    print("*Warning: The server's MD5SUM does not match with the one listed on file, Do you wish to continue? (Y/N)")
    if vNumber_to_connect == core.RegServ_vNumber:
        MD5SUM_on_file = core.RegServ_MD5SUM
    else:
        pass # Right now, there is no way to retrieve a server's md5sum until I implement md5sum retriving in RegServ.
    print("MD5SUM on file: %s" % (MD5SUM_on_file))
    print("MD5SUM according to server: %s" % (received.split()[1]))
    print("")
    choice = input("Enter choice (Y/N): ")
    if choice == 'Y' or choice == 'y':
        init(sock, vNumber_to_connect)
    if choice == 'N' or choice == 'n':
        sys.exit() # Exit for now.
class main():
    """Namespace class holding the client's length-prefixed protocol helpers."""

    def send_msg(sock, msg):
        """Send *msg* over *sock*, prefixed with its length as a 4-byte
        big-endian unsigned int (network byte order)."""
        header = struct.pack('>I', len(msg))
        sock.sendall(header + str.encode(msg))
def recv_msg(sock):
# Read message length and unpack it into an integer
raw_msglen = main.recvall(sock, 4)
if not raw_msglen:
return None
msglen = struct.unpack('>I', str.encode(raw_msglen))[0]
return main.recvall(sock, msglen)
def recvall(sock, n):
# Helper function to recv n bytes or return None if EOF is hit
data = ''
while len(data) < n:
packet = (sock.recv(n - len(data)).decode('utf-8'))
if not packet:
return None
data += packet
return data
    def servping(sock):
        # Keep-alive loop: every 20 s send SERVPING and expect PONG back.
        # NOTE(review): main.listen_for_data is not defined anywhere in this
        # module, so the first iteration would raise AttributeError.  Also,
        # `break` on a successful PONG ends the keep-alive after one exchange
        # -- presumably `continue` was intended.  Confirm before enabling
        # (the caller that starts this loop is currently commented out).
        while 1:
            sleep(20)
            sock.sendall(bytes("SERVPING" + "\n", "utf-8"))
            if main.listen_for_data(sock) == "PONG":
                break
            else:
                print("Disconnected: Connection timeout.")
    def vdialing(vNumber_to_connect, vNumber_IP):
        # Dial a vNumber: open a TCP connection to port 5000, handshake with
        # INITPING/PONG, then verify the server's MD5SUM before init.
        # NOTE(review): each main.recv_msg() call consumes a NEW message, so
        # checking split()[0] on one call and split()[1] on the next reads two
        # different messages -- confirm against the server protocol; it looks
        # like the response should be read once into a variable.
        if core.config['use_ipv6_when_possible']:
            sock = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
        else:
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        print("vDialing %s..." % (vNumber_to_connect))
        # "000000000" is the unregistered default vNumber (see init below).
        if core.config['vDial-up Settings']['vNumber'] == "000000000":
            core.dialupnoise()
            try:
                sock.connect((vNumber_IP, 5000))
            except ConnectionRefusedError:
                print("Error: Connection Refused.")
                sys.exit()
            main.send_msg(sock, "INITPING")
            if main.recv_msg(sock) == "PONG":
                print("Connected.")
                #Process(target=main.servping, args=[sock]).start() # The ability to check if a server connection is still alive is coming soon.
                main.send_msg(sock, "MD5SUMCHECK")
                if main.recv_msg(sock).split()[0] == "MD5SUM:":
                    if main.recv_msg(sock).split()[1] == core.RegServ_MD5SUM:
                        print("MD5SUM verification was succeeded.")
                    else:
                        MD5SUM_mismatch(vNumber_to_connect, sock)
                else:
                    print("Error: Unable to retrieve MD5SUM.")
                main.init(sock, vNumber_to_connect)
            else:
                print("Error: Server did not properly respond to INITPING, disconnecting.")
        else:
            # NOTE(review): Process(target=core.dialupnoise()) CALLS the
            # function and passes its result as target; presumably
            # Process(target=core.dialupnoise) was intended (see the
            # commented-out servping line above for the correct form).
            Process(target=core.dialupnoise()).start()
            sock.connect((vNumber_IP, 5000))
            main.send_msg(sock, "INITPING")
            if main.recv_msg(sock) == "PONG":
                print("Connected to Registation Server!")
                main.send_msg(sock, "MD5SUMCHECK")
                if main.recv_msg(sock).split()[0] == "MD5SUM:":
                    if main.recv_msg(sock).split()[1] == core.RegServ_MD5SUM:
                        print("MD5SUM verification was succeeded.")
                    else:
                        MD5SUM_mismatch(vNumber_to_connect, sock)
                else:
                    print("Error: Unable to retrieve MD5SUM.")
            else:
                print("Error: Server did not properly respond to INITPING, disconnecting.")
    def init(sock, vNumber_to_connect):
        # Post-handshake initialization: announce our vNumber; if still the
        # unregistered default ("000000000"), register with the server and
        # persist the vNumber/Key it assigns; otherwise authenticate with the
        # saved key.  Ends by sending INIT.
        # NOTE(review): as in vdialing, every main.recv_msg() call consumes a
        # fresh message, so the chained split()[0]/split()[1]/split()[2]
        # checks each read different messages -- verify against the RegServ
        # protocol.
        main.send_msg(sock, "VNUMBER: {}".format(core.config['vDial-up Settings']['vNumber']))
        if core.config['vDial-up Settings']['vNumber'] == "000000000":
            main.send_msg(sock, "CLIENTREGISTER")
            if main.recv_msg(sock).split()[0] == "CONFIG:":
                if main.recv_msg(sock).split()[1] == "vNumber":
                    core.config['vDial-up Settings']['vNumber'] = main.recv_msg(sock).split()[2]
                    core.saveconfig()
                if main.recv_msg(sock).split()[1] == "Key":
                    core.config['vDial-up Settings']['Key'] = main.recv_msg(sock).split()[2]
                    core.saveconfig()
            if main.recv_msg(sock).split()[0] == "TOCLIENT:":
                print(" ".join(main.recv_msg(sock).split()[2:]))
        else:
            main.send_msg(sock, "KEY: {}".format(core.config['vDial-up Settings']['Key']))
        main.send_msg(sock, "INIT")
| vDial-up/client | libs/vDialing.py | Python | gpl-3.0 | 5,879 |
#!/usr/bin/env python
# coding=utf-8
import argparse
import urllib2 # python 2.7
import re
import sqlite3
import traceback
import datetime
import csv
headers = { 'User-Agent' : 'Mozilla/5.0 (compatible; Googlebot/2.1;'
+' +http://www.google.com/bot.html)' }
# http://www.useragentstring.com
# Command-line interface (Python 2 script): first positional arg selects the
# sub-command, -d selects the sqlite database file.
parser = argparse.ArgumentParser()
parser.add_argument('cmd', choices=['help', 'leech', 'leechuntil', 'list', 'stats', 'search', 'searchconfig', 'config', 'check'])
parser.add_argument('-d',
    help='database name to use (default:database.db)',
    default='database.db')
parser.add_argument('params', nargs='*')
args = parser.parse_args()
print 'Commande :', args.cmd
print 'Args :', args
cmd = args.cmd
database = args.d
# Row display formats: fgen renders an 'apparts' row (id, price, surface,
# postal code, city, name, date, online flag) plus the listing URL; fdb
# prefixes the DB marker, fdbs appends the description column.
fgen = u"{0} {1:6}€ {2:3}m² {3} {4:25} {5:35} {6:2}/{7:2}/{8} {9} enligne:{13}\nhttp://www.leboncoin.fr/ventes_immobilieres/{0}.htm"
fdb = u'DB : ' + fgen
fdbs = fdb + u' {11}'
if cmd == 'help':
    print 'leech [num]: download of data from page [num] (default:1)'
    print 'list'
    print 'stats [code postal]'
if cmd == 'list':
    # Dump every stored listing using the fdb format.
    conn = sqlite3.connect(database)
    c = conn.cursor()
    for tmp in c.execute('SELECT * FROM apparts'):
        print fdb.format(*tmp)
def leechpage(page, cp):
    # Download results page `page` for postal code `cp` from leboncoin,
    # scrape every ad linked there and insert the new ones into `apparts`.
    conn = sqlite3.connect(database)
    c = conn.cursor()
    c.execute('CREATE TABLE IF NOT EXISTS apparts ( '+
        'id text PRIMARY KEY, ' +
        'prix int, ' +
        'surface int, ' +
        'cp int, ville text, ' +
        'nom text, ' +
        'jour int, mois int, annee int, heure text, ' +
        'tel text, ' +
        'desc text, ' +
        'siren text, ' +
        'enligne int)')
    # Search filters are hard-coded into the query string.
    req = urllib2.Request(
        'http://www.leboncoin.fr/ventes_immobilieres/offres/'
        #+ 'provence_alpes_cote_d_azur/bouches_du_rhone/'
        + '?'
        + 'ps=10&pe=14' # price range 250k to 350k
        + '&ros=4' # minimum number of rooms
        + '&ret=1' # 1:house, flat = '&ret=2'
        #+ '&f=p' # p:private seller c:professional
        + '&location=' + cp
        + '&o=' + str(page), None, headers)
    response = urllib2.urlopen(req)
    # Collect every ad id referenced on the listing page.
    re_id = re.compile('ventes_immobilieres/(?P<id>[0-9]+)\.htm')
    m = re_id.finditer(response.read())
    for m2 in m:
        id = m2.group("id")
        url = 'http://www.leboncoin.fr/ventes_immobilieres/' + id + '.htm'
        # Already stored?  Just print it and move on.
        c.execute('SELECT * FROM apparts WHERE id=?', (id,))
        tmp = c.fetchone()
        if(tmp):
            try:
                print fdb.format(*tmp)
            except Exception:
                # Formatting may fail on non-ascii city names: dump debug
                # output and retry with the city name utf-8 encoded.
                print tmp
                tmp = list(tmp)
                print type(tmp[4])
                tmp[4] = tmp[4].encode('utf-8')
                print type(tmp[4])
                print tmp
                print fdb.format(*tmp)
            continue
        try:
            response = urllib2.urlopen(url)
        except Exception as e:
            print 'Error on url', url
            print e
            continue
        # Ad pages are served in cp1252.
        rep = response.read().decode('cp1252')
        try:
            m3 = re.findall('class="price"\>([0-9 ]+).*\<', rep)
            try:
                prix = int(m3[0].replace(' ', ''))
            except:
                # NOTE(review): on a missing price the raw page is dumped and
                # `prix` keeps its previous value (NameError on the first ad).
                print rep
            m3 = re.findall(
                '<th>Surface : </th>\s*<td>([0-9 ]+) m<sup>2</sup>', rep)
            surface = int(m3[0].replace(' ', ''))
            m3 = re.findall(
                '<th>Code postal :</th>\s*<td>([0-9]+)</td>', rep)
            cp = m3[0]
            m3 = re.findall(
                '<th>Ville :</th>\s*<td>([^<]+)</td>', rep)
            try:
                ville = m3[0]
            except IndexError:
                ville = ''
            m3 = re.findall("'utilisateur_v2','N'\)\">([^<]+)</a>", rep)
            nom = m3[0]
            # Publication date, e.g. " Mise en ligne le 12 mars à 14:03."
            m3 = re.findall(' Mise en ligne le (\d+) (.+) à (\d+:\d+).', rep)
            jour = m3[0][0]
            mois = m3[0][1]
            # Map the French month name to its number (0 = unrecognised).
            if mois[:4] == 'janv' : mois = 1
            elif mois[0] == 'f' : mois = 2
            elif mois[:4] == 'mars' : mois = 3
            elif mois[:4] == 'avri' : mois = 4
            elif mois[:3] == 'mai' : mois = 5
            elif mois[:4] == 'juin' : mois = 6
            elif mois[:4] == 'juil' : mois = 7
            elif mois[0] == 'a' : mois = 8
            elif mois[:4] == 'sept' : mois = 9
            elif mois[:4] == 'octo' : mois = 10
            elif mois[:3] == 'nov' : mois = 11
            elif mois[0] == 'd' : mois = 12
            else : mois = 0
            ddt = datetime.datetime.today()
            annee = ddt.year
            # The ad carries no year: if we are early in the year (e.g. Jan 3)
            # and the ad was posted late in a year (e.g. Dec 20),
            # assume it belongs to the previous year.
            if ddt.month < 6 and mois > 6:
                annee -= 1
            heure = m3[0][2]
            # The phone number is only published as an image; keep its name.
            m3 = re.findall('/pg/0([^\.]+)\.gif" class="AdPhonenum', rep)
            try:
                tel_raw = m3[0]
            except:
                tel_raw = ''
            #print m3, m3[0]
            m3 = re.findall('class="content">(.+?)</div>', rep, re.DOTALL)
            try:
                desc = m3[0]
            except:
                desc = ''
            # Professional sellers publish a SIREN number; 0 means private.
            m3 = re.findall('Siren : ([0-9]+)', rep)
            try:
                siren = m3[0]
            except:
                siren = 0
        except Exception as e:
            print 'Error on url', url
            traceback.print_exc()
            #print e
            continue
        #print ville
        f = u'{0:6}€ {1:3}m² {4:4}€/m² {2:5} {3:22} {6:20} {7}/{8}/{9}@{10} {5} {12} {11}'
        print f.format(
            prix, surface, cp, ville, prix / surface,
            url, nom, jour, mois, annee, heure, tel_raw, siren)
        print desc
        # New ads always start flagged as online (enligne=1).
        c.execute('INSERT INTO apparts VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', \
            (id, prix, surface, cp, ville, nom, jour, mois, annee, heure, \
            tel_raw, desc, siren, 1))
        #exit(0)
    conn.commit()
if cmd == 'leech':
    # Download one results page (default: 1) for every configured postal code.
    try:
        page = int(args.params[0])
    except:
        page = 1
    conn = sqlite3.connect(database)
    c = conn.cursor()
    c.execute("SELECT * FROM config")
    tmp = c.fetchone()
    # NOTE(review): `cp` stays undefined when the config table has no 'cp'
    # row, making the loop below raise NameError — confirm whether 'cp' is
    # guaranteed by the 'config' command workflow.
    while(tmp):
        print u'{0:10}: {1}'.format(*tmp)
        if tmp[0] == 'cp' :
            print 'CP:',tmp[1]
            cp = tmp[1].split(',')
        tmp = c.fetchone()
    for cpi in cp:
        print 'Leech', cpi, page
        leechpage(page, cpi)

if cmd == 'leechuntil':
    # Download pages N down to 1 for every configured postal code.
    try:
        page = int(args.params[0])
    except:
        page = 1
    conn = sqlite3.connect(database)
    c = conn.cursor()
    c.execute("SELECT * FROM config")
    tmp = c.fetchone()
    while(tmp):
        print u'{0:10}: {1}'.format(*tmp)
        if tmp[0] == 'cp' :
            print 'CP:',tmp[1]
            cp = tmp[1].split(',')
        tmp = c.fetchone()
    for cpi in cp:
        for i in range(page, 0, -1):
            print 'Leech', cpi, i
            leechpage(i,cpi)
if cmd == 'stats':
    # Price-per-m² statistics per postal code, split between private sellers
    # and professionals (ads carrying a SIREN number).
    try:
        cp = int(args.params[0])
    except:
        cp = 0  # 0 means: all postal codes
    conn = sqlite3.connect(database)
    c = conn.cursor()
    prix_m2_cp = {}      # postal code -> list of €/m² (private sellers)
    prix_m2_cp_pro = {}  # postal code -> list of €/m² (professionals)
    if cp:
        c.execute('SELECT * FROM apparts WHERE cp=?', (cp,))
    else:
        c.execute('SELECT * FROM apparts')
    tmp = c.fetchone()
    while(tmp):
        #print fdb.format(*tmp)
        try:
            prix_m2_cp[tmp[3]]
        except:
            prix_m2_cp[tmp[3]] = []
        try:
            prix_m2_cp_pro[tmp[3]]
        except:
            prix_m2_cp_pro[tmp[3]] = []
        # Keep only plausible prices (1000-8000 €/m²) to filter junk ads.
        if tmp[1]/tmp[2] > 1000 and tmp[1]/tmp[2] < 8000:
            if cp:
                print '{0} {1:3} {2} {3:22} {4}'.format(
                    tmp[1], tmp[2],
                    str(tmp[1]/tmp[2])+'€/m²',
                    tmp[5].encode('utf8'),
                    'http://www.leboncoin.fr/ventes_immobilieres/'+tmp[0]+'.htm')
            #print tmp[12]
            if int(tmp[12]) > 0: # has a SIREN: professional seller
                prix_m2_cp_pro[tmp[3]].append(tmp[1]/tmp[2])
            else: # private seller
                prix_m2_cp[tmp[3]].append(tmp[1]/tmp[2])
        #print tmp[1]/tmp[2], tmp[3]
        tmp = c.fetchone()
    #print prix_m2_cp
    #print prix_m2_cp_pro

    # Map postal code -> town name from the INSEE CSV file.
    cp_ville = {}
    with open('data/insee.csv') as inseefile:
        inseedata = csv.reader(inseefile, delimiter=';')
        for ligne in inseedata:
            try:
                cp_ville[int(ligne[1])] = ligne[0]
                #print int(ligne[1]), ligne[0]
            except ValueError:
                pass

    # NOTE(review): cp_ville[k] raises KeyError for any postal code absent
    # from the INSEE file.
    for k in list(set(prix_m2_cp.keys() + prix_m2_cp_pro.keys())):
        numpart = len(prix_m2_cp[k])
        if numpart: moypart = sum(prix_m2_cp[k])/numpart
        else: moypart = 0
        numpro = len(prix_m2_cp_pro[k])
        if numpro: moypro = sum(prix_m2_cp_pro[k])/numpro
        else: moypro = 0
        if moypro: ratio = moypart / float(moypro)
        else: ratio = 0
        print u'{0:5} part({1:3}): {2:4}€/m² pro({3:3}): {4:4}€/m² {5:3}% {6}'.format( \
            k, numpart, moypart, numpro, moypro, int(ratio*100), cp_ville[k])

if cmd == 'test':
    # Debug helper: dump the INSEE postal-code/town mapping.
    with open('data/insee.csv') as inseefile:
        inseedata = csv.reader(inseefile, delimiter=';')
        for ligne in inseedata:
            try:
                print int(ligne[1]), ligne[0]
            except ValueError:
                pass
if cmd == 'search':
    # Full-text search in the stored ad descriptions.
    try:
        s = args.params[0]
    except:
        raise Exception('Recherche manquante')
    conn = sqlite3.connect(database)
    c = conn.cursor()
    c.execute("SELECT * FROM apparts WHERE desc LIKE ?", ('%'+s+'%',))
    tmp = c.fetchone()
    while(tmp):
        print fdbs.format(*tmp)
        tmp = c.fetchone()

if cmd == 'searchconfig':
    # List stored ads matching the configured postal codes, oldest first.
    conn = sqlite3.connect(database)
    c = conn.cursor()
    c.execute("SELECT * FROM config")
    tmp = c.fetchone()
    # NOTE(review): `cp`/`prixmax`/`surfmin` stay undefined when absent from
    # the config table; only `cp` is actually used below.
    while(tmp):
        print u'{0:10}: {1}'.format(*tmp)
        if tmp[0] == 'cp' :
            print 'CP:',tmp[1]
            cp = tmp[1].split(',')
        if tmp[0] == 'prixmax' :
            print 'PrixMax:',tmp[1]
            prixmax = tmp[1]
        if tmp[0] == 'surfmin' :
            print 'SurfMin:',tmp[1]
            surfmin = tmp[1]
        tmp = c.fetchone()
    c.execute("SELECT * FROM apparts ORDER BY annee,mois,jour,cp,heure,id")
    tmp = c.fetchone()
    while(tmp):
        if str(tmp[3]) in cp:
            print fdb.format(*tmp)
        tmp = c.fetchone()

if cmd == 'config':
    # Without params: print the config table.  With `key value`: upsert it.
    if len(args.params) < 1:
        conn = sqlite3.connect(database)
        c = conn.cursor()
        c.execute("SELECT * FROM config")
        print 'Config:'
        tmp = c.fetchone()
        while(tmp):
            print u'{0:10}: {1}'.format(*tmp)
            tmp = c.fetchone()
    elif len(args.params) != 2:
        raise Exception('Attendu: clé valeur')
    else:
        conn = sqlite3.connect(database)
        c = conn.cursor()
        c.execute('CREATE TABLE IF NOT EXISTS config (key text PRIMARY KEY, value text)')
        try:
            c.execute('INSERT INTO config VALUES (?, ?)', (args.params[0], args.params[1]))
        except sqlite3.IntegrityError:
            # Key already present: update its value instead.
            c.execute('UPDATE config SET value=? WHERE key=?', (args.params[1], args.params[0]))
        conn.commit()
def check_id(id):
    # Return False when the ad page is gone (HTTP error, usually 404),
    # True otherwise.
    # NOTE(review): the parameter shadows the builtin `id`.
    url = 'http://www.leboncoin.fr/ventes_immobilieres/' + id + '.htm'
    try:
        response = urllib2.urlopen(url)
    except urllib2.HTTPError as he:
        #Usually, 404
        print 'HTTP Error on url', url
        print he
        return False
    except Exception as e:
        # Non-HTTP failures (network glitches, ...) deliberately fall
        # through to `return True` so a transient error does not mark the
        # ad as dead — presumably intentional, TODO confirm.
        print 'Error on url', url
        print e
    #rep = response.read().decode('cp1252')
    #if rep.find(u'Cette annonce est désactivée') > -1: return False
    return True
if cmd == 'check':
    # Re-check every ad still flagged online; flag dead ones enligne=0.
    conn = sqlite3.connect(database)
    c = conn.cursor()
    c.execute("SELECT id FROM apparts WHERE enligne=1")
    tmp = c.fetchone()
    dead = []
    while(tmp):
        print tmp
        if not check_id(tmp[0]): dead.append(tmp[0])
        tmp = c.fetchone()
    for d in dead:
        print d
        c.execute("UPDATE apparts SET enligne=0 WHERE id=?", (d,))
    conn.commit()
    print 'Fin.'
| ofaurax/LeechCoin | leechcoin.py | Python | gpl-3.0 | 12,394 |
# Admission price by age bracket, checked from the oldest bracket down.
idade = 12

if idade >= 65:
    preco = 5    # seniors pay the reduced fare
elif idade >= 18:
    preco = 10   # full adult fare
elif idade >= 4:
    preco = 5    # child fare
else:
    preco = 0    # under four enter free

print('Seu custo de admissão e R$' + str(preco) + '.')
'''
Foi adicionado mais bloco de instrução elif
para idades abaixo de 65. Se caso a idade for maior
que 18 e menor que 65 o bloco e executado, caso a idade
seja acima de 65 executa o último bloco else.
''' | zirou30/python_student | 82.py | Python | gpl-3.0 | 387 |
from skypebot import *
class Skype_Bot:
    """Bridge between the Skype client (via SkypeBot) and the plugin chain.

    The instance registers itself as the event sink, so SkypeBot invokes
    the AttachmentStatus / MessageStatus callbacks defined below.
    """

    def __init__(self, plugins):
        self.skype = Skypebot.Skype(Events=self)
        self.skype.FriendlyName = "Skype Bot Levitan"
        self.skype.Attach()
        self.plugins = plugins

    def AttachmentStatus(self, status):
        # Re-attach whenever the Skype API becomes available again.
        if status != Skypebot.apiAttachAvailable:
            return
        self.skype.Attach()

    def MessageStatus(self, msg, status):
        # Log every event, then feed freshly received messages to plugins.
        print("INCOMING> %s" % msg.Body)
        # msg.MarkAsSeen()
        if status != Skypebot.cmsReceived:
            return
        for handler in self.plugins:
            reply = handler.plugin_process_request(msg)
            if reply['status']:
                msg.Chat.SendMessage(reply['message'])

    def send(self, topic, message):
        """
        Manual send to CONFERENCES to handle command line interface
        :param topic: topic of the conference (it's name)
        :param message: thing to say
        :return:
        """
        matching = (chat for chat in self.skype.Chats if chat.Topic == topic)
        for chat in matching:
            chat.SendMessage(message)
| zemuvier/Python-courses | skype_bot2.py | Python | gpl-3.0 | 1,129 |
from pylab import *
# Observed data and posterior samples (one sample per row).
data = loadtxt('Data/dummy_data.dat')
posterior_sample = atleast_2d(loadtxt('posterior_sample.txt'))

ion()
# Overlay each posterior model curve on the data, one frame per sample.
for i in xrange(0, posterior_sample.shape[0]):
    hold(False)
    plot(data[:,0], data[:,1], 'bo')
    hold(True)
    # The last data.shape[0] columns of a sample hold the model prediction.
    plot(data[:,0], posterior_sample[i, -data.shape[0]:], 'r-')
    ylim([0, 1.1*data[:,1].max()])
    draw()
ioff()
show()

# Posterior distribution over the number of bursts (column 9).
hist(posterior_sample[:,9], 20)
xlabel('Number of Bursts')
show()

# Burst positions live in columns 10..109; zeros mark unused slots.
pos = posterior_sample[:, 10:110]
pos = pos[pos != 0.]
hist(pos, 1000)
xlabel('Time')
title('Positions of Bursts')
show()
| jchiang87/TimeBombs | display.py | Python | gpl-3.0 | 548 |
"""Minimal Python 3 shim around PyQt5 and Pyside2 Qt bindings for QML applications.
Forked from https://github.com/mottosso/Qt.py under MIT License.
Copyright (c) 2016 Marcus Ottosson
Changes
* Dropped Python2 and Qt4 support
* Focus on last Python 3 release
* Focus on last Qt API : QML
Requirements
* make use of lazy loading to speed up startup time !
"""
# Fixme: ressource file Patro/QtApplication/rcc/PatroRessource.py
####################################################################################################
# Enable support for `from Qt import *`
__all__ = []
####################################################################################################
import importlib
import logging
import os
import sys
import types
from .QtConfig import _common_members, _misplaced_members, _compatibility_members
####################################################################################################
# _module_logger = logging.getLogger(__name__)
####################################################################################################
# Flags from environment variables:
# QT_VERBOSE enables diagnostic logging of the shim itself;
# QT_PREFERRED_BINDING is a comma-separated, ordered list of bindings to try.
QT_VERBOSE = bool(os.getenv('QT_VERBOSE'))
QT_PREFERRED_BINDING = os.getenv('QT_PREFERRED_BINDING', '')
if QT_PREFERRED_BINDING:
    # Drop empty entries produced by stray commas.
    QT_PREFERRED_BINDING = list(x for x in QT_PREFERRED_BINDING.split(',') if x)
else:
    # on dec 2018, PySide2 is still not fully operational
    QT_PREFERRED_BINDING = ('PyQt5', 'PySide2')
####################################################################################################
def _new_module(name):
    """Return a fresh, empty module object named `<this package>.<name>`."""
    qualified = '{}.{}'.format(__name__, name)
    return types.ModuleType(qualified)
####################################################################################################
# Reference to Qt.py
# `Qt` is this very module; the binding submodules and the QtCompat
# namespace are attached to it dynamically below.
Qt = sys.modules[__name__]
Qt.QtCompat = _new_module('QtCompat')
####################################################################################################
def _log(text):
    """Write a diagnostic line to stdout, but only in verbose mode."""
    if not QT_VERBOSE:
        return
    sys.stdout.write(text + '\n')
####################################################################################################
def _import_sub_module(module, name):
    """Import and return submodule `name` of binding `module`."""
    _log('_import_sub_module {} {}'.format(module, name))
    qualified = module.__name__ + '.' + name  # e.g. PyQt5.QtCore
    return importlib.import_module(qualified)
####################################################################################################
def _setup(module, extras):
    """Install common submodules

    Arguments:
        module: the root binding module (e.g. PyQt5 or PySide2)
        extras (list): extra module names (e.g. sip/shiboken) that may live
            outside the binding package
    """
    Qt.__binding__ = module.__name__

    for name in list(_common_members) + extras:
        try:
            submodule = _import_sub_module(module, name)
        except ImportError:
            try:
                # For extra modules like sip and shiboken that may not be
                # children of the binding.
                submodule = __import__(name)
            except ImportError:
                continue

        # Keep the real binding submodule under a leading underscore,
        # e.g. Qt._QtCore.
        setattr(Qt, '_' + name, submodule)

        if name not in extras:
            # Public Qt.<name> starts as an empty placeholder module;
            # its members are copied over later by _install().
            setattr(Qt, name, _new_module(name))  # Qt.QtCore = module(so module)
####################################################################################################
def _reassign_misplaced_members(binding):
    """Apply misplaced members from `binding` to Qt.py

    Arguments:
        binding (dict): Misplaced members

    For every ``src -> dst`` entry of ``_misplaced_members[binding]``,
    resolve the object at the dotted path ``src`` inside the real binding
    (or take the explicit value carried by a ``(dst, value)`` tuple) and
    install it at the dotted path ``dst`` on the Qt shim, creating the
    destination module on demand.
    """
    for src, dst in _misplaced_members[binding].items():
        dst_value = None

        # Split "Module.attr.attr" into the module and the attribute path.
        src_parts = src.split('.')
        src_module = src_parts[0]
        src_member = src_parts[1:]  # empty when src is a bare module name

        if isinstance(dst, (list, tuple)):
            # The destination may carry an explicit replacement value.
            dst, dst_value = dst

        dst_parts = dst.split('.')
        dst_module = dst_parts[0]
        # Fix: the previous `if len(dst_parts):` test was always true, so a
        # dotless destination made `dst_parts[1]` raise IndexError.
        if len(dst_parts) > 1:
            dst_member = dst_parts[1]
        else:
            dst_member = None

        # Get the member we want to store in the namespace.
        if not dst_value:
            try:
                _part = getattr(Qt, '_' + src_module)
                while src_member:
                    member = src_member.pop(0)
                    _part = getattr(_part, member)
                dst_value = _part
            except AttributeError:
                # If the member we want to store in the namespace does not
                # exist, there is no need to continue. This can happen if a
                # request was made to rename a member that didn't exist, for
                # example if QtWidgets isn't available on the target platform.
                _log('Misplaced member has no source: {0}'.format(src))
                continue

        try:
            src_object = getattr(Qt, dst_module)
        except AttributeError:
            if dst_module not in _common_members:
                # Only create the Qt parent module if its listed in
                # _common_members. Without this check, if you remove QtCore
                # from _common_members, the default _misplaced_members will add
                # Qt.QtCore so it can add Signal, Slot, etc.
                msg = "Not creating missing member module '{m}' for '{c}'"
                _log(msg.format(m=dst_module, c=dst_member))
                continue
            # If the dst is valid but the Qt parent module does not exist
            # then go ahead and create a new module to contain the member.
            setattr(Qt, dst_module, _new_module(dst_module))
            src_object = getattr(Qt, dst_module)
            # Enable direct import of the new module
            sys.modules[__name__ + '.' + dst_module] = src_object

        if not dst_value:
            dst_value = getattr(Qt, '_' + src_module)
            # Fix: walk the remaining attribute path one component at a
            # time; the old `getattr(dst_value, src_member)` passed a *list*
            # to getattr and would raise TypeError when reached.
            for member in src_member:
                dst_value = getattr(dst_value, member)

        setattr(
            src_object,
            dst_member or dst_module,
            dst_value
        )
####################################################################################################
def _build_compatibility_members(binding, decorators=None):
    """Apply `binding` to QtCompat

    Arguments:
        binding (str): Top level binding in _compatibility_members.
        decorators (dict, optional): Provides the ability to decorate the
            original Qt methods when needed by a binding. This can be used
            to change the returned value to a standard value. The key should
            be the classname, the value is a dict where the keys are the
            target method names, and the values are the decorator functions.
    """
    decorators = decorators or dict()

    # Allow optional site-level customization of the compatibility members.
    # This method does not need to be implemented in QtSiteConfig.
    try:
        import QtSiteConfig
    except ImportError:
        pass
    else:
        if hasattr(QtSiteConfig, 'update_compatibility_decorators'):
            QtSiteConfig.update_compatibility_decorators(binding, decorators)

    # Common base class shared by every generated QtCompat class.
    _QtCompat = type('QtCompat', (object,), {})

    for classname, bindings in _compatibility_members[binding].items():
        attrs = {}
        for target, binding in bindings.items():
            # `binding` here is a dotted path into the real binding,
            # e.g. "QtWidgets.QHeaderView.setResizeMode".
            namespaces = binding.split('.')
            try:
                src_object = getattr(Qt, '_' + namespaces[0])
            except AttributeError as e:
                _log('QtCompat: AttributeError: %s' % e)
                # Skip reassignment of non-existing members.
                # This can happen if a request was made to
                # rename a member that didn't exist, for example
                # if QtWidgets isn't available on the target platform.
                continue

            # Walk down any remaining namespace getting the object assuming
            # that if the first namespace exists the rest will exist.
            for namespace in namespaces[1:]:
                src_object = getattr(src_object, namespace)

            # decorate the Qt method if a decorator was provided.
            if target in decorators.get(classname, []):
                # staticmethod must be called on the decorated method to
                # prevent a TypeError being raised when the decorated method
                # is called.
                src_object = staticmethod(
                    decorators[classname][target](src_object))

            attrs[target] = src_object

        # Create the QtCompat class and install it into the namespace
        compat_class = type(classname, (_QtCompat,), attrs)
        setattr(Qt.QtCompat, classname, compat_class)
####################################################################################################
def _pyside2():
    """Initialise PySide2

    These functions serve to test the existence of a binding
    along with set it up in such a way that it aligns with
    the final step; adding members from the original binding
    to Qt.py

    Raises ImportError when PySide2 is not installed, which lets
    _install() fall through to the next preferred binding.
    """
    import PySide2 as module
    extras = []
    # Optional shiboken2 support is currently disabled.
    # try:
    #     from PySide2 import shiboken2
    #     extras.append('shiboken2')
    # except ImportError:
    #     pass

    _setup(module, extras)
    Qt.__binding_version__ = module.__version__

    # if hasattr(Qt, '_shiboken2'):
    #     Qt.QtCompat.wrapInstance = _wrapinstance
    #     Qt.QtCompat.getCppPointer = _getcpppointer
    #     Qt.QtCompat.delete = shiboken2.delete

    if hasattr(Qt, '_QtCore'):
        Qt.__qt_version__ = Qt._QtCore.qVersion()

    # if hasattr(Qt, '_QtWidgets'):
    #     Qt.QtCompat.setSectionResizeMode = \
    #         Qt._QtWidgets.QHeaderView.setSectionResizeMode

    _reassign_misplaced_members('PySide2')
    # _build_compatibility_members('PySide2')
# _build_compatibility_members('PySide2')
####################################################################################################
def _pyqt5():
    """Initialise PyQt5

    Raises ImportError when PyQt5 is not installed, which lets _install()
    fall through to the next preferred binding.
    """
    import PyQt5 as module
    extras = []
    # Optional sip support is currently disabled.
    # try:
    #     import sip
    #     extras.append(sip.__name__)
    # except ImportError:
    #     sip = None

    _setup(module, extras)

    # if hasattr(Qt, '_sip'):
    #     Qt.QtCompat.wrapInstance = _wrapinstance
    #     Qt.QtCompat.getCppPointer = _getcpppointer
    #     Qt.QtCompat.delete = sip.delete

    if hasattr(Qt, '_QtCore'):
        Qt.__binding_version__ = Qt._QtCore.PYQT_VERSION_STR
        Qt.__qt_version__ = Qt._QtCore.QT_VERSION_STR

    # if hasattr(Qt, '_QtWidgets'):
    #     Qt.QtCompat.setSectionResizeMode = \
    #         Qt._QtWidgets.QHeaderView.setSectionResizeMode

    _reassign_misplaced_members('PyQt5')
    # _build_compatibility_members('PyQt5')
####################################################################################################
def _install():
    """Pick the first importable binding and populate the public Qt.* modules.

    Raises ImportError when none of the preferred bindings can be imported.
    """
    # Default order (customise order and content via QT_PREFERRED_BINDING)
    order = QT_PREFERRED_BINDING

    available = {
        'PySide2': _pyside2,
        'PyQt5': _pyqt5,
    }

    _log("Order: {}".format(' '.join(order)))

    found_binding = False
    for name in order:
        _log('Trying %s' % name)

        try:
            available[name]()
            found_binding = True
            break
        except ImportError as e:
            # Binding not installed: try the next one.
            _log('ImportError: %s' % e)
        except KeyError:
            _log("ImportError: Preferred binding '%s' not found." % name)

    if not found_binding:
        # If not binding were found, throw this error
        raise ImportError('No Qt binding were found.')

    # Install individual members
    for name, members in _common_members.items():
        try:
            their_submodule = getattr(Qt, '_' + name)
        except AttributeError:
            continue

        our_submodule = getattr(Qt, name)

        # Enable import *
        __all__.append(name)

        # Enable direct import of submodule,
        # e.g. import Qt.QtCore
        sys.modules[__name__ + '.' + name] = our_submodule

        for member in members:
            # Accept that a submodule may miss certain members.
            try:
                their_member = getattr(their_submodule, member)
            except AttributeError:
                _log("'%s.%s' was missing." % (name, member))
                continue

            setattr(our_submodule, member, their_member)

    # Enable direct import of QtCompat
    sys.modules['Qt.QtCompat'] = Qt.QtCompat
####################################################################################################
# Perform the binding selection at import time.
_install()

####################################################################################################

# Fixme: Python 3.7
# def __getattr__(name):
#     print('__getattr__', name)

####################################################################################################

# Setup Binding Enum states
# Convenience booleans telling which binding was actually loaded.
Qt.IsPySide2 = Qt.__binding__ == 'PySide2'
Qt.IsPyQt5 = not Qt.IsPySide2
| FabriceSalvaire/PyValentina | QtShim/__init__.py | Python | gpl-3.0 | 13,118 |
import os
# Fall back to the Windows settings module when none is configured yet.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'flooding.windows')

from django.core import management

if __name__ == '__main__':
    management.execute_from_command_line()
| lizardsystem/flooding | flooding/manage.py | Python | gpl-3.0 | 228 |
from django.conf.urls import include, url
from sapl.sessao.views import (AdicionarVariasMateriasExpediente,
AdicionarVariasMateriasOrdemDia, BancadaCrud,
CargoBancadaCrud, ExpedienteMateriaCrud,
ExpedienteView, JustificativaAusenciaCrud,
OcorrenciaSessaoView, ConsideracoesFinaisView, MateriaOrdemDiaCrud, OradorOrdemDiaCrud,
MesaView, OradorCrud,
OradorExpedienteCrud, PainelView,
PautaSessaoDetailView, PautaSessaoView,
PesquisarPautaSessaoView,
PesquisarSessaoPlenariaView,
PresencaOrdemDiaView, PresencaView,
ResumoOrdenacaoView, ResumoView, ResumoAtaView, RetiradaPautaCrud, SessaoCrud,
TipoJustificativaCrud, TipoExpedienteCrud, TipoResultadoVotacaoCrud,
TipoExpedienteCrud, TipoResultadoVotacaoCrud, TipoRetiradaPautaCrud,
TipoSessaoCrud, VotacaoEditView,
VotacaoExpedienteEditView,
VotacaoExpedienteView, VotacaoNominalEditView,
VotacaoNominalExpedienteDetailView,
VotacaoNominalExpedienteEditView,
VotacaoNominalExpedienteView,
VotacaoNominalTransparenciaDetailView,
VotacaoSimbolicaTransparenciaDetailView,
VotacaoNominalView, VotacaoView, abrir_votacao,
atualizar_mesa, insere_parlamentar_composicao,
mudar_ordem_materia_sessao, recuperar_materia,
recuperar_numero_sessao_view,
remove_parlamentar_composicao,
reordena_materias,
sessao_legislativa_legislatura_ajax,
VotacaoEmBlocoOrdemDia, VotacaoEmBlocoExpediente,
VotacaoEmBlocoSimbolicaView, VotacaoEmBlocoNominalView,
recuperar_nome_tipo_sessao,
ExpedienteLeituraView,
OrdemDiaLeituraView,
retirar_leitura,
TransferenciaMateriasExpediente, TransferenciaMateriasOrdemDia,
filtra_materias_copia_sessao_ajax, verifica_materia_sessao_plenaria_ajax)
from .apps import AppConfig
app_name = AppConfig.name


urlpatterns = [
    # Aggregated CRUD routes for sessions and their child records.
    url(r'^sessao/', include(SessaoCrud.get_urls() + OradorCrud.get_urls() +
                             OradorExpedienteCrud.get_urls() +
                             ExpedienteMateriaCrud.get_urls() +
                             JustificativaAusenciaCrud.get_urls() +
                             MateriaOrdemDiaCrud.get_urls() +
                             OradorOrdemDiaCrud.get_urls() +
                             RetiradaPautaCrud.get_urls())),

    # Board (mesa) composition management.
    url(r'^sessao/(?P<pk>\d+)/mesa$', MesaView.as_view(), name='mesa'),

    url(r'^sessao/mesa/atualizar-mesa/$',
        atualizar_mesa,
        name='atualizar_mesa'),

    url(r'^sessao/mesa/insere-parlamentar/composicao/$',
        insere_parlamentar_composicao,
        name='insere_parlamentar_composicao'),

    url(r'^sessao/mesa/remove-parlamentar-composicao/$',
        remove_parlamentar_composicao,
        name='remove_parlamentar_composicao'),

    # AJAX helpers.
    url(r'^sessao/recuperar-materia/', recuperar_materia),
    url(r'^sessao/recuperar-numero-sessao/',
        recuperar_numero_sessao_view,
        name='recuperar_numero_sessao_view'
        ),
    url(r'^sessao/recuperar-nome-tipo-sessao/',
        recuperar_nome_tipo_sessao,
        name='recuperar_nome_tipo_sessao'),
    url(r'^sessao/sessao-legislativa-legislatura-ajax/',
        sessao_legislativa_legislatura_ajax,
        name='sessao_legislativa_legislatura_ajax_view'),
    url(r'^sessao/filtra-materias-copia-sessao-ajax/',
        filtra_materias_copia_sessao_ajax,
        name='filtra_materias_copia_sessao_ajax_view'),
    url(r'^sessao/verifica-materia-sessao-plenaria-ajax/',
        verifica_materia_sessao_plenaria_ajax,
        name='verifica_materia_sessao_plenaria_ajax_view'),

    url(r'^sessao/(?P<pk>\d+)/(?P<spk>\d+)/abrir-votacao$',
        abrir_votacao,
        name="abrir_votacao"),
    url(r'^sessao/(?P<pk>\d+)/reordena/(?P<tipo>[\w\-]+)/(?P<ordenacao>\d+)/$', reordena_materias, name="reordena_materias"),

    # System-level configuration CRUDs.
    url(r'^sistema/sessao-plenaria/tipo/',
        include(TipoSessaoCrud.get_urls())),
    url(r'^sistema/sessao-plenaria/tipo-resultado-votacao/',
        include(TipoResultadoVotacaoCrud.get_urls())),
    url(r'^sistema/sessao-plenaria/tipo-expediente/',
        include(TipoExpedienteCrud.get_urls())),
    url(r'^sistema/sessao-plenaria/tipo-justificativa/',
        include(TipoJustificativaCrud.get_urls())),
    url(r'^sistema/sessao-plenaria/tipo-retirada-pauta/',
        include(TipoRetiradaPautaCrud.get_urls())),
    url(r'^sistema/bancada/',
        include(BancadaCrud.get_urls())),
    url(r'^sistema/cargo-bancada/',
        include(CargoBancadaCrud.get_urls())),

    url(r'^sistema/resumo-ordenacao/',
        ResumoOrdenacaoView.as_view(),
        name='resumo_ordenacao'),

    # Bulk addition of matters to a session.
    url(r'^sessao/(?P<pk>\d+)/adicionar-varias-materias-expediente/',
        AdicionarVariasMateriasExpediente.as_view(),
        name='adicionar_varias_materias_expediente'),
    url(r'^sessao/(?P<pk>\d+)/adicionar-varias-materias-ordem-dia/',
        AdicionarVariasMateriasOrdemDia.as_view(),
        name='adicionar_varias_materias_ordem_dia'),

    # PAUTA SESSÃO
    url(r'^sessao/pauta-sessao$',
        PautaSessaoView.as_view(), name='pauta_sessao'),
    url(r'^sessao/pauta-sessao/pesquisar-pauta$',
        PesquisarPautaSessaoView.as_view(), name='pesquisar_pauta'),
    url(r'^sessao/pauta-sessao/(?P<pk>\d+)/(?:pdf)?$',
        PautaSessaoDetailView.as_view(), name='pauta_sessao_detail'),

    # Subnav sessão
    url(r'^sessao/(?P<pk>\d+)/expediente$',
        ExpedienteView.as_view(), name='expediente'),
    url(r'^sessao/(?P<pk>\d+)/ocorrencia_sessao$',
        OcorrenciaSessaoView.as_view(), name='ocorrencia_sessao'),
    url(r'^sessao/(?P<pk>\d+)/consideracoes_finais$',
        ConsideracoesFinaisView.as_view(), name='consideracoes_finais'),
    url(r'^sessao/(?P<pk>\d+)/presenca$',
        PresencaView.as_view(), name='presenca'),
    url(r'^sessao/(?P<pk>\d+)/painel$',
        PainelView.as_view(), name='painel'),
    url(r'^sessao/(?P<pk>\d+)/presencaordemdia$',
        PresencaOrdemDiaView.as_view(),
        name='presencaordemdia'),

    # Block voting (order of the day / expedient).
    url(r'^sessao/(?P<pk>\d+)/votacao_bloco_ordemdia$',
        VotacaoEmBlocoOrdemDia.as_view(),
        name='votacao_bloco_ordemdia'),
    url(r'^sessao/(?P<pk>\d+)/votacao_bloco/votnom$',
        VotacaoEmBlocoNominalView.as_view(), name='votacaobloconom'),
    url(r'^sessao/(?P<pk>\d+)/votacao_bloco/votsimb$',
        VotacaoEmBlocoSimbolicaView.as_view(), name='votacaoblocosimb'),
    url(r'^sessao/(?P<pk>\d+)/votacao_bloco_expediente$',
        VotacaoEmBlocoExpediente.as_view(),
        name='votacao_bloco_expediente'),

    url(r'^sessao/(?P<pk>\d+)/resumo$',
        ResumoView.as_view(), name='resumo'),
    url(r'^sessao/(?P<pk>\d+)/resumo_ata$',
        ResumoAtaView.as_view(), name='resumo_ata'),
    url(r'^sessao/pesquisar-sessao$',
        PesquisarSessaoPlenariaView.as_view(), name='pesquisar_sessao'),

    # Order-of-the-day voting (nominal / secret / symbolic).
    url(r'^sessao/(?P<pk>\d+)/matordemdia/votnom/(?P<oid>\d+)/(?P<mid>\d+)$',
        VotacaoNominalView.as_view(), name='votacaonominal'),
    url(r'^sessao/(?P<pk>\d+)/matordemdia/votnom/edit/(?P<oid>\d+)/(?P<mid>\d+)$',
        VotacaoNominalEditView.as_view(), name='votacaonominaledit'),
    url(r'^sessao/(?P<pk>\d+)/matordemdia/votsec/(?P<oid>\d+)/(?P<mid>\d+)$',
        VotacaoView.as_view(), name='votacaosecreta'),
    url(r'^sessao/(?P<pk>\d+)/matordemdia/votsec/view/(?P<oid>\d+)/(?P<mid>\d+)$',
        VotacaoEditView.as_view(), name='votacaosecretaedit'),
    url(r'^sessao/(?P<pk>\d+)/matordemdia/votsimb/(?P<oid>\d+)/(?P<mid>\d+)$',
        VotacaoView.as_view(), name='votacaosimbolica'),
    url(r'^sessao/(?P<pk>\d+)/matordemdia/votsimbbloco/$',
        VotacaoView.as_view(), name='votacaosimbolicabloco'),
    url(r'^sessao/(?P<pk>\d+)/matordemdia/votsimb/view/(?P<oid>\d+)/(?P<mid>\d+)$',
        VotacaoEditView.as_view(), name='votacaosimbolicaedit'),

    # Expedient voting (nominal / symbolic / secret).
    url(r'^sessao/(?P<pk>\d+)/matexp/votnom/(?P<oid>\d+)/(?P<mid>\d+)$',
        VotacaoNominalExpedienteView.as_view(), name='votacaonominalexp'),
    url(r'^sessao/(?P<pk>\d+)/matexp/votnom/edit/(?P<oid>\d+)/(?P<mid>\d+)$',
        VotacaoNominalExpedienteEditView.as_view(),
        name='votacaonominalexpedit'),
    url(r'^sessao/(?P<pk>\d+)/matexp/votnom/detail/(?P<oid>\d+)/(?P<mid>\d+)$',
        VotacaoNominalExpedienteDetailView.as_view(),
        name='votacaonominalexpdetail'),
    url(r'^sessao/(?P<pk>\d+)/matexp/votsimb/(?P<oid>\d+)/(?P<mid>\d+)$',
        VotacaoExpedienteView.as_view(), name='votacaosimbolicaexp'),
    url(r'^sessao/(?P<pk>\d+)/matexp/votsimb/view/(?P<oid>\d+)/(?P<mid>\d+)$',
        VotacaoExpedienteEditView.as_view(), name='votacaosimbolicaexpedit'),
    url(r'^sessao/(?P<pk>\d+)/matexp/votsec/(?P<oid>\d+)/(?P<mid>\d+)$',
        VotacaoExpedienteView.as_view(), name='votacaosecretaexp'),
    url(r'^sessao/(?P<pk>\d+)/matexp/votsec/view/(?P<oid>\d+)/(?P<mid>\d+)$',
        VotacaoExpedienteEditView.as_view(), name='votacaosecretaexpedit'),

    # Public transparency views of finished votes.
    url(r'^sessao/(?P<pk>\d+)/votacao-nominal-transparencia/(?P<oid>\d+)/(?P<mid>\d+)$',
        VotacaoNominalTransparenciaDetailView.as_view(),
        name='votacao_nominal_transparencia'),
    url(r'^sessao/(?P<pk>\d+)/votacao-simbolica-transparencia/(?P<oid>\d+)/(?P<mid>\d+)$',
        VotacaoSimbolicaTransparenciaDetailView.as_view(),
        name='votacao_simbolica_transparencia'),

    url(r'^sessao/mudar-ordem-materia-sessao/',
        mudar_ordem_materia_sessao,
        name='mudar_ordem_materia_sessao'),

    # Reading (leitura) of matters.
    url(r'^sessao/(?P<pk>\d+)/matexp/leitura/(?P<oid>\d+)/(?P<mid>\d+)$',
        ExpedienteLeituraView.as_view(), name='leituraexp'),
    url(r'^sessao/(?P<pk>\d+)/matordemdia/leitura/(?P<oid>\d+)/(?P<mid>\d+)$',
        OrdemDiaLeituraView.as_view(), name='leituraod'),
    url(r'^sessao/(?P<pk>\d+)/(?P<iso>\d+)/(?P<oid>\d+)/retirar-leitura$',
        retirar_leitura, name='retirar_leitura'),

    # Transfer of matters between sessions.
    url(r'^sessao/(?P<pk>\d+)/transf-mat-exp$',
        TransferenciaMateriasExpediente.as_view(),
        name="transf_mat_exp"),
    url(r'^sessao/(?P<pk>\d+)/transf-mat-ordemdia$',
        TransferenciaMateriasOrdemDia.as_view(),
        name="transf_mat_ordemdia"),
]
| interlegis/sapl | sapl/sessao/urls.py | Python | gpl-3.0 | 11,025 |
import os
import time
import traceback
from lib.FileManager.FM import REQUEST_DELAY
from lib.FileManager.workers.baseWorkerCustomer import BaseWorkerCustomer
class CreateCopy(BaseWorkerCustomer):
    def __init__(self, paths, session, *args, **kwargs):
        """Worker that duplicates FTP files/directories in place.

        :param paths: list of FTP paths to duplicate
        :param session: FTP session description used to open the connection
        """
        super(CreateCopy, self).__init__(*args, **kwargs)

        self.paths = paths
        self.session = session
def run(self):
try:
self.preload()
self.logger.info("CreateCopy process run")
ftp = self.get_ftp_connection(self.session)
# Временная хеш таблица для директорий по которым будем делать листинг
directories = {}
for path in self.paths:
dirname = ftp.path.dirname(path)
if dirname not in directories.keys():
directories[dirname] = []
directories[dirname].append(path)
# Массив хешей source -> target для каждого пути
copy_paths = []
# Эта содомия нужна чтобы составтить массив source -> target для создания копии файла с красивым именем
# с учетом того что могут быть совпадения
for dirname, dir_paths in directories.items():
dir_listing = ftp.listdir(dirname)
for dir_path in dir_paths:
i = 0
exist = False
if ftp.isdir(dir_path):
filename = os.path.basename(dir_path)
ext = ''
else:
filename, file_extension = ftp.path.splitext(os.path.basename(dir_path))
ext = file_extension
copy_name = filename + ' copy' + ext if i == 0 else filename + ' copy(' + str(i) + ')' + ext
for dir_current_path in dir_listing:
if copy_name == dir_current_path:
exist = True
i += 1
break
if not exist:
copy_paths.append({
'source': dir_path,
'target': ftp.path.join(dirname, copy_name)
})
while exist:
exist = False
if ftp.isdir(dir_path):
filename = ftp.path.basename(dir_path)
ext = ''
else:
filename, file_extension = ftp.path.splitext(dir_path)
ext = file_extension
copy_name = filename + ' copy' + ext if i == 0 else filename + ' copy(' + str(i) + ')' + ext
for dir_current_path in dir_listing:
if copy_name == dir_current_path:
exist = True
i += 1
break
if not exist:
dir_listing.append(copy_name)
copy_paths.append({
'source': dir_path,
'target': os.path.join(dirname, copy_name)
})
success_paths = []
error_paths = []
created_paths = []
next_tick = time.time() + REQUEST_DELAY
for copy_path in copy_paths:
try:
source_path = copy_path.get('source')
target_path = copy_path.get('target')
if ftp.isfile(source_path):
copy_result = ftp.copy_file(source_path, ftp.path.dirname(target_path), overwrite=True,
rename=target_path)
if not copy_result['success'] or len(copy_result['file_list']['failed']) > 0:
raise copy_result['error'] if copy_result['error'] is not None else Exception(
"Upload error")
elif ftp.isdir(source_path):
copy_result = ftp.copy_dir(source_path, ftp.path.dirname(target_path), overwrite=True,
rename=target_path)
if not copy_result['success'] or len(copy_result['file_list']['failed']) > 0:
raise copy_result['error'] if copy_result['error'] is not None else Exception(
"Upload error")
else:
error_paths.append(source_path)
break
success_paths.append(source_path)
created_paths.append(ftp.file_info(target_path))
if time.time() > next_tick:
progress = {
'percent': round(float(len(success_paths)) / float(len(copy_paths)), 2),
'text': str(
int(round(float(len(success_paths)) / float(len(copy_paths)), 2) * 100)) + '%'
}
self.on_running(self.status_id, progress=progress, pid=self.pid, pname=self.name)
next_tick = time.time() + REQUEST_DELAY
except Exception as e:
self.logger.error("Error copy file %s , error %s" % (str(source_path), str(e)))
error_paths.append(source_path)
result = {
"success": success_paths,
"errors": error_paths,
"items": created_paths
}
# иначе пользователям кажется что скопировалось не полностью )
progress = {
'percent': round(float(len(success_paths)) / float(len(copy_paths)), 2),
'text': str(int(round(float(len(success_paths)) / float(len(copy_paths)), 2) * 100)) + '%'
}
time.sleep(REQUEST_DELAY)
self.on_success(self.status_id, data=result, progress=progress, pid=self.pid, pname=self.name)
except Exception as e:
result = {
"error": True,
"message": str(e),
"traceback": traceback.format_exc()
}
self.on_error(self.status_id, result, pid=self.pid, pname=self.name)
| LTD-Beget/sprutio-rpc | lib/FileManager/workers/ftp/createCopy.py | Python | gpl-3.0 | 6,690 |
import random

import src
class PoisonBush(src.items.Item):
    """
    A hard to remove poison plant.

    Characters can feed it their blood (satiation); after enough feedings
    it transforms into an EncrustedPoisonBush.  Destroying it leaves fire
    crystals behind and spawns a hostile exploder.
    """

    type = "PoisonBush"

    name = "poison brush"
    description = ""
    usageInfo = """
You can use it to lose 100 satiation.
"""

    walkable = False
    charges = 0  # number of times a character fed the bush

    def __init__(self):
        """
        set up internal state
        """
        super().__init__(display=src.canvas.displayChars.poisonBush)
        self.attributesToStore.extend(["charges"])

    def apply(self, character):
        """
        handle a character trying to use this item
        by draining up to 100 satiation from the character

        Parameters:
            character: the character trying to use this item
        """
        self.charges += 1
        # drain satiation, clamping at zero
        if 100 > character.satiation:
            character.satiation = 0
        else:
            character.satiation -= 100

        # after more than 10 feedings the bush transforms
        if self.charges > 10:
            new = src.items.itemMap["EncrustedPoisonBush"]()
            self.container.addItem(new, self.getPosition())
            self.container.removeItem(self)
        character.addMessage("you give your blood to the poison bush")

    def spawn(self, distance=1):
        """
        spawn a new poison bloom on a nearby free spot

        Parameters:
            distance: the spawning distance
        """
        if not (self.xPosition and self.yPosition):
            return

        # pick a random offset around the bush
        direction = (random.randint(1, distance + 1), random.randint(1, distance + 1))
        newPos = (self.xPosition + direction[0] - 5, self.yPosition + direction[1] - 5, self.zPosition)
        # stay within the 15x15-tile map bounds
        if (
            newPos[0] < 1
            or newPos[1] < 1
            or newPos[0] > 15 * 15 - 2
            or newPos[1] > 15 * 15 - 2
        ):
            return
        # only spawn on empty spots
        if not (
            newPos in self.container.itemByCoordinates
            and len(self.container.itemByCoordinates[newPos])
        ):
            new = src.items.itemMap["PoisonBloom"]()
            self.container.addItem(new, newPos)

    def getLongInfo(self):
        """
        returns a longer than normal description text

        Returns:
            the description text
        """
        text = super().getLongInfo()
        text += "poison charges: %s" % (self.charges)
        return text

    def destroy(self, generateScrap=True):
        """
        destroy the item, leave fire crystals and spawn an exploder

        Parameters:
            generateScrap: flag to toggle leaving residue; this item
                           always suppresses normal scrap in favour of
                           fire crystals
        """
        new = src.items.itemMap["FireCrystals"]()
        self.container.addItem(new, self.getPosition())

        character = src.characters.Exploder()
        character.solvers = [
            "NaiveActivateQuest",
            "ActivateQuestMeta",
            "NaiveExamineQuest",
            "ExamineQuestMeta",
            "NaivePickupQuest",
            "NaiveMurderQuest",
            "DrinkQuest",
            "NaiveExamineQuest",
            "ExamineQuestMeta",
        ]
        character.faction = "monster"

        # derive a pseudo random three-step walk from the current tick
        # and the bush position (0->A, 1->W, 2->S, 3->D)
        def step(value):
            return "AWSD"[value % 4]

        command = step(src.gamestate.gamestate.tick) + step(self.xPosition) + step(self.yPosition)

        character.macroState["macros"]["m"] = list(command + "_m")
        character.runCommandString("_m", clear=True)
        character.satiation = 100
        self.container.addCharacter(character, self.xPosition, self.yPosition)
        super().destroy(generateScrap=False)


src.items.addType(PoisonBush)
| MarxMustermann/OfMiceAndMechs | src/itemFolder/plants/poisonBush.py | Python | gpl-3.0 | 4,198 |
# -*- coding: utf-8 -*-
#
# formatter.py - format html from cplusplus.com to groff syntax
#
# Copyright (C) 2010 - 2015 Wei-Ning Huang (AZ) <aitjcize@gmail.com>
# All Rights reserved.
#
# This file is part of cppman.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
import datetime
import re
import string
import urllib.request
from functools import partial
from cppman.formatter.tableparser import parse_table
from cppman.util import fixupHTML, html2man
def member_table_def(g):
    """Render a member-table row whose third group embeds a nested table."""
    rendered = parse_table('<table>%s</table>' % str(g.group(3)))
    # Escape column with '.' as prefix
    rendered = re.sub(r'T{\n(\..*?)\nT}', r'T{\n\\E \1\nT}', rendered, flags=re.S)
    return '\n.IP "%s"\n%s\n%s\n' % (g.group(1), g.group(2), rendered)
def member_type_function(g):
    """Turn a 'Member functions' table row into a groff .IP entry.

    Rows whose first column carries no hyperlink are dropped.  Each
    function name gets a ' (3)' man-section suffix; a trailing
    '[since/until C++NN]' marker is kept after the names, and any
    '[static]' tag (as in string::npos[static]) is removed.
    """
    cell = g.group(1)
    if '<a href=' not in cell:
        return ""
    head = re.sub(r'<.*?>', '', cell).strip()
    tail = ''
    ver = re.search(
        r'^(.*?)(\[(?:(?:since|until) )?C\+\+\d+\]\s*(,\s*)?)+$', head)
    if ver:
        head = ver.group(1).strip()
        tail = ' ' + ver.group(2)
    if ',' in head:
        names = ', '.join(part.strip() + ' (3)' for part in head.split(','))
    else:
        names = head.strip() + ' (3)'
    entry = (names + tail).replace('"', '\\(dq')
    entry = entry.replace("[static]", "")
    return '\n.IP "%s"\n%s\n' % (entry, g.group(2))
NAV_BAR_END = '<div class="t-navbar-sep">.?</div></div>'

# Format replacement RE list
# The '.SE' pseudo macro is described in the function: html2groff
# Each entry is a (pattern, replacement, flags) triple; html2groff()
# applies them to the page source in order, so later rules see the
# output of earlier ones.
rps = [
    # Workaround: remove <p> in t-dcl
    (r'<tr class="t-dcl">(.*?)</tr>',
     lambda g: re.sub('<p/?>', '', g.group(1)), re.S),
    # Header, Name
    (r'<h1.*?>(.*?)</h1>',
     r'\n.TH "{{name}}" 3 "%s" "cppreference.com" "C++ Programmer\'s Manual"\n'
     r'\n.SH "NAME"\n{{name}} {{shortdesc}}\n.SE\n' % datetime.date.today(),
     re.S),
    # Defined in header
    (r'<div class="t-navbar"[^>]*>.*?' + NAV_BAR_END + r'.*?'
     r'Defined in header <code>(.*?)</code>(.*?)<tr class="t-dcl-sep">',
     r'\n.SH "SYNOPSIS"\n#include \1\n.sp\n'
     r'.nf\n\2\n.fi\n.SE\n'
     r'\n.SH "DESCRIPTION"\n', re.S),
    (r'<div class="t-navbar"[^>]*>.*?' + NAV_BAR_END +
     r'(.*?)<tr class="t-dcl-sep">',
     r'\n.SH "SYNOPSIS"\n.nf\n\1\n.fi\n.SE\n'
     r'\n.SH "DESCRIPTION"\n', re.S),
    # <unordered_map>
    (r'<div class="t-navbar"[^>]*>.*?' + NAV_BAR_END +
     r'(.*?)<table class="t-dsc-begin">',
     r'\n.SH "DESCRIPTION"\n\1\n', re.S),
    # access specifiers
    (r'<div class="t-navbar"[^>]*>.*?' + NAV_BAR_END +
     r'(.*?)<h3',
     r'\n.SH "DESCRIPTION"\n\1\n<h3', re.S),
    (r'<td>\s*\([0-9]+\)\s*</td>', r'', 0),
    # Section headers
    (r'<div class="t-inherited">.*?<h2>.*?Inherited from\s*(.*?)\s*</h2>',
     r'\n.SE\n.IEND\n.IBEGIN \1\n', re.S),
    # Remove tags
    (r'<span class="edit.*?">.*?</span> ?', r'', re.S),
    (r'&#91;edit&#93;', r'', re.S),
    (r'\[edit\]', r'', re.S),
    (r'<div id="siteSub">.*?</div>', r'', 0),
    (r'<div id="contentSub">.*?</div>', r'', 0),
    (r'<table class="toc" id="toc"[^>]*>.*?</table>', r'', re.S),
    (r'<h2[^>]*>.*?</h2>', r'', re.S),
    (r'<div class="coliru-btn coliru-btn-run-init">.*?</div>', r'', re.S),
    (r'<tr class="t-dsc-hitem">.*?</tr>', r'', re.S),
    # C++11/14/17/20
    (r'\(((?:since|until) C\+\+\d+)\)', r' [\1]', re.S),
    (r'\((C\+\+\d+)\)', r' [\1]', re.S),
    # Subsections
    (r'<h5[^>]*>\s*(.*)</h5>', r'\n.SS "\1"\n', 0),
    # Group t-lines
    (r'<span></span>', r'', re.S),
    (r'<span class="t-lines">(?:<span>.+?</span>.*)+</span>',
     lambda x: re.sub('\s*</span><span>\s*', r', ', x.group(0)), re.S),
    # Member type & function second col is group see basic_fstream for example
    (r'<tr class="t-dsc">\s*?<td>((?:(?!</td>).)*?)</td>\s*?'
     r'<td>((?:(?!</td>).)*?)<table[^>]*>((?:(?!</table>).)*?)</table>'
     r'(?:(?!</td>).)*?</td>\s*?</tr>',
     member_table_def, re.S),
    # Section headers
    (r'.*<h3>(.+?)</h3>', r'\n.SE\n.SH "\1"\n', 0),
    # Member type & function
    (r'<tr class="t-dsc">\n?<td>\s*(.*?)\n?</td>.*?<td>\s*(.*?)</td>.*?</tr>',
     member_type_function, re.S),
    # Parameters
    (r'<tr class="t-par">.*?<td>\s*(.*?)\n?</td>.*?<td>.*?</td>.*?'
     r'<td>\s*(.*?)</td>.*?</tr>',
     r'\n.IP "\1"\n\2\n', re.S),
    # 'ul' tag
    (r'<ul>', r'\n.RS 2\n', 0),
    (r'</ul>', r'\n.RE\n.sp\n', 0),
    # 'li' tag
    (r'<li>\s*(.+?)</li>', r'\n.IP \[bu] 3\n\1\n', re.S),
    # 'pre' tag
    (r'<pre[^>]*>(.+?)</pre\s*>', r'\n.in +2n\n.nf\n\1\n.fi\n.in\n', re.S),
    # Footer
    (r'<div class="printfooter">',
     r'\n.SE\n.IEND\n.SH "REFERENCE"\n'
     r'cppreference.com, 2015 - All rights reserved.', re.S),
    # C++ version tag
    (r'<div title="(C\+\+..)"[^>]*>', r'.sp\n\1\n', 0),
    # Output
    (r'<p>Output:\n?</p>', r'\n.sp\nOutput:\n', re.S),
    # Paragraph
    (r'<p>(.*?)</p>', r'\n\1\n.sp\n', re.S),
    (r'<div class="t-li1">(.*?)</div>', r'\n\1\n.sp\n', re.S),
    (r'<div class="t-li2">(.*?)</div>',
     r'\n.RS\n\1\n.RE\n.sp\n', re.S),
    # 'br' tag
    (r'<br/>', r'\n.br\n', 0),
    (r'\n.br\n.br\n', r'\n.sp\n', 0),
    # 'dd' 'dt' tag
    (r'<dt>(.+?)</dt>\s*<dd>(.+?)</dd>', r'\n.IP "\1"\n\2\n', re.S),
    # Bold
    (r'<strong>(.+?)</strong>', r'\n.B \1\n', 0),
    # Any other tags
    (r'<script[^>]*>[^<]*</script>', r'', 0),
    (r'<.*?>', r'', re.S),
    # Escape
    (r'^#', r'\#', 0),
    # The following rules must stay as HTML *entity* patterns: the
    # tag-stripping rule above already removed all literal '<...>'
    # sequences, so only entity references remain to be decoded here.
    # (An earlier mangled revision had them pre-decoded to literal
    # characters, which turned them into no-ops.)
    (r'&#160;', ' ', 0),
    (r'&#(\d+);', lambda g: chr(int(g.group(1))), 0),
    # Misc
    (r'&lt;', r'<', 0),
    (r'&gt;', r'>', 0),
    (r'&quot;', r'"', 0),
    (r'&amp;', r'&', 0),
    (r'&nbsp;', r' ', 0),
    (r'\\([^\^nE])', r'\\\\\1', 0),
    (r'&gt;/&quot;&gt;', r'', 0),
    (r'/&quot;&gt;', r'', 0),
    # Remove empty sections
    (r'\n.SH (.+?)\n+.SE', r'', 0),
    # Remove empty lines
    (r'\n\s*\n+', r'\n', 0),
    (r'\n\n+', r'\n', 0),
    # Preserve \n" in EXAMPLE
    (r'\\n', r'\\en', 0),
    # Remove leading whitespace
    (r'^\s+', r'', re.S),
    # Trailing white-spaces
    (r'\s+\n', r'\n', re.S),
    # Remove extra whitespace and newline in .SH/SS/IP section
    (r'.(SH|SS|IP) "\s*(.*?)\s*\n?"', r'.\1 "\2"', 0),
    # Remove extra whitespace before .IP bullet
    (r'(.IP \\\\\[bu\] 3)\n\s*(.*?)\n', r'\1\n\2\n', 0),
    # Remove extra '\n' before C++ version Tag (don't do it in table)
    (r'(?<!T{)\n\s*(\[(:?since|until) C\+\+\d+\])', r' \1', re.S)
]
def html2groff(data, name):
    """Convert HTML text from cppreference.com to Groff-formatted text.

    Parameters:
        data: the (fixed-up) HTML page source
        name: the fully qualified symbol name, e.g. 'std::vector'

    Returns:
        the groff man-page source for the symbol
    """
    # Remove header and footer
    try:
        data = data[data.index('<div id="cpp-content-base">'):]
        data = data[:data.index('<div class="printfooter">') + 25]
    except ValueError:
        pass
    # Remove non-printable characters
    data = ''.join([x for x in data if x in string.printable])
    # Convert HTML tables to tbl(1) blocks before the generic rules run,
    # since the generic tag-stripping rules would destroy them.
    for table in re.findall(
            r'<table class="(?:wikitable|dsctable)"[^>]*>.*?</table>',
            data, re.S):
        tbl = parse_table(table)
        # Escape column with '.' as prefix
        tbl = re.compile(r'T{\n(\..*?)\nT}', re.S).sub(r'T{\n\\E \1\nT}', tbl)
        data = data.replace(table, tbl)
    # Pre replace all
    for rp in rps:
        data = re.compile(rp[0], rp[2]).sub(rp[1], data)
    # Remove non-printable characters
    data = ''.join([x for x in data if x in string.printable])
    # Upper case all section headers
    for st in re.findall(r'.SH .*\n', data):
        data = data.replace(st, st.upper())
    # Add tags to member/inherited member functions
    # e.g. insert -> vector::insert
    #
    # .SE is a pseudo macro I created which means 'SECTION END'
    # The reason I use it is because I need a marker to know where section
    # ends.
    # re.findall find patterns which does not overlap, which means if I do
    # this: secs = re.findall(r'\n\.SH "(.+?)"(.+?)\.SH', data, re.S)
    # re.findall will skip the later .SH tag and thus skip the later section.
    # To fix this, '.SE' is used to mark the end of the section so the next
    # '.SH' can be find by re.findall
    try:
        idx = data.index('.IEND')
    except ValueError:
        idx = None

    def add_header_multi(prefix, g):
        # Prefix each function name with its class, keeping comma lists.
        if ',' in g.group(1):
            res = ', '.join(['%s::%s' % (prefix, x.strip())
                             for x in g.group(1).split(',')])
        else:
            res = '%s::%s' % (prefix, g.group(1))
        return '\n.IP "%s"' % res

    if idx:
        class_name = name
        if class_name.startswith('std::'):
            normalized_class_name = class_name[len('std::'):]
        else:
            normalized_class_name = class_name
        class_member_content = data[:idx]
        secs = re.findall(r'\.SH "(.+?)"(.+?)\.SE', class_member_content, re.S)
        for sec, content in secs:
            # Member functions
            if (('MEMBER' in sec and
                 'NON-MEMBER' not in sec and
                 'INHERITED' not in sec and
                 'MEMBER TYPES' != sec) or
                    'CONSTANTS' == sec):
                content2 = re.sub(r'\n\.IP "([^:]+?)"',
                                  partial(add_header_multi, class_name),
                                  content)
                # Replace (constructor) (destructor)
                content2 = re.sub(r'\(constructor\)', r'%s' %
                                  normalized_class_name, content2)
                content2 = re.sub(r'\(destructor\)', r'~%s' %
                                  normalized_class_name, content2)
                data = data.replace(content, content2)
    blocks = re.findall(r'\.IBEGIN\s*(.+?)\s*\n(.+?)\.IEND', data, re.S)
    for inherited_class, content in blocks:
        content2 = re.sub(r'\.SH "(.+?)"', r'\n.SH "\1 INHERITED FROM %s"'
                          % inherited_class.upper(), content)
        data = data.replace(content, content2)
        secs = re.findall(r'\.SH "(.+?)"(.+?)\.SE', content, re.S)
        for sec, content in secs:
            # Inherited member functions
            if 'MEMBER' in sec and \
               sec != 'MEMBER TYPES':
                content2 = re.sub(r'\n\.IP "(.+)"',
                                  partial(add_header_multi, inherited_class),
                                  content)
                data = data.replace(content, content2)
    # Remove unneeded pseudo macro
    data = re.sub('(?:\n.SE|.IBEGIN.*?\n|\n.IEND)', '', data)
    # Replace all macros
    desc_re = re.search(r'.SH "DESCRIPTION"\n.*?([^\n\s].*?)\n', data)
    shortdesc = ''
    # not empty description
    if desc_re and not desc_re.group(1).startswith('.SH'):
        shortdesc = '- ' + desc_re.group(1)

    def dereference(g):
        d = dict(name=name, shortdesc=shortdesc)
        if g.group(1) in d:
            return d[g.group(1)]
        # Fix: re.sub requires the replacement callback to return a str;
        # the original implicitly returned None for unknown macros, which
        # raises TypeError.  Keep unknown macros verbatim instead.
        return g.group(0)

    data = re.sub('{{(.*?)}}', dereference, data)
    return data
def func_test():
    """Smoke test: fail if cppreference.com changed its page format."""
    page = urllib.request.urlopen(
        'http://en.cppreference.com/w/cpp/container/vector')
    man_page = html2groff(fixupHTML(page.read()), 'std::vector')
    for marker in ('.SH "NAME"', '.SH "SYNOPSIS"', '.SH "DESCRIPTION"'):
        assert marker in man_page
def test():
    """Fetch a sample page and print its groff rendering for manual review."""
    page = urllib.request.urlopen(
        'http://en.cppreference.com/w/cpp/container/vector')
    print(html2groff(fixupHTML(page.read()), 'std::vector'), end=' ')


if __name__ == '__main__':
    test()
| aitjcize/cppman | cppman/formatter/cppreference.py | Python | gpl-3.0 | 12,272 |
from django.apps import AppConfig
class OntakAppConfig(AppConfig):
    """Django application configuration for the ``oioioi.ontak`` app."""

    name = "oioioi.ontak"
| sio2project/oioioi | oioioi/ontak/apps.py | Python | gpl-3.0 | 95 |
"""
Set the configuration variables for fabric recipes.
"""
from fabric.api import env
from fabric.colors import yellow
import os
env.warn_only = True  # keep fabric running even when a remote command fails

# ConfigParser was renamed to configparser in Python 3.
try:
    import ConfigParser as cp
except ImportError:
    import configparser as cp  # Python 3.0

# Options of sections other than [global] and [roledefs] end up here,
# keyed by section name.
config = {}
_config = cp.SafeConfigParser()
if not os.path.isfile("fabric-recipes.conf"):
    print yellow("warning: No config file specified")
# read() silently ignores a missing file, so this is safe either way.
_config.read("fabric-recipes.conf")
for section in _config.sections():
    opt = _config.items(section)
    if section == "global":
        # [global] options are merged straight into fabric's env.
        env.update(opt)
    elif section == "roledefs":
        # Role definitions are comma-separated host lists.
        opt = [(k, v.split(",")) for k, v in opt]
        env['roledefs'].update(opt)
    else:
        config[section] = dict(opt)
from functools import partial
import threading
from PIL import Image
from PyQt4.Qt import Qt
from PyQt4.Qt import QGridLayout, QInputDialog, QPushButton
from PyQt4.Qt import QVBoxLayout, QLabel, SIGNAL
from electrum_gui.qt.main_window import StatusBarButton
from electrum_gui.qt.password_dialog import PasswordDialog
from electrum_gui.qt.util import *
from .plugin import TrezorCompatiblePlugin, TIM_NEW, TIM_RECOVER, TIM_MNEMONIC
from electrum.i18n import _
from electrum.plugins import hook, DeviceMgr
from electrum.util import PrintError
from electrum.wallet import Wallet, BIP44_Wallet
from electrum.wizard import UserCancelled
# By far the trickiest thing about this handler is the window stack;
# MacOSX is very fussy that modal dialogs be perfectly parented
class QtHandler(PrintError):
    '''An interface between the GUI (here, QT) and the device handling
    logic for handling I/O. This is a generic implementation of the
    Trezor protocol; derived classes can customize it.'''
    # Threading model: device I/O runs on worker threads, but Qt widgets
    # may only be touched from the GUI thread.  The worker-side getters
    # (get_pin / get_word / get_passphrase) therefore emit a signal and
    # block on self.done until the GUI-side slot stores the response.
    def __init__(self, win, pin_matrix_widget_class, device):
        # Route every request through Qt signals so the dialog slots run
        # in the GUI thread regardless of which thread emits them.
        win.connect(win, SIGNAL('clear_dialog'), self.clear_dialog)
        win.connect(win, SIGNAL('error_dialog'), self.error_dialog)
        win.connect(win, SIGNAL('message_dialog'), self.message_dialog)
        win.connect(win, SIGNAL('pin_dialog'), self.pin_dialog)
        win.connect(win, SIGNAL('passphrase_dialog'), self.passphrase_dialog)
        win.connect(win, SIGNAL('word_dialog'), self.word_dialog)
        self.win = win
        # Class (not instance) used to build the PIN matrix widget.
        self.pin_matrix_widget_class = pin_matrix_widget_class
        self.device = device  # human-readable device name for dialog titles
        self.dialog = None    # currently shown message dialog, if any
        self.done = threading.Event()  # signalled when a response is ready
    def top_level_window(self):
        # Parent for modal dialogs (see note above about MacOSX).
        return self.win.top_level_window()
    def watching_only_changed(self):
        self.win.emit(SIGNAL('watching_only_changed'))
    def show_message(self, msg, cancel_callback=None):
        # Thread-safe: forwards to message_dialog in the GUI thread.
        self.win.emit(SIGNAL('message_dialog'), msg, cancel_callback)
    def show_error(self, msg):
        # Thread-safe: forwards to error_dialog in the GUI thread.
        self.win.emit(SIGNAL('error_dialog'), msg)
    def finished(self):
        self.win.emit(SIGNAL('clear_dialog'))
    def get_pin(self, msg):
        # Called from a worker thread; blocks until pin_dialog() responds.
        self.done.clear()
        self.win.emit(SIGNAL('pin_dialog'), msg)
        self.done.wait()
        return self.response
    def get_word(self, msg):
        # Called from a worker thread; blocks until word_dialog() responds.
        self.done.clear()
        self.win.emit(SIGNAL('word_dialog'), msg)
        self.done.wait()
        return self.word
    def get_passphrase(self, msg):
        # Called from a worker thread; blocks until passphrase_dialog()
        # responds.
        self.done.clear()
        self.win.emit(SIGNAL('passphrase_dialog'), msg)
        self.done.wait()
        return self.passphrase
    def pin_dialog(self, msg):
        # GUI-thread slot: show the scrambled PIN matrix.
        # Needed e.g. when resetting a device
        self.clear_dialog()
        dialog = WindowModalDialog(self.top_level_window(), _("Enter PIN"))
        matrix = self.pin_matrix_widget_class()
        vbox = QVBoxLayout()
        vbox.addWidget(QLabel(msg))
        vbox.addWidget(matrix)
        vbox.addLayout(Buttons(CancelButton(dialog), OkButton(dialog)))
        dialog.setLayout(vbox)
        dialog.exec_()
        self.response = str(matrix.get_value())
        self.done.set()
    def passphrase_dialog(self, msg):
        # GUI-thread slot: prompt for the device passphrase.
        d = PasswordDialog(self.top_level_window(), None, msg,
                           PasswordDialog.PW_PASSPHRASE)
        confirmed, p, passphrase = d.run()
        if confirmed:
            passphrase = BIP44_Wallet.normalize_passphrase(passphrase)
        # NOTE(review): when not confirmed, the un-normalized dialog value
        # is stored as-is — verify this is the intended cancel behaviour.
        self.passphrase = passphrase
        self.done.set()
    def word_dialog(self, msg):
        # GUI-thread slot: prompt for a single recovery word.
        dialog = WindowModalDialog(self.top_level_window(), "")
        hbox = QHBoxLayout(dialog)
        hbox.addWidget(QLabel(msg))
        text = QLineEdit()
        text.setMaximumWidth(100)
        text.returnPressed.connect(dialog.accept)
        hbox.addWidget(text)
        hbox.addStretch(1)
        if not dialog.exec_():
            # NOTE(review): returning here leaves self.done unset, so the
            # worker blocked in get_word() never wakes up on cancel —
            # confirm whether this hang is intentional.
            return None
        self.word = unicode(text.text())
        self.done.set()
    def message_dialog(self, msg, cancel_callback):
        # GUI-thread slot: show a non-blocking "check your device" notice.
        # Called more than once during signing, to confirm output and fee
        self.clear_dialog()
        title = _('Please check your %s device') % self.device
        self.dialog = dialog = WindowModalDialog(self.top_level_window(), title)
        l = QLabel(msg)
        vbox = QVBoxLayout(dialog)
        if cancel_callback:
            vbox.addLayout(Buttons(CancelButton(dialog)))
            dialog.connect(dialog, SIGNAL('rejected()'), cancel_callback)
        vbox.addWidget(l)
        dialog.show()
    def error_dialog(self, msg):
        self.win.show_error(msg, parent=self.top_level_window())
    def clear_dialog(self):
        # Close and drop the current message dialog, if any.
        if self.dialog:
            self.dialog.accept()
            self.dialog = None
    def query_choice(self, msg, labels):
        return self.win.query_choice(msg, labels)
    def request_trezor_init_settings(self, method, device):
        # Build the wizard page collecting device-initialization options.
        # Returns (item, label, pin, use_passphrase) where `item` is either
        # the seed-length radio index (new/recover) or the entered
        # mnemonic/xprv text.  Raises UserCancelled if the wizard is closed.
        wizard = self.win
        vbox = QVBoxLayout()
        main_label = QLabel(_("Initialization settings for your %s:") % device)
        vbox.addWidget(main_label)
        OK_button = OkButton(wizard, _('Next'))
        def clean_text(widget):
            # Collapse all whitespace runs in the text-edit content.
            text = unicode(widget.toPlainText()).strip()
            return ' '.join(text.split())
        if method in [TIM_NEW, TIM_RECOVER]:
            gb = QGroupBox()
            vbox1 = QVBoxLayout()
            gb.setLayout(vbox1)
            vbox.addWidget(gb)
            gb.setTitle(_("Select your seed length:"))
            choices = [
                _("12 words"),
                _("18 words"),
                _("24 words"),
            ]
            bg = QButtonGroup()
            for i, choice in enumerate(choices):
                rb = QRadioButton(gb)
                rb.setText(choice)
                bg.addButton(rb)
                bg.setId(rb, i)
                vbox1.addWidget(rb)
                rb.setChecked(True)
            cb_pin = QCheckBox(_('Enable PIN protection'))
            cb_pin.setChecked(True)
        else:
            text = QTextEdit()
            text.setMaximumHeight(60)
            if method == TIM_MNEMONIC:
                msg = _("Enter your BIP39 mnemonic:")
            else:
                msg = _("Enter the master private key beginning with xprv:")
                def set_enabled():
                    # Only allow Next once a valid xprv has been entered.
                    OK_button.setEnabled(Wallet.is_xprv(clean_text(text)))
                text.textChanged.connect(set_enabled)
                OK_button.setEnabled(False)
            vbox.addWidget(QLabel(msg))
            vbox.addWidget(text)
            pin = QLineEdit()
            pin.setValidator(QRegExpValidator(QRegExp('[1-9]{0,10}')))
            pin.setMaximumWidth(100)
            hbox_pin = QHBoxLayout()
            hbox_pin.addWidget(QLabel(_("Enter your PIN (digits 1-9):")))
            hbox_pin.addWidget(pin)
            hbox_pin.addStretch(1)
        label = QLabel(_("Enter a label to name your device:"))
        name = QLineEdit()
        hl = QHBoxLayout()
        hl.addWidget(label)
        hl.addWidget(name)
        hl.addStretch(1)
        vbox.addLayout(hl)
        if method in [TIM_NEW, TIM_RECOVER]:
            vbox.addWidget(cb_pin)
        else:
            vbox.addLayout(hbox_pin)
        cb_phrase = QCheckBox(_('Enable Passphrase protection'))
        cb_phrase.setChecked(False)
        vbox.addWidget(cb_phrase)
        vbox.addStretch(1)
        vbox.addLayout(Buttons(CancelButton(wizard), OK_button))
        wizard.set_layout(vbox)
        if not wizard.exec_():
            raise UserCancelled
        if method in [TIM_NEW, TIM_RECOVER]:
            item = bg.checkedId()
            pin = cb_pin.isChecked()
        else:
            item = ' '.join(str(clean_text(text)).split())
            pin = str(pin.text())
        return (item, unicode(name.text()), pin, cb_phrase.isChecked())
def qt_plugin_class(base_plugin_class):
    """Return a Qt-enabled plugin class derived from base_plugin_class.

    Factory used by the device-specific plugins (Trezor, KeepKey, ...) to
    mix Qt GUI behaviour into their shared device-plugin base class.
    """
    class QtPlugin(base_plugin_class):
        # Derived classes must provide the following class-static variables:
        #   icon_file
        #   pin_matrix_widget_class
        def create_handler(self, window):
            # One QtHandler per wallet window mediates all device I/O.
            return QtHandler(window, self.pin_matrix_widget_class(), self.device)
        @hook
        def load_wallet(self, wallet, window):
            # Only attach to wallets of this plugin's hardware type.
            if type(wallet) != self.wallet_class:
                return
            # Status-bar button opening the device settings dialog.
            window.tzb = StatusBarButton(QIcon(self.icon_file), self.device,
                                         partial(self.settings_dialog, window))
            window.statusBar().addPermanentWidget(window.tzb)
            wallet.handler = self.create_handler(window)
            # Trigger a pairing
            self.get_client(wallet)
        def on_create_wallet(self, wallet, wizard):
            assert type(wallet) == self.wallet_class
            wallet.handler = self.create_handler(wizard)
            self.select_device(wallet)
            wallet.create_hd_account(None)
        @hook
        def receive_menu(self, menu, addrs, wallet):
            # Context-menu entry to display a single address on the device.
            if type(wallet) == self.wallet_class and len(addrs) == 1:
                menu.addAction(_("Show on %s") % self.device,
                               lambda: self.show_address(wallet, addrs[0]))
        def settings_dialog(self, window):
            hid_id = self.choose_device(window)
            if hid_id:
                SettingsDialog(window, self, hid_id).exec_()
        def choose_device(self, window):
            '''This dialog box should be usable even if the user has
            forgotten their PIN or it is in bootloader mode.'''
            handler = window.wallet.handler
            hid_id = self.device_manager().wallet_hid_id(window.wallet)
            if not hid_id:
                # Wallet not paired yet: let the user pick a device.
                clients, labels = self.unpaired_clients(handler)
                if clients:
                    msg = _("Select a %s device:") % self.device
                    choice = self.query_choice(window, msg, labels)
                    if choice is not None:
                        hid_id = clients[choice].hid_id()
                else:
                    handler.show_error(_("No devices found"))
            return hid_id
        def query_choice(self, window, msg, choices):
            # Modal radio-button chooser; returns the index or None.
            dialog = WindowModalDialog(window)
            clayout = ChoicesLayout(msg, choices)
            layout = clayout.layout()
            layout.addStretch(1)
            layout.addLayout(Buttons(CancelButton(dialog), OkButton(dialog)))
            dialog.setLayout(layout)
            if not dialog.exec_():
                return None
            return clayout.selected_index()
    return QtPlugin
class SettingsDialog(WindowModalDialog):
'''This dialog doesn't require a device be paired with a wallet.
We want users to be able to wipe a device even if they've forgotten
their PIN.'''
def __init__(self, window, plugin, hid_id):
title = _("%s Settings") % plugin.device
super(SettingsDialog, self).__init__(window, title)
self.setMaximumWidth(540)
devmgr = plugin.device_manager()
handler = window.wallet.handler
# wallet can be None, needn't be window.wallet
wallet = devmgr.wallet_by_hid_id(hid_id)
hs_rows, hs_cols = (64, 128)
def get_client():
client = devmgr.client_by_hid_id(hid_id, handler)
if not client:
self.show_error("Device not connected!")
raise RuntimeError("Device not connected")
return client
def update():
# self.features for outer scopes
client = get_client()
features = self.features = client.features
set_label_enabled()
bl_hash = features.bootloader_hash.encode('hex')
bl_hash = "\n".join([bl_hash[:32], bl_hash[32:]])
noyes = [_("No"), _("Yes")]
endis = [_("Enable Passphrases"), _("Disable Passphrases")]
setchange = [_("Set a PIN"), _("Change PIN")]
version = "%d.%d.%d" % (features.major_version,
features.minor_version,
features.patch_version)
coins = ", ".join(coin.coin_name for coin in features.coins)
device_label.setText(features.label)
pin_set_label.setText(noyes[features.pin_protection])
bl_hash_label.setText(bl_hash)
label_edit.setText(features.label)
device_id_label.setText(features.device_id)
serial_number_label.setText(client.hid_id())
initialized_label.setText(noyes[features.initialized])
version_label.setText(version)
coins_label.setText(coins)
clear_pin_button.setVisible(features.pin_protection)
clear_pin_warning.setVisible(features.pin_protection)
pin_button.setText(setchange[features.pin_protection])
pin_msg.setVisible(not features.pin_protection)
passphrase_button.setText(endis[features.passphrase_protection])
language_label.setText(features.language)
def set_label_enabled():
label_apply.setEnabled(label_edit.text() != self.features.label)
def rename():
get_client().change_label(unicode(label_edit.text()))
update()
def toggle_passphrase():
title = _("Confirm Toggle Passphrase Protection")
msg = _("This will cause your Electrum wallet to be unpaired "
"unless your passphrase was or will be empty.\n\n"
"This is because addresses will no "
"longer correspond to those used by your %s.\n\n"
"You will need to create a new Electrum wallet "
"with the install wizard so that they match.\n\n"
"Are you sure you want to proceed?") % plugin.device
if not self.question(msg, title=title):
return
get_client().toggle_passphrase()
devmgr.unpair(hid_id)
update()
def change_homescreen():
dialog = QFileDialog(self, _("Choose Homescreen"))
filename = dialog.getOpenFileName()
if filename:
im = Image.open(str(filename))
if im.size != (hs_cols, hs_rows):
raise Exception('Image must be 64 x 128 pixels')
im = im.convert('1')
pix = im.load()
img = ''
for j in range(hs_rows):
for i in range(hs_cols):
img += '1' if pix[i, j] else '0'
img = ''.join(chr(int(img[i:i + 8], 2))
for i in range(0, len(img), 8))
get_client().change_homescreen(img)
def clear_homescreen():
get_client().change_homescreen('\x00')
def set_pin(remove=False):
get_client().set_pin(remove=remove)
update()
def clear_pin():
set_pin(remove=True)
def wipe_device():
if wallet and sum(wallet.get_balance()):
title = _("Confirm Device Wipe")
msg = _("Are you SURE you want to wipe the device?\n"
"Your wallet still has bitcoins in it!")
if not self.question(msg, title=title,
icon=QMessageBox.Critical):
return
get_client().wipe_device()
devmgr.unpair(hid_id)
update()
def slider_moved():
mins = timeout_slider.sliderPosition()
timeout_minutes.setText(_("%2d minutes") % mins)
        def slider_released():
            # Persist the chosen timeout on the wallet (stored in seconds).
            seconds = timeout_slider.sliderPosition() * 60
            wallet.set_session_timeout(seconds)
        # Top-level layout of the settings dialog.
        dialog_vbox = QVBoxLayout(self)
        # Information tab
        info_tab = QWidget()
        info_layout = QVBoxLayout(info_tab)
        info_glayout = QGridLayout()
        info_glayout.setColumnStretch(2, 1)
        # Read-only value labels; presumably filled in by update() -- confirm.
        device_label = QLabel()
        pin_set_label = QLabel()
        version_label = QLabel()
        device_id_label = QLabel()
        serial_number_label = QLabel()
        bl_hash_label = QLabel()
        bl_hash_label.setWordWrap(True)
        coins_label = QLabel()
        coins_label.setWordWrap(True)
        language_label = QLabel()
        initialized_label = QLabel()
        # (caption, value-widget) pairs laid out one per grid row.
        rows = [
            (_("Device Label"), device_label),
            (_("PIN set"), pin_set_label),
            (_("Firmware Version"), version_label),
            (_("Device ID"), device_id_label),
            (_("Serial Number"), serial_number_label),
            (_("Bootloader Hash"), bl_hash_label),
            (_("Supported Coins"), coins_label),
            (_("Language"), language_label),
            (_("Initialized"), initialized_label),
        ]
        for row_num, (label, widget) in enumerate(rows):
            info_glayout.addWidget(QLabel(label), row_num, 0)
            info_glayout.addWidget(widget, row_num, 1)
        info_layout.addLayout(info_glayout)
# Settings tab
settings_tab = QWidget()
settings_layout = QVBoxLayout(settings_tab)
settings_glayout = QGridLayout()
# Settings tab - Label
label_msg = QLabel(_("Name this %s. If you have mutiple devices "
"their labels help distinguish them.")
% plugin.device)
label_msg.setWordWrap(True)
label_label = QLabel(_("Device Label"))
label_edit = QLineEdit()
label_edit.setMinimumWidth(150)
label_edit.setMaxLength(plugin.MAX_LABEL_LEN)
label_apply = QPushButton(_("Apply"))
label_apply.clicked.connect(rename)
label_edit.textChanged.connect(set_label_enabled)
settings_glayout.addWidget(label_label, 0, 0)
settings_glayout.addWidget(label_edit, 0, 1, 1, 2)
settings_glayout.addWidget(label_apply, 0, 3)
settings_glayout.addWidget(label_msg, 1, 1, 1, -1)
# Settings tab - PIN
pin_label = QLabel(_("PIN Protection"))
pin_button = QPushButton()
pin_button.clicked.connect(set_pin)
settings_glayout.addWidget(pin_label, 2, 0)
settings_glayout.addWidget(pin_button, 2, 1)
pin_msg = QLabel(_("PIN protection is strongly recommended. "
"A PIN is your only protection against someone "
"stealing your bitcoins if they obtain physical "
"access to your %s.") % plugin.device)
pin_msg.setWordWrap(True)
pin_msg.setStyleSheet("color: red")
settings_glayout.addWidget(pin_msg, 3, 1, 1, -1)
# Settings tab - Homescreen
homescreen_layout = QHBoxLayout()
homescreen_label = QLabel(_("Homescreen"))
homescreen_change_button = QPushButton(_("Change..."))
homescreen_clear_button = QPushButton(_("Reset"))
homescreen_change_button.clicked.connect(change_homescreen)
homescreen_clear_button.clicked.connect(clear_homescreen)
homescreen_msg = QLabel(_("You can set the homescreen on your device "
"to personalize it. You must choose a "
"%d x %d monochrome black and white image.")
% (hs_rows, hs_cols))
homescreen_msg.setWordWrap(True)
settings_glayout.addWidget(homescreen_label, 4, 0)
settings_glayout.addWidget(homescreen_change_button, 4, 1)
settings_glayout.addWidget(homescreen_clear_button, 4, 2)
settings_glayout.addWidget(homescreen_msg, 5, 1, 1, -1)
        # Settings tab - Session Timeout
        # Only shown when a wallet is paired: the timeout is stored per-wallet.
        if wallet:
            timeout_label = QLabel(_("Session Timeout"))
            timeout_minutes = QLabel()
            timeout_slider = QSlider(Qt.Horizontal)
            timeout_slider.setRange(1, 60)
            timeout_slider.setSingleStep(1)
            timeout_slider.setTickInterval(5)
            timeout_slider.setTickPosition(QSlider.TicksBelow)
            timeout_slider.setTracking(True)
            timeout_msg = QLabel(
                _("Clear the session after the specified period "
                  "of inactivity. Once a session has timed out, "
                  "your PIN and passphrase (if enabled) must be "
                  "re-entered to use the device."))
            timeout_msg.setWordWrap(True)
            # Wallet stores seconds; the slider works in minutes.
            timeout_slider.setSliderPosition(wallet.session_timeout // 60)
            # Initialize the minutes label before hooking up the signals.
            slider_moved()
            timeout_slider.valueChanged.connect(slider_moved)
            timeout_slider.sliderReleased.connect(slider_released)
            settings_glayout.addWidget(timeout_label, 6, 0)
            settings_glayout.addWidget(timeout_slider, 6, 1, 1, 3)
            settings_glayout.addWidget(timeout_minutes, 6, 4)
            settings_glayout.addWidget(timeout_msg, 7, 1, 1, -1)
        settings_layout.addLayout(settings_glayout)
        settings_layout.addStretch(1)
# Advanced tab
advanced_tab = QWidget()
advanced_layout = QVBoxLayout(advanced_tab)
advanced_glayout = QGridLayout()
# Advanced tab - clear PIN
clear_pin_button = QPushButton(_("Disable PIN"))
clear_pin_button.clicked.connect(clear_pin)
clear_pin_warning = QLabel(
_("If you disable your PIN, anyone with physical access to your "
"%s device can spend your bitcoins.") % plugin.device)
clear_pin_warning.setWordWrap(True)
clear_pin_warning.setStyleSheet("color: red")
advanced_glayout.addWidget(clear_pin_button, 0, 2)
advanced_glayout.addWidget(clear_pin_warning, 1, 0, 1, 5)
# Advanced tab - toggle passphrase protection
passphrase_button = QPushButton()
passphrase_button.clicked.connect(toggle_passphrase)
passphrase_msg = QLabel(
_("Passphrases allow you to access new wallets, each "
"hidden behind a particular case-sensitive passphrase. You "
"need to create a separate Electrum wallet for each passphrase "
"you use as they each generate different addresses. Changing "
"your passphrase does not lose other wallets, each is still "
"accessible behind its own passphrase."))
passphrase_msg.setWordWrap(True)
passphrase_warning = QLabel(
_("If you forget a passphrase you will be unable to access any "
"bitcoins in the wallet behind it. A passphrase is not a PIN. "
"Only change this if you are sure you understand it."))
passphrase_warning.setWordWrap(True)
passphrase_warning.setStyleSheet("color: red")
advanced_glayout.addWidget(passphrase_button, 3, 2)
advanced_glayout.addWidget(passphrase_msg, 4, 0, 1, 5)
advanced_glayout.addWidget(passphrase_warning, 5, 0, 1, 5)
# Advanced tab - wipe device
wipe_device_button = QPushButton(_("Wipe Device"))
wipe_device_button.clicked.connect(wipe_device)
wipe_device_msg = QLabel(
_("Wipe the device, removing all data from it. The firmware "
"is left unchanged."))
wipe_device_msg.setWordWrap(True)
wipe_device_warning = QLabel(
_("Only wipe a device if you have the recovery seed written down "
"and the device wallet(s) are empty, otherwise the bitcoins "
"will be lost forever."))
wipe_device_warning.setWordWrap(True)
wipe_device_warning.setStyleSheet("color: red")
advanced_glayout.addWidget(wipe_device_button, 6, 2)
advanced_glayout.addWidget(wipe_device_msg, 7, 0, 1, 5)
advanced_glayout.addWidget(wipe_device_warning, 8, 0, 1, 5)
advanced_layout.addLayout(advanced_glayout)
advanced_layout.addStretch(1)
tabs = QTabWidget(self)
tabs.addTab(info_tab, _("Information"))
tabs.addTab(settings_tab, _("Settings"))
tabs.addTab(advanced_tab, _("Advanced"))
# Update information
update()
dialog_vbox.addWidget(tabs)
dialog_vbox.addLayout(Buttons(CloseButton(self)))
| joelstanner/electrum | plugins/trezor/qt_generic.py | Python | gpl-3.0 | 24,003 |
"""
PySAR
Polarimetric SAR decomposition
Contents
--------
decomp_fd(hhhh,vvvv,hvhv,hhvv,numthrd=None) : Freeman-Durden 3-component decomposition
"""
from __future__ import print_function, division
import sys,os
import numpy as np
###===========================================================================================
def decomp_fd(hhhh,vvvv,hvhv,hhvv,null=None,numthrd=None,maxthrd=8):
    """
    Freeman-Durden 3-component decomposition

    Parameters
    ----------
    hhhh : ndarray
        horizontally polarized power
    vvvv : ndarray
        vertically polarized power
    hvhv : ndarray
        cross-polarized power
    hhvv : ndarray
        co-polarized cross product (complex-valued)
    null : float or None
        null value to exclude from decomposition
    numthrd : int or None
        number of pthreads; None sets numthrd based on the data array size [None]
    maxthrd : int or None
        maximum allowable numthrd [8]

    Returns
    -------
    ps : ndarray
        surface-scattered power
    pd : ndarray
        double-bounce power
    pv : ndarray
        volume-scattered power

    Notes
    -----
    * arrays are returned with the same type as hhhh data

    Reference
    ---------
    1. Freeman, A. and Durden, S., "A three-component scattering model for
       polarimetric SAR data", *IEEE Trans. Geosci. Remote Sensing*, vol. 36,
       no. 3, pp. 963-973, May 1998.
    """
    from pysar.polsar._decomp_modc import free_durden
    if not numthrd:
        # Roughly one thread per 1e5 samples.  `len(hhhh)//1e5` yields a
        # float (the divisor is a float literal), so cast to int before the
        # value is handed to the C extension as a thread count.
        numthrd = int(max(len(hhhh)//1e5, 1))
        if numthrd > maxthrd: numthrd = maxthrd
    elif numthrd < 1:
        raise ValueError('numthrd must be >= 1')
    if null:
        # Flag samples equal to the null value in any channel.
        # NOTE(review): null == 0 is falsy and never triggers this branch;
        # also only hhvv is zeroed here (decomp_haa zeros the power
        # channels) -- confirm this is what the C routine expects.
        nullmask = np.abs(hhhh-null) < 1.e-7
        nullmask += np.abs(vvvv-null) < 1.e-7
        nullmask += np.abs(hvhv-null) < 1.e-7
        nullmask += np.abs(hhvv-null) < 1.e-7
        hhvv[nullmask] = 0.
    # The C routine works in single precision; remember the incoming dtype
    # so the result can be cast back on return.
    hhhhtype = None
    if hhhh.dtype != np.float32:
        hhhhtype = hhhh.dtype
        hhhh = hhhh.astype(np.float32)
        vvvv = vvvv.astype(np.float32)
        hvhv = hvhv.astype(np.float32)
        hhvv = hhvv.astype(np.complex64)
    # Flatten to 1-D when any input is 2-D: the set comprehension holds a
    # falsy 0 exactly when some array has ndim == 2.
    if not all({2-x for x in [hhhh.ndim, vvvv.ndim, hvhv.ndim, hhvv.ndim]}):
        hhhh, vvvv = hhhh.flatten(), vvvv.flatten()
        hvhv, hhvv = hvhv.flatten(), hhvv.flatten()
    P = free_durden(hhhh, vvvv, hvhv, hhvv, numthrd)
    if hhhhtype: P = P.astype(hhhhtype)
    P = P.reshape(3,-1)
    if null: P[0,nullmask], P[1,nullmask], P[2,nullmask] = null, null, null
    return P[0,:], P[1,:], P[2,:]
###---------------------------------------------------------------------------------
def decomp_haa(hhhh,vvvv,hvhv,hhhv,hhvv,hvvv,matform='C',null=None,numthrd=None,maxthrd=8):
    """
    Cloude-Pottier H/A/alpha polarimetric decomposition

    Parameters
    ----------
    hhhh : ndarray
        horizontal co-polarized power (or 0.5|HH + VV|^2 if matform = 'T')
    vvvv : ndarray
        vertical co-polarized power (or 0.5|HH - VV|^2 if matform = 'T')
    hvhv : ndarray
        cross-polarized power (2|HV|^2 for matform = 'T')
    hhhv : ndarray
        HH.HV* cross-product (or 0.5(HH+VV)(HH-VV)* for matform = 'T')
    hhvv : ndarray
        HH.VV* cross-product (or HV(HH+VV)* for matform = 'T')
    hvvv : ndarray
        HV.VV* cross-product (or HV(HH-VV)* for matform = 'T')
    matform : str {'C' or 'T'}
        form of input matrix entries: 'C' for covariance matrix and
        'T' for coherency matrix ['C'] (see ref. 1)
    null : float or None
        null value to exclude from decomposition
    numthrd : int or None
        number of pthreads; None sets numthrd based on the data array size [None]
    maxthrd : int or None
        maximum allowable numthrd [8]

    Returns
    -------
    H : ndarray
        entropy (H = -(p1*log_3(p1) + p2*log_3(p2) + p3*log_3(p3))
        where pi = lam_i/(hhhh+vvvv+hvhv)) and lam is an eigenvalue
    A : ndarray
        anisotropy (A = (lam_2-lam_3)/(lam_2+lam_3) --> lam_1 >= lam_2 >= lam_3
    alpha : ndarray
        alpha angle in degrees (see ref. 1)

    Notes
    -----
    * arrays are returned with the same type as hhhh data
    * if covariance matrix form is used, do not multiply entries by any constants

    Reference
    ---------
    1. Cloude, S. and Pottier, E., "An entropy based classification scheme for
       land applications of polarimetric SAR", *IEEE Trans. Geosci. Remote
       Sensing*, vol. 35, no. 1, pp. 68-78, Jan. 1997.
    """
    from pysar.polsar._decomp_modc import cloude_pot
    # The C routine takes an integer flag: 1 = covariance, 0 = coherency.
    if matform == 'C' or matform == 'c':
        mtf = 1
    elif matform == 'T' or matform == 't':
        mtf = 0
    else:
        raise ValueError("matform must be 'C' or 'T'")
    if not numthrd:
        # Roughly one thread per 1e5 samples.  `len(hhhh)//1e5` yields a
        # float (the divisor is a float literal), so cast to int before the
        # value is handed to the C extension as a thread count.
        numthrd = int(max(len(hhhh)//1e5, 1))
        if numthrd > maxthrd: numthrd = maxthrd
    elif numthrd < 1:
        raise ValueError('numthrd must be >= 1')
    if null:
        # Flag samples equal to the null value in any channel.
        # NOTE(review): null == 0 is falsy and never triggers this branch;
        # only the real power channels are zeroed -- confirm this is what
        # the C routine expects.
        nullmask = np.abs(hhhh-null) < 1.e-7
        nullmask += np.abs(vvvv-null) < 1.e-7
        nullmask += np.abs(hvhv-null) < 1.e-7
        nullmask += np.abs(hhhv-null) < 1.e-7
        nullmask += np.abs(hhvv-null) < 1.e-7
        nullmask += np.abs(hvvv-null) < 1.e-7
        hhhh[nullmask], vvvv[nullmask] = 0., 0.
        hvhv[nullmask] = 0.
    # The C routine works in single precision; remember the incoming dtype
    # so the result can be cast back on return.
    hhhhtype = None
    if hhhh.dtype != np.float32:
        hhhhtype = hhhh.dtype
        hhhh = hhhh.astype(np.float32)
        vvvv = vvvv.astype(np.float32)
        hvhv = hvhv.astype(np.float32)
        hhhv = hhhv.astype(np.complex64)
        hhvv = hhvv.astype(np.complex64)
        hvvv = hvvv.astype(np.complex64)
    # Flatten to 1-D when any input is 2-D: the set comprehension holds a
    # falsy 0 exactly when some array has ndim == 2.
    if not all({2-x for x in [hhhh.ndim, vvvv.ndim, hvhv.ndim, hhhv.ndim, hhvv.ndim, hvvv.ndim]}):
        hhhh, vvvv = hhhh.flatten(), vvvv.flatten()
        hvhv, hhvv = hvhv.flatten(), hhvv.flatten()
        hhhv, hvvv = hhhv.flatten(), hvvv.flatten()
    P = cloude_pot(hhhh, vvvv, hvhv, hhhv, hhvv, hvvv, mtf, numthrd)
    if hhhhtype: P = P.astype(hhhhtype)
    P = P.reshape(3,-1)
    if null: P[0,nullmask], P[1,nullmask], P[2,nullmask] = null, null, null
    return P[0,:], P[1,:], P[2,:]
def decomp_cp(hhhh,vvvv,hvhv,hhhv,hhvv,hvvv,matform='C',null=None,numthrd=None,maxthrd=8):
    """Cloude-Pottier H/A/alpha decomposition (alias).

    Thin wrapper that forwards every argument unchanged to
    :func:`decomp_haa`; see that function for the full parameter and
    return documentation.
    """
    # NOTE: the previous body assigned a *local* variable named __doc__,
    # which never set the function docstring; a real docstring replaces it.
    return decomp_haa(hhhh=hhhh,vvvv=vvvv,hvhv=hvhv,hhhv=hhhv,hhvv=hhvv,hvvv=hvvv,
                      matform=matform,null=null,numthrd=numthrd,maxthrd=maxthrd)
| bminchew/PySAR | pysar/polsar/decomp.py | Python | gpl-3.0 | 6,649 |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'D:\github repos\Python\A05_SimplyGame\Binaries\MyView.ui'
#
# Created: Tue Oct 25 22:22:12 2016
# by: pyside-uic 0.2.15 running on PySide 1.2.2
#
# WARNING! All changes made in this file will be lost!
from PySide import QtCore, QtGui
class Ui_MainWindow(object):
    """pyside-uic generated view class for the number-ordering game.

    Builds a 3x5 grid of game buttons, a statistics form (open / correct /
    wrong / total / games), a New/End button row, and an instruction banner.
    Do not edit by hand beyond comments: regenerating from MyView.ui
    overwrites this file.
    """
    def setupUi(self, MainWindow):
        """Create and lay out all widgets on *MainWindow*."""
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(808, 600)
        self.centralwidget = QtGui.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        # 3x5 grid of game buttons (pushButton_1 .. pushButton_15).
        self.gridLayoutWidget = QtGui.QWidget(self.centralwidget)
        self.gridLayoutWidget.setGeometry(QtCore.QRect(240, 110, 561, 281))
        self.gridLayoutWidget.setObjectName("gridLayoutWidget")
        self.gridLayout = QtGui.QGridLayout(self.gridLayoutWidget)
        self.gridLayout.setContentsMargins(0, 0, 0, 0)
        self.gridLayout.setHorizontalSpacing(7)
        self.gridLayout.setVerticalSpacing(9)
        self.gridLayout.setObjectName("gridLayout")
        self.pushButton_5 = QtGui.QPushButton(self.gridLayoutWidget)
        self.pushButton_5.setObjectName("pushButton_5")
        self.gridLayout.addWidget(self.pushButton_5, 0, 4, 1, 1)
        self.pushButton_1 = QtGui.QPushButton(self.gridLayoutWidget)
        self.pushButton_1.setObjectName("pushButton_1")
        self.gridLayout.addWidget(self.pushButton_1, 0, 0, 1, 1)
        self.pushButton_9 = QtGui.QPushButton(self.gridLayoutWidget)
        self.pushButton_9.setObjectName("pushButton_9")
        self.gridLayout.addWidget(self.pushButton_9, 1, 3, 1, 1)
        self.pushButton_6 = QtGui.QPushButton(self.gridLayoutWidget)
        self.pushButton_6.setObjectName("pushButton_6")
        self.gridLayout.addWidget(self.pushButton_6, 1, 0, 1, 1)
        self.pushButton_10 = QtGui.QPushButton(self.gridLayoutWidget)
        self.pushButton_10.setObjectName("pushButton_10")
        self.gridLayout.addWidget(self.pushButton_10, 1, 4, 1, 1)
        self.pushButton_15 = QtGui.QPushButton(self.gridLayoutWidget)
        self.pushButton_15.setObjectName("pushButton_15")
        self.gridLayout.addWidget(self.pushButton_15, 2, 4, 1, 1)
        self.pushButton_4 = QtGui.QPushButton(self.gridLayoutWidget)
        self.pushButton_4.setObjectName("pushButton_4")
        self.gridLayout.addWidget(self.pushButton_4, 0, 3, 1, 1)
        self.pushButton_11 = QtGui.QPushButton(self.gridLayoutWidget)
        self.pushButton_11.setObjectName("pushButton_11")
        self.gridLayout.addWidget(self.pushButton_11, 2, 0, 1, 1)
        self.pushButton_12 = QtGui.QPushButton(self.gridLayoutWidget)
        self.pushButton_12.setObjectName("pushButton_12")
        self.gridLayout.addWidget(self.pushButton_12, 2, 1, 1, 1)
        self.pushButton_7 = QtGui.QPushButton(self.gridLayoutWidget)
        self.pushButton_7.setObjectName("pushButton_7")
        self.gridLayout.addWidget(self.pushButton_7, 1, 1, 1, 1)
        self.pushButton_3 = QtGui.QPushButton(self.gridLayoutWidget)
        self.pushButton_3.setObjectName("pushButton_3")
        self.gridLayout.addWidget(self.pushButton_3, 0, 2, 1, 1)
        self.pushButton_13 = QtGui.QPushButton(self.gridLayoutWidget)
        self.pushButton_13.setObjectName("pushButton_13")
        self.gridLayout.addWidget(self.pushButton_13, 2, 2, 1, 1)
        self.pushButton_8 = QtGui.QPushButton(self.gridLayoutWidget)
        self.pushButton_8.setObjectName("pushButton_8")
        self.gridLayout.addWidget(self.pushButton_8, 1, 2, 1, 1)
        self.pushButton_14 = QtGui.QPushButton(self.gridLayoutWidget)
        self.pushButton_14.setObjectName("pushButton_14")
        self.gridLayout.addWidget(self.pushButton_14, 2, 3, 1, 1)
        self.pushButton_2 = QtGui.QPushButton(self.gridLayoutWidget)
        self.pushButton_2.setObjectName("pushButton_2")
        self.gridLayout.addWidget(self.pushButton_2, 0, 1, 1, 1)
        # Statistics form on the left: caption labels in the label role,
        # value labels in the field role, with spacers between the rows.
        self.formLayoutWidget = QtGui.QWidget(self.centralwidget)
        self.formLayoutWidget.setGeometry(QtCore.QRect(50, 70, 191, 481))
        self.formLayoutWidget.setObjectName("formLayoutWidget")
        self.formLayout = QtGui.QFormLayout(self.formLayoutWidget)
        self.formLayout.setFieldGrowthPolicy(QtGui.QFormLayout.AllNonFixedFieldsGrow)
        self.formLayout.setContentsMargins(0, 0, 0, 0)
        self.formLayout.setObjectName("formLayout")
        self.label = QtGui.QLabel(self.formLayoutWidget)
        self.label.setObjectName("label")
        self.formLayout.setWidget(0, QtGui.QFormLayout.LabelRole, self.label)
        self.label_2 = QtGui.QLabel(self.formLayoutWidget)
        self.label_2.setObjectName("label_2")
        self.formLayout.setWidget(3, QtGui.QFormLayout.LabelRole, self.label_2)
        self.label_3 = QtGui.QLabel(self.formLayoutWidget)
        self.label_3.setObjectName("label_3")
        self.formLayout.setWidget(6, QtGui.QFormLayout.LabelRole, self.label_3)
        self.label_4 = QtGui.QLabel(self.formLayoutWidget)
        self.label_4.setObjectName("label_4")
        self.formLayout.setWidget(9, QtGui.QFormLayout.LabelRole, self.label_4)
        self.label_5 = QtGui.QLabel(self.formLayoutWidget)
        self.label_5.setObjectName("label_5")
        self.formLayout.setWidget(12, QtGui.QFormLayout.LabelRole, self.label_5)
        spacerItem = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
        self.formLayout.setItem(1, QtGui.QFormLayout.LabelRole, spacerItem)
        spacerItem1 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
        self.formLayout.setItem(5, QtGui.QFormLayout.LabelRole, spacerItem1)
        spacerItem2 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
        self.formLayout.setItem(8, QtGui.QFormLayout.LabelRole, spacerItem2)
        spacerItem3 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
        self.formLayout.setItem(11, QtGui.QFormLayout.LabelRole, spacerItem3)
        spacerItem4 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
        self.formLayout.setItem(2, QtGui.QFormLayout.LabelRole, spacerItem4)
        spacerItem5 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
        self.formLayout.setItem(4, QtGui.QFormLayout.LabelRole, spacerItem5)
        spacerItem6 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
        self.formLayout.setItem(7, QtGui.QFormLayout.LabelRole, spacerItem6)
        spacerItem7 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
        self.formLayout.setItem(10, QtGui.QFormLayout.LabelRole, spacerItem7)
        self.label_6 = QtGui.QLabel(self.formLayoutWidget)
        self.label_6.setObjectName("label_6")
        self.formLayout.setWidget(0, QtGui.QFormLayout.FieldRole, self.label_6)
        self.label_7 = QtGui.QLabel(self.formLayoutWidget)
        self.label_7.setObjectName("label_7")
        self.formLayout.setWidget(3, QtGui.QFormLayout.FieldRole, self.label_7)
        self.label_8 = QtGui.QLabel(self.formLayoutWidget)
        self.label_8.setObjectName("label_8")
        self.formLayout.setWidget(6, QtGui.QFormLayout.FieldRole, self.label_8)
        self.label_9 = QtGui.QLabel(self.formLayoutWidget)
        self.label_9.setObjectName("label_9")
        self.formLayout.setWidget(9, QtGui.QFormLayout.FieldRole, self.label_9)
        self.label_10 = QtGui.QLabel(self.formLayoutWidget)
        self.label_10.setObjectName("label_10")
        self.formLayout.setWidget(12, QtGui.QFormLayout.FieldRole, self.label_10)
        # Bottom row: New / End buttons centered by expanding spacers.
        self.gridLayoutWidget_2 = QtGui.QWidget(self.centralwidget)
        self.gridLayoutWidget_2.setGeometry(QtCore.QRect(240, 390, 561, 161))
        self.gridLayoutWidget_2.setObjectName("gridLayoutWidget_2")
        self.gridLayout_2 = QtGui.QGridLayout(self.gridLayoutWidget_2)
        self.gridLayout_2.setContentsMargins(0, 0, 0, 0)
        self.gridLayout_2.setObjectName("gridLayout_2")
        spacerItem8 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
        self.gridLayout_2.addItem(spacerItem8, 1, 2, 1, 1)
        self.pushButton_24 = QtGui.QPushButton(self.gridLayoutWidget_2)
        self.pushButton_24.setObjectName("pushButton_24")
        self.gridLayout_2.addWidget(self.pushButton_24, 1, 1, 1, 1)
        self.pushButton_25 = QtGui.QPushButton(self.gridLayoutWidget_2)
        self.pushButton_25.setObjectName("pushButton_25")
        self.gridLayout_2.addWidget(self.pushButton_25, 1, 3, 1, 1)
        spacerItem9 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
        self.gridLayout_2.addItem(spacerItem9, 1, 4, 1, 1)
        spacerItem10 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
        self.gridLayout_2.addItem(spacerItem10, 1, 0, 1, 1)
        # Instruction banner above the button grid.
        self.label_11 = QtGui.QLabel(self.centralwidget)
        self.label_11.setGeometry(QtCore.QRect(240, 0, 561, 111))
        self.label_11.setObjectName("label_11")
        MainWindow.setCentralWidget(self.centralwidget)
        self.menubar = QtGui.QMenuBar(MainWindow)
        self.menubar.setGeometry(QtCore.QRect(0, 0, 808, 21))
        self.menubar.setObjectName("menubar")
        MainWindow.setMenuBar(self.menubar)
        self.statusbar = QtGui.QStatusBar(MainWindow)
        self.statusbar.setObjectName("statusbar")
        MainWindow.setStatusBar(self.statusbar)
        self.retranslateUi(MainWindow)
        # The End button closes the main window directly.
        QtCore.QObject.connect(self.pushButton_25, QtCore.SIGNAL("clicked()"), MainWindow.close)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)
    def retranslateUi(self, MainWindow):
        """Set all user-visible (translatable) widget texts."""
        MainWindow.setWindowTitle(QtGui.QApplication.translate("MainWindow", "MainWindow", None, QtGui.QApplication.UnicodeUTF8))
        self.pushButton_5.setText(QtGui.QApplication.translate("MainWindow", "PushButton", None, QtGui.QApplication.UnicodeUTF8))
        self.pushButton_1.setText(QtGui.QApplication.translate("MainWindow", "PushButton", None, QtGui.QApplication.UnicodeUTF8))
        self.pushButton_9.setText(QtGui.QApplication.translate("MainWindow", "PushButton", None, QtGui.QApplication.UnicodeUTF8))
        self.pushButton_6.setText(QtGui.QApplication.translate("MainWindow", "PushButton", None, QtGui.QApplication.UnicodeUTF8))
        self.pushButton_10.setText(QtGui.QApplication.translate("MainWindow", "PushButton", None, QtGui.QApplication.UnicodeUTF8))
        self.pushButton_15.setText(QtGui.QApplication.translate("MainWindow", "PushButton", None, QtGui.QApplication.UnicodeUTF8))
        self.pushButton_4.setText(QtGui.QApplication.translate("MainWindow", "PushButton", None, QtGui.QApplication.UnicodeUTF8))
        self.pushButton_11.setText(QtGui.QApplication.translate("MainWindow", "PushButton", None, QtGui.QApplication.UnicodeUTF8))
        self.pushButton_12.setText(QtGui.QApplication.translate("MainWindow", "PushButton", None, QtGui.QApplication.UnicodeUTF8))
        self.pushButton_7.setText(QtGui.QApplication.translate("MainWindow", "PushButton", None, QtGui.QApplication.UnicodeUTF8))
        self.pushButton_3.setText(QtGui.QApplication.translate("MainWindow", "PushButton", None, QtGui.QApplication.UnicodeUTF8))
        self.pushButton_13.setText(QtGui.QApplication.translate("MainWindow", "PushButton", None, QtGui.QApplication.UnicodeUTF8))
        self.pushButton_8.setText(QtGui.QApplication.translate("MainWindow", "PushButton", None, QtGui.QApplication.UnicodeUTF8))
        self.pushButton_14.setText(QtGui.QApplication.translate("MainWindow", "PushButton", None, QtGui.QApplication.UnicodeUTF8))
        self.pushButton_2.setText(QtGui.QApplication.translate("MainWindow", "PushButton", None, QtGui.QApplication.UnicodeUTF8))
        self.label.setText(QtGui.QApplication.translate("MainWindow", "offen:", None, QtGui.QApplication.UnicodeUTF8))
        self.label_2.setText(QtGui.QApplication.translate("MainWindow", "korrket:", None, QtGui.QApplication.UnicodeUTF8))
        self.label_3.setText(QtGui.QApplication.translate("MainWindow", "falsch:", None, QtGui.QApplication.UnicodeUTF8))
        self.label_4.setText(QtGui.QApplication.translate("MainWindow", "gesamt:", None, QtGui.QApplication.UnicodeUTF8))
        self.label_5.setText(QtGui.QApplication.translate("MainWindow", "Spiele:", None, QtGui.QApplication.UnicodeUTF8))
        self.label_6.setText(QtGui.QApplication.translate("MainWindow", "offenAnzahl", None, QtGui.QApplication.UnicodeUTF8))
        self.label_7.setText(QtGui.QApplication.translate("MainWindow", "korrektAnzahl", None, QtGui.QApplication.UnicodeUTF8))
        self.label_8.setText(QtGui.QApplication.translate("MainWindow", "falschAnzahl", None, QtGui.QApplication.UnicodeUTF8))
        self.label_9.setText(QtGui.QApplication.translate("MainWindow", "gesamtAnzahl", None, QtGui.QApplication.UnicodeUTF8))
        self.label_10.setText(QtGui.QApplication.translate("MainWindow", "spieleAnzahl", None, QtGui.QApplication.UnicodeUTF8))
        self.pushButton_24.setText(QtGui.QApplication.translate("MainWindow", "Neu", None, QtGui.QApplication.UnicodeUTF8))
        self.pushButton_25.setText(QtGui.QApplication.translate("MainWindow", "Ende", None, QtGui.QApplication.UnicodeUTF8))
        self.label_11.setText(QtGui.QApplication.translate("MainWindow", "<html><head/><body><p align=\"center\"><span style=\" font-size:14pt; font-weight:600;\">Drücken Sie die Buttons in aufsteigender Reihenfolge</span></p></body></html>", None, QtGui.QApplication.UnicodeUTF8))
| lzuba-tgm/A05_SimplyGame | Ui_MainWindow.py | Python | gpl-3.0 | 13,842 |
#!/usr/bin/env python2
import sys, getopt
import re
import time
def get_option():
    """Parse -i/-o/-h from sys.argv; return (input_file, output_file, help_text)."""
    parsed, _ = getopt.getopt(sys.argv[1:], "hi:o:")
    in_path, out_path, help_text = "", "", ""
    for flag, arg in parsed:
        if flag == "-i":
            in_path = arg
        elif flag == "-o":
            out_path = arg
        elif flag == "-h":
            help_text = "E-value distribution.\n-i : inputfile\n-o : outputfile\n"
    return in_path, out_path, help_text
def main(input_file, output_file):
    """Tally the E-values in *input_file* (one per line) into six bins and
    write the space-separated counts to *output_file*.

    Bins, in output order: exactly 0, <1e-150, <1e-100, <1e-50, <1e-10,
    and everything else.
    """
    o = a = b = c = d = g = 0
    with open(input_file) as f:
        for line in f:
            # float() tolerates surrounding whitespace; the previous
            # line[:-1] slice chopped a digit off the final value whenever
            # the file lacked a trailing newline.
            value = float(line)
            if value == 0:
                o += 1
            elif value < 1e-150:
                a += 1
            elif value < 1e-100:
                b += 1
            elif value < 1e-50:
                c += 1
            elif value < 1e-10:
                d += 1
            else:
                g += 1
    # Write the counts in bin order; `with` closes the file deterministically.
    with open(output_file, 'w') as fout:
        fout.write(" ".join(str(n) for n in (o, a, b, c, d, g)))
if __name__ == "__main__":
    # Time the run; only do the real work when no help text was requested.
    started = time.time()
    in_path, out_path, help_text = get_option()
    if help_text:
        print(help_text)
    else:
        main(in_path, out_path)
        print("time: " + str(time.time() - started))
| sdws1983/bioinfo-relate | Blast-relate/E-value-distribution.py | Python | gpl-3.0 | 1,085 |
#!/usr/bin/env python
"""
=====================================
dummySensor.py - Dummy Sensor Handler
=====================================
Displays a silly little window for faking sensor values by clicking on buttons.
"""
import threading, subprocess, os, time, socket
import numpy, math
import sys
class sensorHandler:
def __init__(self, proj, shared_data):
"""
Start up sensor handler subwindow and create a new thread to listen to it.
"""
# Since we don't want to have to poll the subwindow for each request,
# we need a data structure to cache sensor states:
self.sensorValue = {}
self.proj = proj
self.sensorListenInitialized = False
self._running = True
self.p_sensorHandler = None
def _stop(self):
if self.p_sensorHandler is not None:
print >>sys.__stderr__, "(SENS) Killing dummysensor GUI..."
self.p_sensorHandler.stdin.write(":QUIT\n")
self.p_sensorHandler.stdin.close()
print >>sys.__stderr__, "(SENS) Terminating dummysensor GUI listen thread..."
self._running = False
self.sensorListenThread.join()
def _createSubwindow(self):
# Create a subprocess
print "(SENS) Starting sensorHandler window and listen thread..."
self.p_sensorHandler = subprocess.Popen(["python", "-u", os.path.join(self.proj.ltlmop_root,"lib","handlers","share","_SensorHandler.py")], stdin=subprocess.PIPE)
# Create new thread to communicate with subwindow
self.sensorListenThread = threading.Thread(target = self._sensorListen)
self.sensorListenThread.daemon = True
self.sensorListenThread.start()
# Block until the sensor listener gets the go-ahead from the subwindow
while not self.sensorListenInitialized:
time.sleep(0.05) # Yield cpu
def regionBit(self,name,init_region,bit_num,initial=False):
"""
Return the value of bit #bit_num in the bit-vector encoding of the currently selected region
name (string): Unique identifier for region sensor (default="target")
init_region (region): Name of the sensor whose state is interested
bit_num (int): The index of the bit to return
"""
if initial:
if not self.sensorListenInitialized:
self._createSubwindow()
if name not in self.sensorValue.keys():
# create a new map element
# choose an initial (decomposed) region inside the desired one
self.sensorValue[name] = self.proj.regionMapping[init_region][0]
self.p_sensorHandler.stdin.write("loadproj," + self.proj.getFilenamePrefix() + ".spec,\n")
self.p_sensorHandler.stdin.write(",".join(["region", name, self.sensorValue[name]]) + "\n")
return True
else:
if name in self.sensorValue:
reg_idx = self.proj.rfi.indexOfRegionWithName(self.sensorValue[name])
numBits = int(math.ceil(math.log(len(self.proj.rfi.regions),2)))
reg_idx_bin = numpy.binary_repr(reg_idx, width=numBits)
#print name, bit_num, (reg_idx_bin[bit_num] == '1')
return (reg_idx_bin[bit_num] == '1')
else:
print "(SENS) WARNING: Region sensor %s is unknown!" % button_name
return None
def buttonPress(self,button_name,init_value,initial=False):
"""
Return a boolean value corresponding to the state of the sensor with name ``sensor_name``
If such a sensor does not exist, returns ``None``
button_name (string): Name of the sensor whose state is interested
init_value (bool): The initial state of the sensor (default=False)
"""
if initial:
if not self.sensorListenInitialized:
self._createSubwindow()
if button_name not in self.sensorValue.keys():
self.sensorValue[button_name] = init_value
if init_value:
self.p_sensorHandler.stdin.write("button," + button_name + ",1\n")
else:
self.p_sensorHandler.stdin.write("button," + button_name + ",0\n")
return self.sensorValue[button_name]
else:
if button_name in self.sensorValue:
return self.sensorValue[button_name]
else:
print "(SENS) WARNING: Sensor %s is unknown!" % button_name
return None
def _sensorListen(self):
"""
Processes messages from the sensor handler subwindow, and updates our cache appropriately
"""
host = 'localhost'
port = 23459
buf = 1024
addr = (host,port)
UDPSock = socket.socket(socket.AF_INET,socket.SOCK_DGRAM)
UDPSock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
UDPSock.settimeout(1)
try:
UDPSock.bind(addr)
except:
print "ERROR: Cannot bind to port. Try killing all Python processes and trying again."
return
while self._running:
# Wait for and receive a message from the subwindow
try:
input,addrFrom = UDPSock.recvfrom(1024)
except socket.timeout:
continue
if input == '': # EOF indicates that the connection has been destroyed
print "(SENS) Sensor handler listen thread is shutting down."
break
# Check for the initialization signal, if necessary
if not self.sensorListenInitialized and input.strip() == "Hello!":
self.sensorListenInitialized = True
continue
# Get the data out of the message
args = input.strip().split("=")
if len(args) != 2:
continue
# Update our internal cache
if args[1] == "True":
self.sensorValue[args[0]] = True
elif args[1] == "False":
self.sensorValue[args[0]] = False
else:
self.sensorValue[args[0]] = args[1]
| jadecastro/LTLMoP | src/lib/handlers/share/dummySensor.py | Python | gpl-3.0 | 6,284 |
from http.server import BaseHTTPRequestHandler, HTTPServer
import requests
import urllib.parse
import json
# Define server address and port, use localhost if you are running this on your Mattermost server.
HOSTNAME = ''  # '' binds all interfaces; use 'localhost' to restrict to loopback
PORT = 7800  # must match the port configured for the slash command in Mattermost
# guarantee unicode string
_u = lambda t: t.decode('UTF-8', 'replace') if isinstance(t, str) else t
class PostHandler(BaseHTTPRequestHandler):
    """Serves the Mattermost slash-command endpoint.

    Mattermost issues a GET whose urlencoded payload arrives in the request
    body; the handler validates the command token and, on success, replies
    with a JSON "in_channel" message containing the weather report.
    """
    def do_GET(self):
        # Mattermost sends the slash-command payload in the body of the GET,
        # so read Content-Length bytes and parse it like a form post.
        length = int(self.headers['Content-Length'])
        data = urllib.parse.parse_qs(self.rfile.read(length).decode('utf-8'))
        # parse_qs maps every field to a list; absent fields default to [].
        # The payload also carries response_url, channel_id, team_id,
        # user_name, etc., but only token and text are actually used.
        token = data.get('token', [])
        text = data.get('text', [])
        print("Found command %s" % token)
        responsetext = ''
        # Guarding on an empty list fixes the IndexError that a request
        # without a token field used to raise.
        if token and token[0] == u'<your-slash-command-token>':
            if len(text) > 0:
                responsetext = getweather(text[0])
            else:
                responsetext = getweather()
        if responsetext:
            res = {}
            res['response_type'] = 'in_channel'
            res['text'] = responsetext
            self.send_response(200)
            self.send_header("Content-type", "application/json")
            self.end_headers()
            self.wfile.write(json.dumps(res).encode("utf-8"))
        return
#The command search in wunderground for the specified city and return weather info
def getweather(city="Rovereto, Italy"):
    """Look up *city* on Wunderground and return a Markdown weather report.

    Returns a list of candidate names when the query is ambiguous, an
    error string when nothing matches, and a Markdown table of current
    conditions when exactly one city is found.
    """
    print("Searching cities containing %s" % city)
    # The autocomplete endpoint resolves free text to known city entries.
    r = requests.get("http://autocomplete.wunderground.com/aq?query=%s" % urllib.parse.quote_plus(city))
    cities = r.json()
    if "RESULTS" not in cities or len(cities["RESULTS"]) == 0:
        print("No result")
        return u"**No city found**"
    elif len(cities["RESULTS"]) > 1:
        print("Found more than 1 city")
        res = u"**Available cities**:\r\n"
        for c in cities["RESULTS"]:
            res = u"%s* %s\r\n" % (res, c["name"])
        return res
    else:
        print("Requesting weather info from wunderground")
        c = cities["RESULTS"][0]
        # c["l"] is the canonical location path returned by autocomplete.
        r = requests.get('http://api.wunderground.com/api/<your-wunderground-api-here>/geolookup/conditions%s.json' % c["l"])
        data = r.json()
        co = data['current_observation']
        res = u'#### Weather conditions in **%s**:\n\n' % data['location']['city']
        # BUG FIX: the original formatted a bare "\n\n" with two arguments,
        # which always raised TypeError ("not all arguments converted");
        # emit the intended Markdown image (condition text + icon) instead.
        res += u"![%s](%s)\n\n" % (co['weather'], co['icon_url'])
        res += u"| Field | Value |\n"
        res += u'| :---- | :----: |\n'
        res += u'| Temperature : | %s °C |\n' % str(co['temp_c'])
        res += u'| Feelslike : | %s °C |\n' % str(co['feelslike_c'])
        res += u'| Wind : | %s |\n' % str(co['wind_string'])
        res += u'| Wind direction : | %s |\n' % str(co['wind_dir'])
        # km/h -> knots conversion (divide by 1.852).
        res += u'| Wind speed : | %s kn |\n' % str(round(co['wind_kph'] * 1 / 1.852, 1))
        res += u'| Wind gust : | %s kn |\n' % str(round(float(co['wind_gust_kph']) * 1 / 1.852, 1))
        return res
#Start the app listening on specified port for http GET requests incoming from mattermost slash command
# Start the HTTP server that receives the Mattermost slash-command
# requests; blocks serving forever until interrupted.
if __name__ == '__main__':
    server = HTTPServer((HOSTNAME, PORT), PostHandler)
    print('Starting matterslash server, use <Ctrl-C> to stop')
    server.serve_forever()
| mlongo4290/mattermost-wunderground-slash-command | wunderground-slash-command.py | Python | gpl-3.0 | 4,117 |
# cspSLS.py - Stochastic Local Search for Solving CSPs
# AIFCA Python3 code Version 0.7.1 Documentation at http://aipython.org
# Artificial Intelligence: Foundations of Computational Agents
# http://artint.info
# Copyright David L Poole and Alan K Mackworth 2017.
# This work is licensed under a Creative Commons
# Attribution-NonCommercial-ShareAlike 4.0 International License.
# See: http://creativecommons.org/licenses/by-nc-sa/4.0/deed.en
import heapq
import random
import matplotlib.pyplot as plt
from aipython.cspProblem import CSP, Constraint
from aipython.searchProblem import Arc, Search_problem
from aipython.utilities import Displayable
class SLSearcher(Displayable):
    """A search problem directly from the CSP..
    A node is a variable:value dictionary"""

    def __init__(self, csp):
        self.csp = csp
        # Only variables with more than one domain value can ever be flipped.
        self.variables_to_select = {var for var in self.csp.variables
                                    if len(self.csp.domains[var]) > 1}
        # Create assignment and conflicts set
        self.current_assignment = None  # this will trigger a random restart
        self.number_of_steps = 1  # number of steps after the initialization
        super().__init__()

    def restart(self):
        """creates a new total assignment and the conflict set
        """
        self.current_assignment = {var: random_sample(dom) for (var, dom) in self.csp.domains.items()}
        self.display(2, "Initial assignment", self.current_assignment)
        # A constraint is a conflict iff it does not hold in the assignment.
        self.conflicts = set()
        for con in self.csp.constraints:
            if not con.holds(self.current_assignment):
                self.conflicts.add(con)
        self.display(2, "Conflicts:", self.conflicts)
        # Invalidate the priority queue; it is rebuilt lazily by create_pq.
        self.variable_pq = None

    def search(self, max_steps=500, prob_best=1.0, prob_anycon=1.0):
        """
        returns the number of steps or None if these is no solution
        if there is a solution, it can be found in self.current_assignment
        """
        if self.current_assignment is None:
            self.restart()
            self.number_of_steps += 1
            if not self.conflicts:
                return self.number_of_steps
        if prob_best > 0:  # we need to maintain a variable priority queue
            return self.search_with_var_pq(max_steps, prob_best, prob_anycon)
        else:
            return self.search_with_any_conflict(max_steps, prob_anycon)

    def search_with_any_conflict(self, max_steps, prob_anycon=1.0):
        """Searches with the any_conflict heuristic.
        This relies on just maintaining the set of conflicts;
        it does not maintain a priority queue
        """
        self.variable_pq = None  # we are not maintaining the priority queue.
        # This ensures it is regenerated if needed.
        for i in range(max_steps):
            self.number_of_steps += 1
            if random.random() < prob_anycon:
                con = random_sample(self.conflicts)  # pick random conflict
                var = random_sample(con.scope)  # pick variable in conflict
            else:
                var = random_sample(self.variables_to_select)
            if len(self.csp.domains[var]) > 1:
                # Pick any value other than the current one.
                val = random_sample(self.csp.domains[var] -
                                    {self.current_assignment[var]})
                self.display(2, "Assigning", var, "=", val)
                self.current_assignment[var] = val
                # Re-check only the constraints that mention var.
                for varcon in self.csp.var_to_const[var]:
                    if varcon.holds(self.current_assignment):
                        if varcon in self.conflicts:
                            self.conflicts.remove(varcon)
                            self.display(3, "Became consistent", varcon)
                        else:
                            self.display(3, "Still consistent", varcon)
                    else:
                        if varcon not in self.conflicts:
                            self.conflicts.add(varcon)
                            self.display(3, "Became inconsistent", varcon)
                        else:
                            self.display(3, "Still inconsistent", varcon)
                self.display(2, "Conflicts:", self.conflicts)
            if not self.conflicts:
                self.display(1, "Solution found", self.current_assignment,
                             "in", self.number_of_steps, "steps")
                return self.number_of_steps
        self.display(1, "No solution in", self.number_of_steps, "steps",
                     len(self.conflicts), "conflicts remain")
        return None

    def search_with_var_pq(self, max_steps, prob_best=1.0, prob_anycon=1.0):
        """search with a priority queue of variables.
        This is used to select a variable with the most conflicts.
        """
        if not self.variable_pq:
            self.create_pq()
        pick_best_or_con = prob_best + prob_anycon
        for i in range(max_steps):
            self.number_of_steps += 1
            randnum = random.random()
            # Pick a variable
            if randnum < prob_best:  # pick best variable
                var, oldval = self.variable_pq.top()
            elif randnum < pick_best_or_con:  # pick a variable in a conflict
                con = random_sample(self.conflicts)
                var = random_sample(con.scope)
            else:  # pick any variable that can be selected
                var = random_sample(self.variables_to_select)
            if len(self.csp.domains[var]) > 1:  # var has other values
                # Pick a value
                val = random_sample(self.csp.domains[var] - {self.current_assignment[var]})
                self.display(2, "Assigning", var, "=", val)
                # Update the priority queue
                # var_differential accumulates the per-variable change in
                # conflict counts caused by this single reassignment.
                var_differential = {}
                self.current_assignment[var] = val
                for varcon in self.csp.var_to_const[var]:
                    self.display(3, "Checking", varcon)
                    if varcon.holds(self.current_assignment):
                        if varcon in self.conflicts:  # was incons, now consis
                            self.display(3, "Became consistent", varcon)
                            self.conflicts.remove(varcon)
                            for v in varcon.scope:  # v is in one fewer conflicts
                                var_differential[v] = var_differential.get(v, 0) - 1
                        else:
                            self.display(3, "Still consistent", varcon)
                    else:
                        if varcon not in self.conflicts:  # was consis, not now
                            self.display(3, "Became inconsistent", varcon)
                            self.conflicts.add(varcon)
                            for v in varcon.scope:  # v is in one more conflicts
                                var_differential[v] = var_differential.get(v, 0) + 1
                        else:
                            self.display(3, "Still inconsistent", varcon)
                self.variable_pq.update_each_priority(var_differential)
                self.display(2, "Conflicts:", self.conflicts)
            if not self.conflicts:  # no conflicts, so solution found
                self.display(1, "Solution found", self.current_assignment, "in", self.number_of_steps, "steps")
                return self.number_of_steps
        self.display(1, "No solution in", self.number_of_steps, "steps", len(self.conflicts), "conflicts remain")
        return None

    def create_pq(self):
        """Create the variable to number-of-conflicts priority queue.
        This is needed to select the variable in the most conflicts.
        The value of a variable in the priority queue is the negative of the
        number of conflicts the variable appears in.
        """
        self.variable_pq = Updatable_priority_queue()
        var_to_number_conflicts = {}
        for con in self.conflicts:
            for var in con.scope:
                var_to_number_conflicts[var] = var_to_number_conflicts.get(var, 0) + 1
        for var, num in var_to_number_conflicts.items():
            if num > 0:
                self.variable_pq.add(var, -num)
def random_sample(st):
    """Return a uniformly random element of the collection *st*.

    ``random.sample(set, 1)[0]`` was deprecated for sets in Python 3.9 and
    removed in 3.11; converting to a tuple first keeps set (and any other
    iterable-collection) support while using the supported
    ``random.choice`` API.
    """
    return random.choice(tuple(st))
class Updatable_priority_queue(object):
    """A priority queue where the values can be updated.
    Elements with the same value are ordered randomly.
    This code is based on the ideas described in
    http://docs.python.org/3.3/library/heapq.html
    It could probably be done more efficiently by
    shuffling the modified element in the heap.

    Removal is lazy: a removed entry stays in the heap with its element
    slot overwritten by the REMOVED sentinel, and is discarded when it
    surfaces in pop()/top().
    """

    def __init__(self):
        self.pq = []  # priority queue of [val,rand,elt] triples
        self.elt_map = {}  # map from elt to [val,rand,elt] triple in pq
        self.REMOVED = "*removed*"  # a string that won't be a legal element
        self.max_size = 0  # high-water mark of the heap, for statistics

    def add(self, elt, val):
        """adds elt to the priority queue with priority=val.
        """
        assert val <= 0, val
        assert elt not in self.elt_map, elt
        # The random middle field breaks ties so equal-priority elements
        # come out in random order (and avoids comparing elements).
        new_triple = [val, random.random(), elt]
        heapq.heappush(self.pq, new_triple)
        self.elt_map[elt] = new_triple

    def remove(self, elt):
        """remove the element from the priority queue"""
        if elt in self.elt_map:
            # Lazy deletion: mark the heap entry instead of re-heapifying.
            self.elt_map[elt][2] = self.REMOVED
            del self.elt_map[elt]

    def update_each_priority(self, update_dict):
        """update values in the priority queue by subtracting the values in
        update_dict from the priority of those elements in priority queue.
        """
        for elt, incr in update_dict.items():
            if incr != 0:
                # Elements not currently queued are treated as priority 0.
                newval = self.elt_map.get(elt, [0])[0] - incr
                assert newval <= 0, str(elt) + ":" + str(newval + incr) + "-" + str(incr)
                self.remove(elt)
                if newval != 0:
                    self.add(elt, newval)

    def pop(self):
        """Removes and returns the (elt,value) pair with minimal value.
        If the priority queue is empty, IndexError is raised.
        """
        self.max_size = max(self.max_size, len(self.pq))  # keep statistics
        triple = heapq.heappop(self.pq)
        # Skip over lazily-removed entries.
        while triple[2] == self.REMOVED:
            triple = heapq.heappop(self.pq)
        del self.elt_map[triple[2]]
        return triple[2], triple[0]  # elt, value

    def top(self):
        """Returns the (elt,value) pair with minimal value, without removing it.
        If the priority queue is empty, IndexError is raised.
        """
        self.max_size = max(self.max_size, len(self.pq))  # keep statistics
        triple = self.pq[0]
        # Discard lazily-removed entries that have surfaced at the root.
        while triple[2] == self.REMOVED:
            heapq.heappop(self.pq)
            triple = self.pq[0]
        return triple[2], triple[0]  # elt, value

    def empty(self):
        """returns True iff the priority queue is empty"""
        return all(triple[2] == self.REMOVED for triple in self.pq)
class Runtime_distribution(object):
    """Plots cumulative runtime distributions of repeated SLS runs."""

    def __init__(self, csp, xscale='log'):
        """Sets up plotting for csp
        xscale is either 'linear' or 'log'
        """
        self.csp = csp
        plt.ion()
        plt.xlabel("Number of Steps")
        plt.ylabel("Cumulative Number of Runs")
        plt.xscale(xscale)  # Makes a 'log' or 'linear' scale

    def plot_run(self, num_runs=100, max_steps=1000, prob_best=1.0, prob_anycon=1.0):
        """Runs the searcher num_runs times and plots the sorted step counts
        of the successful runs as a cumulative distribution curve."""
        stats = []
        # Temporarily silence all searcher output by zeroing the *class*
        # attribute, remembering the old level.
        SLSearcher.max_display_level, temp_mdl = 0, SLSearcher.max_display_level  # no display
        for i in range(num_runs):
            searcher = SLSearcher(self.csp)
            num_steps = searcher.search(max_steps, prob_best, prob_anycon)
            if num_steps:
                stats.append(num_steps)
        # NOTE(review): this sets an *instance* attribute on the last
        # searcher, not the class attribute zeroed above; the class-level
        # restore happens at the end of the method -- confirm intent.
        searcher.max_display_level = temp_mdl  # restore display
        stats.sort()
        if prob_best >= 1.0:
            label = "P(best)=1.0"
        else:
            p_ac = min(prob_anycon, 1 - prob_best)
            label = "P(best)=%.2f, P(ac)=%.2f" % (prob_best, p_ac)
        plt.plot(stats, range(len(stats)), label=label)
        plt.legend(loc="upper left")
        # plt.draw()
        SLSearcher.max_display_level = temp_mdl  # restore display
def sls_solver(csp, prob_best=0.7):
    """Solve *csp* by stochastic local search; return the final assignment.

    prob_best is the probability of flipping the most-conflicted variable
    on each step (passed through to SLSearcher.search).
    """
    searcher = SLSearcher(csp)
    searcher.search(1000, prob_best)
    return searcher.current_assignment
def any_conflict_solver(csp):
    """Solve *csp* with the any-conflict heuristic (prob_best set to 0)."""
    return sls_solver(csp, 0)
if __name__ == "__main__":
test(sls_solver)
test(any_conflict_solver)
# Test
#p = Runtime_distribution(extended_csp)
# p.plot_run(100,1000,0)
# p.plot_run(100,1000,1.0)
# p.plot_run(100,1000,0.7)
| AISpace2/AISpace2 | aipython/cspSLSPlot.py | Python | gpl-3.0 | 12,720 |
# GUI for pyfdtd using PySide
# Copyright (C) 2012 Patrik Gebhardt
# Contact: grosser.knuff@googlemail.com
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from newLayer import *
from newSimulation import *
| schansge/pyfdtd-gui | src/dialogs/__init__.py | Python | gpl-3.0 | 797 |
import os
from optparse import OptionParser
import io
import time
import random
import thread
import sys
from smtp_stuff import sendMail
from imap_stuff import checkMessages
import datetime
import string
import array
from time import gmtime, strftime
from socket import *
user = ''
recipient = ''
incoming_server = ''
outgoing_server = ''
password = ''
imei = 0
aprs_server = 'second.aprs.net'
aprs_port = 20157
aprs_password = ''
aprs_callsign = ''
aprs_address = '>APRS,TCPIP*:'
aprs_is_enabled = False
# comment length is supposed to be 0 to 43 char.
email_enabled = False
ip_enabled = False
http_post_enabled = False
COMMAND_GET_POS = 0
COMMAND_RELEASE = 1
COMMAND_SET_REPORT_INTERVAL = 2
def send_mo_email(msg):
    """Send *msg* as an Iridium mobile-originated message via e-mail.

    The binary payload is written to ``msg.sbd`` and attached to a mail
    whose subject is the modem IMEI (the format the Iridium gateway
    expects).  The module-level account settings are only *read* here, so
    the original ``global`` declarations (including ``global email``,
    which named a variable that does not exist) were unnecessary and have
    been removed.
    """
    # Body is empty; all information travels in the subject + attachment.
    body = ''
    # subject
    subject = '%d' % imei
    # message is included as an attachment; ensure the handle is closed
    # even if the write fails.
    attachment = 'msg.sbd'
    fd = open(attachment, 'wb')
    try:
        fd.write(msg)
    finally:
        fd.close()
    sendMail(subject, body, user, recipient, password, outgoing_server, attachment)
def log(string):
    """Print *string* to stdout; placeholder for future file-based logging."""
    print string
    #TODO logic for text logging
def parse_text_report_no_fix(report):
    """Parse a report received while the tracker had no GPS fix.

    Expected payload after the type byte and ':' separator is
    "int_temp,ext_temp" (comma-separated floats).
    """
    report = report.split(":")
    report = report[1]
    report = report.split(",")
    int_temp = float(report[0])
    ext_temp = float(report[1])
    # Values above 100 are taken as sensor faults -- TODO confirm threshold.
    if (int_temp > 100.0 or ext_temp > 100.0):
        log("Probable invalid temperature readings.")
    else:
        log("Internal Temp:%.1f External Temp:%.1f" % (int_temp, ext_temp))
def send_aprs_packet(position):
    """Encode *position* as an APRS position packet and send it to APRS-IS.

    position: [time_str, lat, lon, alt, kts, crs] as produced by
    parse_text_report.  Only lat/lon are used; altitude, speed and course
    are currently hard-coded placeholders.
    """
    global aprs_callsign
    # create socket & connect to server
    sSock = socket(AF_INET, SOCK_STREAM)
    sSock.connect((aprs_server, aprs_port))
    # logon
    sSock.send('user ' + aprs_callsign + ' pass ' + aprs_password + ' vers "' + aprs_callsign + ' Python" \n')
    # get position information and encode string
    lat = position[1]
    lon = position[2]
    alt = 100
    kts = 0.1
    crs = 30
    # Latitude as ddmm.mm with hemisphere suffix.
    lat_str = "=%02d" % (lat) + "%05.2f" % ((abs(lat) % 1) * 60.0)
    if lat > 0:
        lat_str += "N"
    else:
        lat_str += "S"
    # Longitude as dddmm.mm with hemisphere suffix.
    lon_str = "%03d" % (abs(lon)) + "%05.2f" % ((abs(lon) % 1) * 60.0)
    # BUG FIX: the hemisphere test previously reused ``lat`` (copy/paste);
    # it must inspect ``lon``.  The original positive==West mapping is
    # preserved -- confirm it matches the tracker's sign convention.
    if lon > 0:
        lon_str += "W"
    else:
        lon_str += "E"
    # combine the two
    position_str = lat_str + "/" + lon_str
    # add course, speed, and altitude
    comment = "O%03d/%03d/A=%06d" % (crs, kts, alt)
    sSock.send(aprs_callsign + aprs_address + position_str + comment + '\n')
    print("Packet sent to APRS: " + time.ctime())
    # close socket -- must be closed to avoid buffer overflow
    sSock.shutdown(0)
    sSock.close()
def update_position(position):
    """Forward a new position to every enabled output (currently APRS only)."""
    if aprs_is_enabled:
        send_aprs_packet(position)
def parse_text_report(report):
    """Parse a full position report and publish it.

    Expected payload after the type byte and ':' separator is
    "time,lat,lon,alt,kts,crs,int_temp,ext_temp" (comma-separated).
    """
    report = report.split(":")
    report = report[1]
    report = report.split(",")
    time_str = report[0]
    lat = float(report[1])
    lon = float(report[2])
    alt = float(report[3])
    kts = float(report[4])
    crs = float(report[5])
    position = [time_str, lat, lon, alt, kts, crs]
    int_temp = float(report[6])
    ext_temp = float(report[7])
    # Values above 100 are taken as sensor faults -- TODO confirm threshold.
    if (int_temp > 100.0 or ext_temp > 100.0):
        log("Probable invalid temperature readings.")
    else:
        log("Internal Temp:%.1f External Temp:%.1f" % (int_temp, ext_temp))
    print "Report - Lat:", lat, "Lon:", lon, "Alt(ft):", alt, "Speed(kts):", kts, "Course(deg):", crs
    update_position(position)
MSG_TEXT_REPORT = 'U'
MSG_TEXT_REPORT_NO_FIX = 'F'
def parse_incoming(msg):
    """Dispatch an incoming message to its parser based on the type byte.

    Unknown type bytes are silently ignored, as before.
    """
    handlers = {
        MSG_TEXT_REPORT_NO_FIX: parse_text_report_no_fix,
        MSG_TEXT_REPORT: parse_text_report,
    }
    handler = handlers.get(msg[0])
    if handler is not None:
        handler(msg)
def email_check_task(name):
    """Background thread body: poll the IMAP inbox once a second and feed
    any received message to parse_incoming.  *name* is unused (thread tag).
    """
    # check e-mail for messages
    while(1):
        #print 'Checking email'
        msg, subject, received_msg, unread_msgs = checkMessages(incoming_server, user, password)
        if received_msg:
            print "Received Message", msg, "\r"
            parse_incoming(msg)
        time.sleep(1.0)
def SET_REPORT_INTERVAL(args):
print "Setting reporting interval"
if RepresentsInt(args[0]):
value = int(args[0])
byte1 = ( value >> 8 ) & 0xFF
byte0 = ( value ) & 0xFF
msg = array.array('B',[COMMAND_SET_REPORT_INTERVAL,byte1,byte0])
send_mo_email(msg)
else:
"First argument must be int seconds between 1 - 65532. 0 to disable automatic reporting."
def GET_POS(args):
    """CLI command: request an immediate position report (args unused)."""
    print "Sending position request"
    msg = array.array('B', [COMMAND_GET_POS, 1, 2, 3])  # extra bytes for not good reason
    send_mo_email(msg)
def RELEASE(args):
print "Sending ballast release command"
if RepresentsInt(args[0]):
msg = array.array('B',[COMMAND_RELEASE,int(args[0])])
print msg
send_mo_email(msg)
else:
"First argument must be int"
def RepresentsInt(s):
    """Return True when *s* can be parsed as a base-10 integer."""
    try:
        int(s)
    except ValueError:
        return False
    return True
def process_cmd(cmd_str):
    """Parse one CLI line and dispatch to the matching command function.

    The first whitespace-separated token (upper-cased) is looked up among
    module-level names via globals()/locals(); the remaining tokens are
    passed to it as a list.  Unknown commands print a message.
    """
    # split up the string by space
    cmd_args = cmd_str.split(' ')
    # caps on CLI input
    cmd_args[0] = cmd_args[0].upper()
    if(len(cmd_args) > 1):
        args = cmd_args[1:]
    else:
        args = []
    # Name-based dispatch: any module-level callable whose (upper-case)
    # name matches the command is invoked.
    possibles = globals().copy()
    possibles.update(locals())
    method = possibles.get(cmd_args[0])
    if not method:
        print("Method %s not implemented" % cmd_args[0])
    else:
        method(args)
def main():
    """Parse command-line options, start the e-mail poller thread and run
    the interactive command loop until the user enters 'x'.
    """
    global user
    global recipient
    global incoming_server
    global outgoing_server
    global password
    global email_enabled
    global ip_enabled
    global http_post_enabled
    global aprs_server
    global aprs_port
    global aprs_password
    global aprs_callsign
    global aprs_is_enabled
    parser = OptionParser()
    parser.add_option("-p", "--passwd", dest="passwd", action="store", help="Password", metavar="PASSWD")
    parser.add_option("-u", "--user", dest="user", action="store", help="E-mail account username", metavar="USER")
    parser.add_option("-r", "--recipient", dest="recipient", action="store", help="Destination e-mail address.", metavar="USER")
    parser.add_option("-i", "--in_srv", dest="in_srv", action="store", help="Incoming e-mail server url", metavar="IN_SRV")
    parser.add_option("-o", "--out_srv", dest="out_srv", action="store", help="Outoging e-mail server", metavar="OUT_SRV")
    parser.add_option("-m", "--mode", dest="mode", action="store", help="Mode: EMAIL,HTTP_POST,IP,NONE", default="NONE", metavar="MODE")
    parser.add_option("-I", "--imei", dest="imei", action="store", help="IMEI of target modem.", metavar="IMEI")
    parser.add_option("-A", "--aprs-server", dest="aprs_server", action="store", help="APRS server", metavar="APRS_SERVER")
    parser.add_option("-a", "--aprs-port", dest="aprs_port", action="store", help="APRS port", metavar="APRS_PORT")
    parser.add_option("-s", "--aprs-password", dest="aprs_password", action="store", help="APRS password", metavar="APRS_PASSWORD")
    parser.add_option("-c", "--aprs-callsign", dest="aprs_callsign", action="store", help="APRS Callsign", metavar="APRS_CALLSIGN")
    (options, args) = parser.parse_args()
    if options.aprs_server:
        aprs_server = options.aprs_server
    if options.aprs_port:
        aprs_port = options.aprs_port
    if options.aprs_password:
        # APRS output is enabled as soon as a password is supplied.
        aprs_password = options.aprs_password
        aprs_is_enabled = True
    if options.aprs_callsign:
        aprs_callsign = options.aprs_callsign
    # check for valid arguments
    if options.mode == "EMAIL":
        if options.passwd is None or options.user is None or options.recipient is None or options.in_srv is None or options.out_srv is None:
            print 'If you want to use e-mail, you must specify in/out servers, user, password, and recipient address.'
            sys.exit()
        else:
            email_enabled = True
    elif options.mode == "HTTP_POST":
        print 'Not implemented yet'
        sys.exit()
    elif options.mode == "IP":
        print 'Not implemented yet'
        sys.exit()
    else:
        print "No valid mode specified"
        sys.exit()
    user = options.user
    recipient = options.recipient
    incoming_server = options.in_srv
    outgoing_server = options.out_srv
    password = options.passwd
    # NOTE(review): ``imei`` is not in the ``global`` list above, so this
    # assignment creates a *local* and the module-level imei stays 0 --
    # send_mo_email will always use subject '0'.  Confirm and add
    # ``global imei`` if the option is meant to take effect.
    imei = options.imei
    # spawn task to monitor email for incoming messages
    thread.start_new_thread(email_check_task, ("Thread-1", ))
    rx_buffer = ''
    while(1):
        "Enter 'x' to exit"
        cmd_str = raw_input("# ")
        if cmd_str == 'x':
            break
        if not cmd_str == '':
            process_cmd(cmd_str)
    print "Exiting application."
| astronewts/Flight1 | misc/allaloft/groundstation/python/ground_station_base.py | Python | gpl-3.0 | 9,305 |
#! /usr/bin/env python
#
# IM - Infrastructure Manager
# Copyright (C) 2011 - GRyCAP - Universitat Politecnica de Valencia
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys
import unittest
sys.path.append(".")
sys.path.append("..")
from .CloudConn import TestCloudConnectorBase
from IM.CloudInfo import CloudInfo
from IM.auth import Authentication
from radl import radl_parse
from IM.VirtualMachine import VirtualMachine
from IM.InfrastructureInfo import InfrastructureInfo
from IM.connectors.OpenNebula import OpenNebulaCloudConnector
from mock import patch, MagicMock, call
class TestONEConnector(TestCloudConnectorBase):
"""
Class to test the IM connectors
"""
@staticmethod
def get_one_cloud():
    """Build an OpenNebulaCloudConnector pointed at a dummy server/port,
    with a mocked infrastructure object."""
    cloud_info = CloudInfo()
    cloud_info.type = "OpenNebula"
    cloud_info.server = "server.com"
    cloud_info.port = 2633
    inf = MagicMock()
    inf.id = "1"
    one_cloud = OpenNebulaCloudConnector(cloud_info, inf)
    return one_cloud
@patch('IM.connectors.OpenNebula.ServerProxy')
def test_05_getONEVersion(self, server_proxy):
    """getONEVersion should query the mocked XML-RPC one.system.version."""
    one_server = MagicMock()
    one_server.system.listMethods.return_value = ["one.system.version"]
    one_server.one.system.version.return_value = (True, "5.2.1", "")
    server_proxy.return_value = one_server
    auth = Authentication([{'id': 'one', 'type': 'OpenNebula', 'username': 'user',
                            'password': 'pass', 'host': 'server.com:2633'}])
    one_cloud = self.get_one_cloud()
    one_cloud.getONEVersion(auth)
def test_10_concrete(self):
    """concreteSystem should return exactly one concrete system for a
    simple RADL with a one:// image URL, without logging errors."""
    radl_data = """
        network net ()
        system test (
        cpu.arch='x86_64' and
        cpu.count>=1 and
        memory.size>=512m and
        net_interface.0.connection = 'net' and
        net_interface.0.dns_name = 'test' and
        disk.0.os.name = 'linux' and
        disk.0.image.url = 'one://server.com/1' and
        disk.0.os.credentials.username = 'user' and
        disk.0.os.credentials.password = 'pass'
        )"""
    radl = radl_parse.parse_radl(radl_data)
    radl_system = radl.systems[0]
    auth = Authentication([{'id': 'one', 'type': 'OpenNebula', 'username': 'user',
                            'password': 'pass', 'host': 'server.com:2633'}])
    one_cloud = self.get_one_cloud()
    concrete = one_cloud.concreteSystem(radl_system, auth)
    self.assertEqual(len(concrete), 1)
    self.assertNotIn("ERROR", self.log.getvalue(), msg="ERROR found in log: %s" % self.log.getvalue())
@patch('IM.connectors.OpenNebula.ServerProxy')
@patch('IM.connectors.OpenNebula.OpenNebulaCloudConnector.getONEVersion')
@patch('IM.InfrastructureList.InfrastructureList.save_data')
def test_20_launch(self, save_data, getONEVersion, server_proxy):
    """launch should allocate a VM and a security group with the expected
    ONE templates, and surface allocate errors as a failed result."""
    radl_data = """
        network net1 (provider_id = 'publica' and outbound = 'yes' and
                      outports = '8080,9000:9100' and sg_name= 'test')
        network net2 ()
        system test (
        cpu.arch='x86_64' and
        cpu.count=1 and
        memory.size=512m and
        availability_zone='0' and
        net_interface.0.connection = 'net1' and
        net_interface.0.dns_name = 'test' and
        net_interface.1.connection = 'net2' and
        instance_tags = 'key=value,key1=value2' and
        disk.0.os.name = 'linux' and
        disk.0.image.url = 'one://server.com/1' and
        disk.0.os.credentials.username = 'user' and
        disk.1.size=1GB and
        disk.1.device='hdb' and
        disk.1.mount_path='/mnt/path'
        )"""
    radl = radl_parse.parse_radl(radl_data)
    radl.check()
    auth = Authentication([{'id': 'one', 'type': 'OpenNebula', 'username': 'user',
                            'password': 'pass', 'host': 'server.com:2633'},
                           {'type': 'InfrastructureManager', 'username': 'user',
                            'password': 'pass'}])
    one_cloud = self.get_one_cloud()
    getONEVersion.return_value = "4.14.0"
    # Mock every XML-RPC call the connector makes during launch.
    one_server = MagicMock()
    one_server.one.vm.allocate.return_value = (True, "1", 0)
    one_server.one.vnpool.info.return_value = (True, self.read_file_as_string("files/nets.xml"), 0)
    one_server.one.secgrouppool.info.return_value = (True, self.read_file_as_string("files/sgs.xml"), 0)
    one_server.one.secgroup.allocate.return_value = (True, 1, 0)
    server_proxy.return_value = one_server
    inf = InfrastructureInfo()
    inf.auth = auth
    res = one_cloud.launch(inf, radl, radl, 1, auth)
    success, _ = res[0]
    self.assertTrue(success, msg="ERROR: launching a VM.")
    # The security group must open SSH plus the RADL outports.
    sg_template = ('NAME = test\nRULE = [ PROTOCOL = TCP, RULE_TYPE = inbound, RANGE = 22:22 ]\n'
                   'RULE = [ PROTOCOL = TCP, RULE_TYPE = inbound, RANGE = 8080:8080 ]\n'
                   'RULE = [ PROTOCOL = TCP, RULE_TYPE = inbound, RANGE = 9000:9100 ]\n')
    self.assertEqual(one_server.one.secgroup.allocate.call_args_list, [call('user:pass', sg_template)])
    # The VM template must reflect CPU/memory/disks/availability zone.
    vm_template = """
        NAME = userimage
        CPU = 1
        VCPU = 1
        MEMORY = 512
        OS = [ ARCH = "x86_64" ]
        DISK = [ IMAGE_ID = "1" ]
        DISK = [ SAVE = no, TYPE = fs , FORMAT = ext3, SIZE = 1024, TARGET = hdb ]
        SCHED_REQUIREMENTS = "CLUSTER_ID=\\"0\\""\n"""
    self.assertIn(vm_template, one_server.one.vm.allocate.call_args_list[0][0][1])
    self.assertNotIn("ERROR", self.log.getvalue(), msg="ERROR found in log: %s" % self.log.getvalue())
    # Now test an error in allocate
    one_server.one.vm.allocate.return_value = (False, "Error msg", 0)
    res = one_cloud.launch(inf, radl, radl, 1, auth)
    success, msg = res[0]
    self.assertFalse(success)
    self.assertEqual(msg, "ERROR: Error msg")
@patch('IM.connectors.OpenNebula.ServerProxy')
def test_30_updateVMInfo(self, server_proxy):
    """updateVMInfo should map the public/private NIC addresses from the
    mocked one.vm.info XML into the VM's RADL network interfaces.

    Uses assertEqual: the assertEquals alias is deprecated and was removed
    in Python 3.12.
    """
    radl_data = """
        network net (outbound = 'yes' and provider_id = 'publica')
        network net1 (provider_id = 'privada')
        system test (
        cpu.arch='x86_64' and
        cpu.count=1 and
        memory.size=512m and
        net_interface.0.connection = 'net' and
        net_interface.0.dns_name = 'test' and
        net_interface.1.connection = 'net1' and
        disk.0.os.name = 'linux' and
        disk.0.image.url = 'one://server.com/1' and
        disk.0.os.credentials.username = 'user' and
        disk.0.os.credentials.password = 'pass'
        )"""
    radl = radl_parse.parse_radl(radl_data)
    radl.check()
    auth = Authentication([{'id': 'one', 'type': 'OpenNebula', 'username': 'user',
                            'password': 'pass', 'host': 'server.com:2633'}])
    one_cloud = self.get_one_cloud()
    inf = MagicMock()
    vm = VirtualMachine(inf, "1", one_cloud.cloud, radl, radl, one_cloud, 1)
    one_server = MagicMock()
    one_server.one.vm.info.return_value = (True, self.read_file_as_string("files/vm_info.xml"), 0)
    server_proxy.return_value = one_server
    success, vm = one_cloud.updateVMInfo(vm, auth)
    self.assertEqual(vm.info.systems[0].getValue("net_interface.1.ip"), "10.0.0.01")
    self.assertEqual(vm.info.systems[0].getValue("net_interface.0.ip"), "158.42.1.1")
    self.assertTrue(success, msg="ERROR: updating VM info.")
    self.assertNotIn("ERROR", self.log.getvalue(), msg="ERROR found in log: %s" % self.log.getvalue())
@patch('IM.connectors.OpenNebula.ServerProxy')
def test_40_stop(self, server_proxy):
    """stop should succeed when the mocked one.vm.action call succeeds."""
    auth = Authentication([{'id': 'one', 'type': 'OpenNebula', 'username': 'user',
                            'password': 'pass', 'host': 'server.com:2633'}])
    one_cloud = self.get_one_cloud()
    inf = MagicMock()
    vm = VirtualMachine(inf, "1", one_cloud.cloud, "", "", one_cloud, 1)
    one_server = MagicMock()
    one_server.one.vm.action.return_value = (True, "", 0)
    server_proxy.return_value = one_server
    success, _ = one_cloud.stop(vm, auth)
    self.assertTrue(success, msg="ERROR: stopping VM info.")
    self.assertNotIn("ERROR", self.log.getvalue(), msg="ERROR found in log: %s" % self.log.getvalue())
@patch('IM.connectors.OpenNebula.ServerProxy')
def test_50_start(self, server_proxy):
    """start should succeed when the mocked one.vm.action call succeeds."""
    auth = Authentication([{'id': 'one', 'type': 'OpenNebula', 'username': 'user',
                            'password': 'pass', 'host': 'server.com:2633'}])
    one_cloud = self.get_one_cloud()
    inf = MagicMock()
    vm = VirtualMachine(inf, "1", one_cloud.cloud, "", "", one_cloud, 1)
    one_server = MagicMock()
    one_server.one.vm.action.return_value = (True, "", 0)
    server_proxy.return_value = one_server
    success, _ = one_cloud.start(vm, auth)
    # Fixed copy-pasted failure message ("stopping" -> "starting").
    self.assertTrue(success, msg="ERROR: starting VM.")
    self.assertNotIn("ERROR", self.log.getvalue(), msg="ERROR found in log: %s" % self.log.getvalue())
@patch('IM.connectors.OpenNebula.ServerProxy')
def test_52_reboot(self, server_proxy):
    """reboot should succeed when the mocked one.vm.action call succeeds."""
    auth = Authentication([{'id': 'one', 'type': 'OpenNebula', 'username': 'user',
                            'password': 'pass', 'host': 'server.com:2633'}])
    one_cloud = self.get_one_cloud()
    inf = MagicMock()
    vm = VirtualMachine(inf, "1", one_cloud.cloud, "", "", one_cloud, 1)
    one_server = MagicMock()
    one_server.one.vm.action.return_value = (True, "", 0)
    server_proxy.return_value = one_server
    success, _ = one_cloud.reboot(vm, auth)
    # Fixed copy-pasted failure message ("stopping" -> "rebooting").
    self.assertTrue(success, msg="ERROR: rebooting VM.")
    self.assertNotIn("ERROR", self.log.getvalue(), msg="ERROR found in log: %s" % self.log.getvalue())
@patch('IM.connectors.OpenNebula.ServerProxy')
def test_55_alter(self, server_proxy):
    """alterVM should resize CPU/memory and attach the new disk using the
    mocked one.vm.resize / one.vm.attach XML-RPC calls."""
    radl_data = """
        network net ()
        system test (
        cpu.arch='x86_64' and
        cpu.count=1 and
        memory.size=512m and
        net_interface.0.connection = 'net' and
        net_interface.0.dns_name = 'test' and
        disk.0.os.name = 'linux' and
        disk.0.image.url = 'one://server.com/1' and
        disk.0.os.credentials.username = 'user' and
        disk.0.os.credentials.password = 'pass'
        )"""
    radl = radl_parse.parse_radl(radl_data)
    new_radl_data = """
        system test (
        cpu.count>=2 and
        memory.size>=2048m and
        disk.1.size=1GB and
        disk.1.device='hdc' and
        disk.1.fstype='ext4' and
        disk.1.mount_path='/mnt/disk'
        )"""
    new_radl = radl_parse.parse_radl(new_radl_data)
    auth = Authentication([{'id': 'one', 'type': 'OpenNebula', 'username': 'user',
                            'password': 'pass', 'host': 'server.com:2633'}])
    one_cloud = self.get_one_cloud()
    inf = MagicMock()
    vm = VirtualMachine(inf, "1", one_cloud.cloud, radl, radl, one_cloud, 1)
    one_server = MagicMock()
    one_server.one.vm.action.return_value = (True, "", 0)
    one_server.one.vm.resize.return_value = (True, "", 0)
    # VM reported as powered off so the resize path is taken.
    one_server.one.vm.info.return_value = (True, self.read_file_as_string("files/vm_info_off.xml"), 0)
    one_server.one.vm.attach.return_value = (True, "", 0)
    one_server.system.listMethods.return_value = ["one.vm.resize"]
    server_proxy.return_value = one_server
    success, _ = one_cloud.alterVM(vm, new_radl, auth)
    self.assertTrue(success, msg="ERROR: modifying VM info.")
    self.assertNotIn("ERROR", self.log.getvalue(), msg="ERROR found in log: %s" % self.log.getvalue())
@patch('IM.connectors.OpenNebula.ServerProxy')
@patch('IM.connectors.OpenNebula.OpenNebulaCloudConnector._get_security_group')
def test_60_finalize(self, get_security_group, server_proxy):
    """finalize should terminate the VM and delete its security group via
    the mocked one.vm.action / one.secgroup.delete calls."""
    auth = Authentication([{'id': 'one', 'type': 'OpenNebula', 'username': 'user',
                            'password': 'pass', 'host': 'server.com:2633'}])
    one_cloud = self.get_one_cloud()
    radl_data = """
        network net1 (provider_id = 'publica' and outbound = 'yes' and outports = '8080,9000:9100')
        network net2 ()
        system test (
        cpu.arch='x86_64' and
        cpu.count=1 and
        memory.size=512m and
        net_interface.0.connection = 'net1' and
        net_interface.0.dns_name = 'test' and
        net_interface.1.connection = 'net2' and
        disk.0.os.name = 'linux' and
        disk.0.image.url = 'one://server.com/1' and
        disk.0.os.credentials.username = 'user' and
        disk.1.size=1GB and
        disk.1.device='hdb' and
        disk.1.mount_path='/mnt/path'
        )"""
    radl = radl_parse.parse_radl(radl_data)
    radl.check()
    inf = MagicMock()
    inf.radl = radl
    vm = VirtualMachine(inf, "1", one_cloud.cloud, radl, radl, one_cloud, 1)
    one_server = MagicMock()
    one_server.one.vm.action.return_value = (True, "", 0)
    server_proxy.return_value = one_server
    get_security_group.return_value = 101
    one_server.one.secgroup.delete.return_value = (True, "", 0)
    success, _ = one_cloud.finalize(vm, True, auth)
    self.assertTrue(success, msg="ERROR: finalizing VM info.")
    self.assertNotIn("ERROR", self.log.getvalue(), msg="ERROR found in log: %s" % self.log.getvalue())
@patch('IM.connectors.OpenNebula.ServerProxy')
@patch('IM.connectors.OpenNebula.OpenNebulaCloudConnector.getONEVersion')
@patch('time.sleep')
def test_70_create_snapshot(self, sleep, getONEVersion, server_proxy):
    """Create a disk snapshot via ``one.vm.disksaveas`` and check that the
    resulting image URL and the XML-RPC call arguments are as expected.
    """
    auth = Authentication([{'id': 'one', 'type': 'OpenNebula', 'username': 'user',
                            'password': 'pass', 'host': 'server.com:2633'}])
    one_cloud = self.get_one_cloud()
    inf = MagicMock()
    vm = VirtualMachine(inf, "1", one_cloud.cloud, "", "", one_cloud, 1)
    # disksaveas is only available on recent ONE versions; report 5.2.1.
    getONEVersion.return_value = "5.2.1"
    one_server = MagicMock()
    # (success, new_image_id, error_code): the snapshot becomes image 1.
    one_server.one.vm.disksaveas.return_value = (True, 1, 0)
    # STATE 1 means the image is READY, so the connector stops polling.
    one_server.one.image.info.return_value = (True, "<IMAGE><STATE>1</STATE></IMAGE>", 0)
    server_proxy.return_value = one_server
    success, new_image = one_cloud.create_snapshot(vm, 0, "image_name", True, auth)
    self.assertTrue(success, msg="ERROR: creating snapshot: %s" % new_image)
    self.assertEqual(new_image, 'one://server.com/1')
    self.assertEqual(one_server.one.vm.disksaveas.call_args_list, [call('user:pass', 1, 0, 'image_name', '', -1)])
    self.assertNotIn("ERROR", self.log.getvalue(), msg="ERROR found in log: %s" % self.log.getvalue())
@patch('IM.connectors.OpenNebula.ServerProxy')
@patch('IM.connectors.OpenNebula.OpenNebulaCloudConnector.getONEVersion')
@patch('time.sleep')
def test_80_delete_image(self, sleep, getONEVersion, server_proxy):
    """Delete an image referenced either by numeric id or by name; in both
    cases the connector must end up calling ``one.image.delete`` with id 1.
    """
    auth = Authentication([{'id': 'one', 'type': 'OpenNebula', 'username': 'user',
                            'password': 'pass', 'host': 'server.com:2633'}])
    one_cloud = self.get_one_cloud()
    getONEVersion.return_value = "4.12"
    one_server = MagicMock()
    one_server.one.image.delete.return_value = (True, "", 0)
    # imagepool.info lets the connector resolve 'imagename' to id 1.
    one_server.one.imagepool.info.return_value = (True, "<IMAGE_POOL><IMAGE><ID>1</ID>"
                                                        "<NAME>imagename</NAME></IMAGE></IMAGE_POOL>", 0)
    one_server.one.image.info.return_value = (True, "<IMAGE><STATE>1</STATE></IMAGE>", 0)
    server_proxy.return_value = one_server
    # Case 1: image referenced by numeric id.
    success, msg = one_cloud.delete_image('one://server.com/1', auth)
    self.assertTrue(success, msg="ERROR: deleting image. %s" % msg)
    self.assertEqual(one_server.one.image.delete.call_args_list, [call('user:pass', 1)])
    self.assertNotIn("ERROR", self.log.getvalue(), msg="ERROR found in log: %s" % self.log.getvalue())
    # Case 2: image referenced by name, resolved through the image pool.
    success, msg = one_cloud.delete_image('one://server.com/imagename', auth)
    self.assertTrue(success, msg="ERROR: deleting image. %s" % msg)
    self.assertEqual(one_server.one.image.delete.call_args_list[1], call('user:pass', 1))
    self.assertNotIn("ERROR", self.log.getvalue(), msg="ERROR found in log: %s" % self.log.getvalue())
if __name__ == '__main__':
    # Allow running this test module directly (outside a test runner).
    unittest.main()
| indigo-dc/im | test/unit/connectors/OpenNebula.py | Python | gpl-3.0 | 17,429 |
"""Unit tests for the ``organizations`` paths.
Each ``APITestCase`` subclass tests a single URL. A full list of URLs to be
tested can be found here:
http://theforeman.org/api/apidoc/v2/organizations.html
:Requirement: Organization
:CaseAutomation: Automated
:CaseLevel: Acceptance
:CaseComponent: API
:TestType: Functional
:CaseImportance: High
:Upstream: No
"""
from fauxfactory import gen_alphanumeric, gen_string
from nailgun import client, entities
from random import randint
from requests.exceptions import HTTPError
from robottelo.config import settings
from robottelo.datafactory import filtered_datapoint, invalid_values_list
from robottelo.decorators import skip_if_bug_open, tier1, tier2
from robottelo.helpers import get_nailgun_config
from robottelo.test import APITestCase
from six.moves import http_client
@filtered_datapoint
def valid_org_data_list():
    """Return a list of valid organization field values for input testing.

    Note: The maximum allowed length of org name is 242 only. This is an
    intended behavior (Also note that 255 is the standard across other
    entities.)
    """
    # (string type, maximum random length) pairs, one datapoint per charset.
    specs = (
        ('alphanumeric', 242),
        ('alpha', 242),
        ('cjk', 85),
        ('latin1', 242),
        ('numeric', 242),
        ('utf8', 85),
        ('html', 85),
    )
    return [gen_string(str_type, randint(1, max_len)) for str_type, max_len in specs]
class OrganizationTestCase(APITestCase):
    """Tests for the ``organizations`` path (create/search scenarios)."""

    @tier1
    def test_positive_create_text_plain(self):
        """Create an organization using a 'text/plain' content-type.

        :id: 6f67a3f0-0c1d-498c-9a35-28207b0faec2

        :expectedresults: HTTP 415 is returned.

        :CaseImportance: Critical
        """
        organization = entities.Organization()
        organization.create_missing()
        # POST a valid payload but with a bogus content-type: the server
        # must refuse to parse it rather than silently accepting it.
        response = client.post(
            organization.path(),
            organization.create_payload(),
            auth=settings.server.get_credentials(),
            headers={'content-type': 'text/plain'},
            verify=False,
        )
        self.assertEqual(
            http_client.UNSUPPORTED_MEDIA_TYPE, response.status_code)

    @tier1
    def test_positive_create_with_auto_label(self):
        """Create an organization and provide a name.

        :id: c9f69ee5-c6dd-4821-bb05-0d93ffa22460

        :expectedresults: The organization has the provided attributes and an
            auto-generated label.

        :CaseImportance: Critical
        """
        org = entities.Organization().create()
        self.assertTrue(hasattr(org, 'label'))
        self.assertIsInstance(org.label, type(u''))

    @tier1
    def test_positive_create_with_custom_label(self):
        """Create an org and provide a name and identical label.

        :id: f0deab6a-b09b-4110-8575-d4bea945a545

        :expectedresults: The organization has the provided attributes.

        :CaseImportance: Critical
        """
        # A label has a more restrictive allowable charset than a name, so we
        # use it for populating both name and label.
        org = entities.Organization()
        name_label = org.get_fields()['label'].gen_value()
        org.name = org.label = name_label
        org = org.create()
        self.assertEqual(name_label, org.name)
        self.assertEqual(name_label, org.label)

    @tier1
    def test_positive_create_with_name_and_label(self):
        """Create an organization and provide a name and label.

        :id: 2bdd9aa8-a36a-4009-ac29-5c3d6416a2b7

        :expectedresults: The organization has the provided attributes.

        :CaseImportance: Critical
        """
        org = entities.Organization()
        org.name = name = org.get_fields()['name'].gen_value()
        org.label = label = org.get_fields()['label'].gen_value()
        org = org.create()
        self.assertEqual(name, org.name)
        self.assertEqual(label, org.label)

    @tier1
    def test_positive_create_with_name_and_description(self):
        """Create an organization and provide a name and description.

        :id: afeea84b-61ca-40bf-bb16-476432919115

        :expectedresults: The organization has the provided attributes and an
            auto-generated label.

        :CaseImportance: Critical
        """
        # Exercise every valid charset/length combination as a sub-test.
        for name in valid_org_data_list():
            with self.subTest(name):
                org = entities.Organization(
                    name=name,
                    description=name,
                ).create()
                self.assertEqual(org.name, name)
                self.assertEqual(org.description, name)

                # Was a label auto-generated?
                self.assertTrue(hasattr(org, 'label'))
                self.assertIsInstance(org.label, type(u''))
                self.assertGreater(len(org.label), 0)

    @tier1
    def test_positive_create_with_name_label_description(self):
        """Create an org and provide a name, label and description.

        :id: f7d92392-751e-45de-91da-5ed2a47afc3f

        :expectedresults: The organization has the provided name, label and
            description.

        :CaseImportance: Critical
        """
        org = entities.Organization()
        org.name = name = org.get_fields()['name'].gen_value()
        org.label = label = org.get_fields()['label'].gen_value()
        org.description = desc = org.get_fields()['description'].gen_value()
        org = org.create()
        self.assertEqual(org.name, name)
        self.assertEqual(org.label, label)
        self.assertEqual(org.description, desc)

    @tier1
    def test_negative_create_with_invalid_name(self):
        """Create an org with an incorrect name.

        :id: 9c6a4b45-a98a-4d76-9865-92d992fa1a22

        :expectedresults: The organization cannot be created.

        :CaseImportance: Critical
        """
        for name in invalid_values_list():
            with self.subTest(name):
                with self.assertRaises(HTTPError):
                    entities.Organization(name=name).create()

    @tier1
    def test_negative_create_with_same_name(self):
        """Create two organizations with identical names.

        :id: a0f5333c-cc83-403c-9bf7-08fb372909dc

        :expectedresults: The second organization cannot be created.

        :CaseImportance: Critical
        """
        name = entities.Organization().create().name
        with self.assertRaises(HTTPError):
            entities.Organization(name=name).create()

    @tier1
    def test_positive_search(self):
        """Create an organization, then search for it by name.

        :id: f6f1d839-21f2-4676-8683-9f899cbdec4c

        :expectedresults: Searching returns at least one result.

        :CaseImportance: Critical
        """
        org = entities.Organization().create()
        orgs = entities.Organization().search(
            query={u'search': u'name="{0}"'.format(org.name)}
        )
        self.assertEqual(len(orgs), 1)
        self.assertEqual(orgs[0].id, org.id)
        self.assertEqual(orgs[0].name, org.name)
class OrganizationUpdateTestCase(APITestCase):
    """Tests for the ``organizations`` path (update/association scenarios)."""

    @classmethod
    def setUpClass(cls):  # noqa
        """Create an organization shared by every test in this class."""
        super(OrganizationUpdateTestCase, cls).setUpClass()
        cls.organization = entities.Organization().create()

    @tier1
    def test_positive_update_name(self):
        """Update an organization's name with valid values.

        :id: 68f2ba13-2538-407c-9f33-2447fca28cd5

        :expectedresults: The organization's name is updated.

        :CaseImportance: Critical
        """
        for name in valid_org_data_list():
            with self.subTest(name):
                setattr(self.organization, 'name', name)
                self.organization = self.organization.update(['name'])
                self.assertEqual(self.organization.name, name)

    @tier1
    def test_positive_update_description(self):
        """Update an organization's description with valid values.

        :id: bd223197-1021-467e-8714-c1a767ae89af

        :expectedresults: The organization's description is updated.

        :CaseImportance: Critical
        """
        for desc in valid_org_data_list():
            with self.subTest(desc):
                setattr(self.organization, 'description', desc)
                self.organization = self.organization.update(['description'])
                self.assertEqual(self.organization.description, desc)

    @tier1
    def test_positive_update_name_and_description(self):
        """Update an organization with new name and description.

        :id: 30036e70-b8fc-4c24-9494-b201bbd1c28d

        :expectedresults: The organization's name and description are updated.

        :CaseImportance: Critical
        """
        name = gen_string('alpha')
        desc = gen_string('alpha')
        self.organization.name = name
        self.organization.description = desc
        self.organization = self.organization.update(['name', 'description'])
        self.assertEqual(self.organization.name, name)
        self.assertEqual(self.organization.description, desc)

    @tier2
    def test_positive_update_user(self):
        """Update an organization, associate user with it.

        :id: 2c0c0061-5b4e-4007-9f54-b61d6e65ef58

        :expectedresults: User is associated with organization.

        :CaseLevel: Integration
        """
        user = entities.User().create()
        self.organization.user = [user]
        self.organization = self.organization.update(['user'])
        self.assertEqual(len(self.organization.user), 1)
        self.assertEqual(self.organization.user[0].id, user.id)

    @tier2
    def test_positive_update_subnet(self):
        """Update an organization, associate subnet with it.

        :id: 3aa0b9cb-37f7-4e7e-a6ec-c1b407225e54

        :expectedresults: Subnet is associated with organization.

        :CaseLevel: Integration
        """
        subnet = entities.Subnet().create()
        self.organization.subnet = [subnet]
        self.organization = self.organization.update(['subnet'])
        self.assertEqual(len(self.organization.subnet), 1)
        self.assertEqual(self.organization.subnet[0].id, subnet.id)

    @tier2
    @skip_if_bug_open('bugzilla', 1230865)
    def test_positive_add_media(self):
        """Update an organization and associate it with a media.

        :id: 83f085d9-94c0-4462-9780-d29ea4cb5aac

        :expectedresults: An organization is associated with a media.

        :CaseLevel: Integration
        """
        media = entities.Media().create()
        self.organization.media = [media]
        self.organization = self.organization.update(['media'])
        self.assertEqual(len(self.organization.media), 1)
        self.assertEqual(self.organization.media[0].id, media.id)

    @tier2
    def test_positive_add_hostgroup(self):
        """Add a hostgroup to an organization

        :id: e8c2ccfd-9ae8-4a39-b459-bc5818f54e63

        :expectedresults: Hostgroup is added to organization

        :CaseLevel: Integration
        """
        org = entities.Organization().create()
        hostgroup = entities.HostGroup().create()
        org.hostgroup = [hostgroup]
        org = org.update(['hostgroup'])
        self.assertEqual(len(org.hostgroup), 1)
        self.assertEqual(org.hostgroup[0].id, hostgroup.id)

    @skip_if_bug_open('bugzilla', 1395229)
    @tier2
    def test_positive_remove_hostgroup(self):
        """Add a hostgroup to an organization and then remove it

        :id: 7eb1aca7-fd7b-404f-ab18-21be5052a11f

        :expectedresults: Hostgroup is added to organization and then removed

        :CaseLevel: Integration
        """
        org = entities.Organization().create()
        hostgroup = entities.HostGroup().create()
        org.hostgroup = [hostgroup]
        org = org.update(['hostgroup'])
        self.assertEqual(len(org.hostgroup), 1)
        # Assigning an empty list detaches the hostgroup.
        org.hostgroup = []
        org = org.update(['hostgroup'])
        self.assertEqual(len(org.hostgroup), 0)

    @tier2
    @skip_if_bug_open('bugzilla', 1395229)
    def test_positive_add_smart_proxy(self):
        """Add a smart proxy to an organization

        :id: e21de720-3fa2-429b-bd8e-b6a48a13146d

        :expectedresults: Smart proxy is successfully added to organization

        :CaseLevel: Integration
        """
        # Every Satellite has a built-in smart proxy, so let's find it
        smart_proxy = entities.SmartProxy().search(query={
            'search': 'url = https://{0}:9090'.format(settings.server.hostname)
        })
        # Check that proxy is found and unpack it from the list
        self.assertGreater(len(smart_proxy), 0)
        smart_proxy = smart_proxy[0]
        # By default, newly created organization uses built-in smart proxy,
        # so we need to remove it first
        org = entities.Organization().create()
        org.smart_proxy = []
        org = org.update(['smart_proxy'])
        # Verify smart proxy was actually removed
        self.assertEqual(len(org.smart_proxy), 0)

        # Add smart proxy to organization
        org.smart_proxy = [smart_proxy]
        org = org.update(['smart_proxy'])
        # Verify smart proxy was actually added
        self.assertEqual(len(org.smart_proxy), 1)
        self.assertEqual(org.smart_proxy[0].id, smart_proxy.id)

    @skip_if_bug_open('bugzilla', 1395229)
    @tier2
    def test_positive_remove_smart_proxy(self):
        """Remove a smart proxy from an organization

        :id: 8045910e-d85c-47ee-9aed-ac0a6bbb646b

        :expectedresults: Smart proxy is removed from organization

        :CaseLevel: Integration
        """
        # By default, newly created organization uses built-in smart proxy,
        # so we can remove it instead of adding and removing some another one
        org = entities.Organization().create()
        self.assertGreater(len(org.smart_proxy), 0)
        org.smart_proxy = []
        org = org.update(['smart_proxy'])
        # Verify smart proxy was actually removed
        self.assertEqual(len(org.smart_proxy), 0)

    @tier1
    def test_negative_update(self):
        """Update an organization's attributes with invalid values.

        :id: b7152d0b-5ab0-4d68-bfdf-f3eabcb5fbc6

        :expectedresults: The organization's attributes are not updated.

        :CaseImportance: Critical
        """
        dataset = (
            # Name longer than the server-side limit.
            {'name': gen_string(str_type='utf8', length=256)},
            # Immutable. See BZ 1089996.
            {'label': gen_string(str_type='utf8')},
        )
        for attrs in dataset:
            with self.subTest(attrs):
                with self.assertRaises(HTTPError):
                    entities.Organization(
                        id=self.organization.id,
                        **attrs
                    ).update(attrs.keys())

    @tier2
    @skip_if_bug_open('bugzilla', 1103157)
    def test_verify_bugzilla_1103157(self):
        """Create organization and add two compute resources one by one
        using different transactions and different users to see that they
        actually added, but not overwrite each other

        :id: 5f4fd2b7-d998-4980-b5e7-9822bd54156b

        :Steps:

            1. Use the admin user to create an organization and two compute
               resources. Make one compute resource point at / belong to the
               organization.
            2. Create a user and give them the ability to update compute
               resources and organizations. Have this user make the second
               compute resource point at / belong to the organization.
            3. Use the admin user to read information about the organization.
               Verify that both compute resources are pointing at / belong to
               the organization.

        :expectedresults: Organization contains both compute resources

        :CaseLevel: Integration
        """
        # setUpClass() creates an organization w/admin user. Here, we use admin
        # to make two compute resources and make first belong to organization.
        compute_resources = [
            entities.LibvirtComputeResource(
                name=gen_string('alpha'),
                url='qemu://host.example.com/system'
            ).create()
            for _ in range(2)
        ]
        self.organization.compute_resource = compute_resources[:1]  # list
        self.organization = self.organization.update(['compute_resource'])
        self.assertEqual(len(self.organization.compute_resource), 1)

        # Create a new user and give them minimal permissions.
        login = gen_alphanumeric()
        password = gen_alphanumeric()
        user = entities.User(login=login, password=password).create()
        role = entities.Role().create()
        for perm in ['edit_compute_resources', 'edit_organizations']:
            permissions = [
                entities.Permission(id=permission['id'])
                for permission
                in entities.Permission(name=perm).search()
            ]
            entities.Filter(permission=permissions, role=role).create()
        user.role = [role]
        user = user.update(['role'])

        # Make new user assign second compute resource to org.
        cfg = get_nailgun_config()
        cfg.auth = (login, password)
        entities.Organization(
            cfg,
            id=self.organization.id,
            compute_resource=compute_resources[1:],  # slice returns list
        ).update(['compute_resource'])

        # Use admin to verify both compute resources belong to organization.
        self.assertEqual(len(self.organization.read().compute_resource), 2)
| elyezer/robottelo | tests/foreman/api/test_organization.py | Python | gpl-3.0 | 17,745 |
"""
Panel displaying the torrc or armrc with the validation done against it.
"""
import math
import curses
import threading
from util import conf, panel, torConfig, uiTools
# Rendering defaults; may be overridden by the armrc (see TorrcPanel.__init__).
DEFAULT_CONFIG = {"features.config.file.showScrollbars": True,
                  "features.config.file.maxLinesPerEntry": 8}

# TODO: The armrc use case is incomplete. There should be equivalent reloading
# and validation capabilities to the torrc.
TORRC, ARMRC = range(1, 3) # configuration file types that can be displayed
class TorrcPanel(panel.Panel):
    """
    Renders the current torrc or armrc with syntax highlighting in a scrollable
    area.
    """

    def __init__(self, stdscr, configType, config=None):
        """
        Arguments:
            stdscr     - curses window the panel is rendered in
            configType - TORRC or ARMRC, selecting which file is shown
            config     - optional configuration overriding DEFAULT_CONFIG
        """
        panel.Panel.__init__(self, stdscr, "configFile", 0)

        self._config = dict(DEFAULT_CONFIG)
        if config:
            # NOTE(review): the second argument presumably gives minimum
            # bounds for numeric attributes -- confirm against conf.update.
            config.update(self._config, {"features.config.file.maxLinesPerEntry": 1})

        self.valsLock = threading.RLock()
        self.configType = configType
        self.scroll = 0
        self.showLabel = True      # shows top label (hides otherwise)
        self.showLineNum = True    # shows left aligned line numbers
        self.stripComments = False # drops comments and extra whitespace

        # height of the content when last rendered (the cached value is invalid if
        # _lastContentHeightArgs is None or differs from the current dimensions)
        self._lastContentHeight = 1
        self._lastContentHeightArgs = None

    def handleKey(self, key):
        """
        Handles scrolling plus the 'n' (toggle line numbers) and 's' (toggle
        comment stripping) keys. Toggles invalidate the cached content height
        since they change what gets rendered.
        """
        self.valsLock.acquire()
        if uiTools.isScrollKey(key):
            pageHeight = self.getPreferredSize()[0] - 1
            newScroll = uiTools.getScrollPosition(key, self.scroll, pageHeight, self._lastContentHeight)

            if self.scroll != newScroll:
                self.scroll = newScroll
                self.redraw(True)
        elif key == ord('n') or key == ord('N'):
            self.showLineNum = not self.showLineNum
            self._lastContentHeightArgs = None
            self.redraw(True)
        elif key == ord('s') or key == ord('S'):
            self.stripComments = not self.stripComments
            self._lastContentHeightArgs = None
            self.redraw(True)

        self.valsLock.release()

    def draw(self, subwindow, width, height):
        """
        Renders the configuration contents with line numbers, syntax coloring
        and validation annotations, wrapping long entries. If the cached
        content height turns out to be stale it triggers a redraw with the
        corrected value.
        """
        self.valsLock.acquire()

        # If true, we assume that the cached value in self._lastContentHeight is
        # still accurate, and stop drawing when there's nothing more to display.
        # Otherwise the self._lastContentHeight is suspect, and we'll process all
        # the content to check if it's right (and redraw again with the corrected
        # height if not).
        trustLastContentHeight = self._lastContentHeightArgs == (width, height)

        # restricts scroll location to valid bounds
        self.scroll = max(0, min(self.scroll, self._lastContentHeight - height + 1))

        renderedContents, corrections, confLocation = None, {}, None
        if self.configType == TORRC:
            loadedTorrc = torConfig.getTorrc()
            loadedTorrc.getLock().acquire()
            confLocation = loadedTorrc.getConfigLocation()

            if not loadedTorrc.isLoaded():
                renderedContents = ["### Unable to load the torrc ###"]
            else:
                renderedContents = loadedTorrc.getDisplayContents(self.stripComments)

                # constructs a mapping of line numbers to the issue on it
                corrections = dict((lineNum, (issue, msg)) for lineNum, issue, msg in loadedTorrc.getCorrections())

            loadedTorrc.getLock().release()
        else:
            loadedArmrc = conf.getConfig("arm")
            confLocation = loadedArmrc.path
            renderedContents = list(loadedArmrc.rawContents)

        # offset to make room for the line numbers
        lineNumOffset = 0
        if self.showLineNum:
            if len(renderedContents) == 0: lineNumOffset = 2
            else: lineNumOffset = int(math.log10(len(renderedContents))) + 2

        # draws left-hand scroll bar if content's longer than the height
        scrollOffset = 0
        if self._config["features.config.file.showScrollbars"] and self._lastContentHeight > height - 1:
            scrollOffset = 3
            self.addScrollBar(self.scroll, self.scroll + height - 1, self._lastContentHeight, 1)

        displayLine = -self.scroll + 1 # line we're drawing on

        # draws the top label
        if self.showLabel:
            sourceLabel = "Tor" if self.configType == TORRC else "Arm"
            locationLabel = " (%s)" % confLocation if confLocation else ""
            self.addstr(0, 0, "%s Configuration File%s:" % (sourceLabel, locationLabel), curses.A_STANDOUT)

        isMultiline = False # true if we're in the middle of a multiline torrc entry
        for lineNumber in range(0, len(renderedContents)):
            lineText = renderedContents[lineNumber]
            lineText = lineText.rstrip() # remove ending whitespace

            # blank lines are hidden when stripping comments
            if self.stripComments and not lineText: continue

            # splits the line into its component (msg, format) tuples
            lineComp = {"option": ["", curses.A_BOLD | uiTools.getColor("green")],
                        "argument": ["", curses.A_BOLD | uiTools.getColor("cyan")],
                        "correction": ["", curses.A_BOLD | uiTools.getColor("cyan")],
                        "comment": ["", uiTools.getColor("white")]}

            # parses the comment
            commentIndex = lineText.find("#")
            if commentIndex != -1:
                lineComp["comment"][0] = lineText[commentIndex:]
                lineText = lineText[:commentIndex]

            # splits the option and argument, preserving any whitespace around them
            strippedLine = lineText.strip()
            optionIndex = strippedLine.find(" ")

            if isMultiline:
                # part of a multiline entry started on a previous line so everything
                # is part of the argument
                lineComp["argument"][0] = lineText
            elif optionIndex == -1:
                # no argument provided
                lineComp["option"][0] = lineText
            else:
                optionText = strippedLine[:optionIndex]
                optionEnd = lineText.find(optionText) + len(optionText)
                lineComp["option"][0] = lineText[:optionEnd]
                lineComp["argument"][0] = lineText[optionEnd:]

            # flags following lines as belonging to this multiline entry if it ends
            # with a slash
            if strippedLine: isMultiline = strippedLine.endswith("\\")

            # gets the correction
            if lineNumber in corrections:
                lineIssue, lineIssueMsg = corrections[lineNumber]

                if lineIssue in (torConfig.VAL_DUPLICATE, torConfig.VAL_IS_DEFAULT):
                    lineComp["option"][1] = curses.A_BOLD | uiTools.getColor("blue")
                    lineComp["argument"][1] = curses.A_BOLD | uiTools.getColor("blue")
                elif lineIssue == torConfig.VAL_MISMATCH:
                    lineComp["argument"][1] = curses.A_BOLD | uiTools.getColor("red")
                    lineComp["correction"][0] = " (%s)" % lineIssueMsg
                else:
                    # For some types of configs the correction field is simply used to
                    # provide extra data (for instance, the type for tor state fields).
                    lineComp["correction"][0] = " (%s)" % lineIssueMsg
                    lineComp["correction"][1] = curses.A_BOLD | uiTools.getColor("magenta")

            # draws the line number
            if self.showLineNum and displayLine < height and displayLine >= 1:
                lineNumStr = ("%%%ii" % (lineNumOffset - 1)) % (lineNumber + 1)
                self.addstr(displayLine, scrollOffset, lineNumStr, curses.A_BOLD | uiTools.getColor("yellow"))

            # draws the rest of the components with line wrap
            cursorLoc, lineOffset = lineNumOffset + scrollOffset, 0
            maxLinesPerEntry = self._config["features.config.file.maxLinesPerEntry"]
            displayQueue = [lineComp[entry] for entry in ("option", "argument", "correction", "comment")]

            while displayQueue:
                msg, format = displayQueue.pop(0)

                maxMsgSize, includeBreak = width - cursorLoc, False
                if len(msg) >= maxMsgSize:
                    # message is too long - break it up
                    if lineOffset == maxLinesPerEntry - 1:
                        msg = uiTools.cropStr(msg, maxMsgSize)
                    else:
                        includeBreak = True
                        msg, remainder = uiTools.cropStr(msg, maxMsgSize, 4, 4, uiTools.END_WITH_HYPHEN, True)
                        displayQueue.insert(0, (remainder.strip(), format))

                drawLine = displayLine + lineOffset
                if msg and drawLine < height and drawLine >= 1:
                    self.addstr(drawLine, cursorLoc, msg, format)

                # If we're done, and have added content to this line, then start
                # further content on the next line.
                cursorLoc += len(msg)
                includeBreak |= not displayQueue and cursorLoc != lineNumOffset + scrollOffset

                if includeBreak:
                    lineOffset += 1
                    cursorLoc = lineNumOffset + scrollOffset

            displayLine += max(lineOffset, 1)

            if trustLastContentHeight and displayLine >= height: break

        if not trustLastContentHeight:
            self._lastContentHeightArgs = (width, height)
            newContentHeight = displayLine + self.scroll - 1

            if self._lastContentHeight != newContentHeight:
                self._lastContentHeight = newContentHeight
                self.redraw(True)

        self.valsLock.release()
| katmagic/arm | src/interface/torrcPanel.py | Python | gpl-3.0 | 9,156 |
# Copyright 2015 Facundo Batista, Nicolás Demarchi
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3, as published
# by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranties of
# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
#
# For further info, check https://github.com/PyAr/fades
""" Tests for pip related code. """
import unittest
from unittest.mock import patch
import logassert
from fades.pipmanager import PipManager
from fades import helpers
class PipManagerTestCase(unittest.TestCase):
    """Tests for PipManager: `pip show` output parsing and install calls."""

    def setUp(self):
        # Capture log records from fades.pipmanager so tests can assert on them.
        logassert.setup(self, 'fades.pipmanager')

    def test_get_parsing_ok(self):
        """A well-formed `pip show` output yields the version string."""
        mocked_stdout = ['Name: foo',
                         'Version: 2.0.0',
                         'Location: ~/.local/share/fades/86cc492/lib/python3.4/site-packages',
                         'Requires: ']
        mgr = PipManager('/usr/bin', pip_installed=True)
        with patch.object(helpers, 'logged_exec') as mock:
            mock.return_value = mocked_stdout
            version = mgr.get_version('foo')
        self.assertEqual(version, '2.0.0')

    def test_get_parsing_error(self):
        """Output without a 'Version:' line yields '' and logs an error."""
        mocked_stdout = ['Name: foo',
                         'Release: 2.0.0',
                         'Location: ~/.local/share/fades/86cc492/lib/python3.4/site-packages',
                         'Requires: ']
        mgr = PipManager('/usr/bin', pip_installed=True)
        with patch.object(helpers, 'logged_exec') as mock:
            # Bug fix: the mocked output must be configured *before* calling
            # get_version(); it was previously assigned after the call, so the
            # code under test parsed a bare MagicMock instead of this output.
            mock.return_value = mocked_stdout
            version = mgr.get_version('foo')
        self.assertEqual(version, '')
        self.assertLoggedError('Fades is having problems getting the installed version. '
                               'Run with -v or check the logs for details')

    def test_real_case_levenshtein(self):
        """Parsing works when extra metadata lines precede 'Version:'."""
        mocked_stdout = [
            'Metadata-Version: 1.1',
            'Name: python-Levenshtein',
            'Version: 0.12.0',
            'License: GPL',
        ]
        mgr = PipManager('/usr/bin', pip_installed=True)
        with patch.object(helpers, 'logged_exec') as mock:
            mock.return_value = mocked_stdout
            version = mgr.get_version('foo')
        self.assertEqual(version, '0.12.0')

    def test_install(self):
        """A plain install builds the expected pip command line."""
        mgr = PipManager('/usr/bin', pip_installed=True)
        with patch.object(helpers, 'logged_exec') as mock:
            mgr.install('foo')
            mock.assert_called_with(['/usr/bin/pip', 'install', 'foo'])

    def test_install_with_options(self):
        """Space-separated options are split into separate arguments."""
        mgr = PipManager('/usr/bin', pip_installed=True, options=['--bar baz'])
        with patch.object(helpers, 'logged_exec') as mock:
            mgr.install('foo')
            mock.assert_called_with(['/usr/bin/pip', 'install', 'foo', '--bar', 'baz'])

    def test_install_with_options_using_equal(self):
        """'--opt=value' style options are passed through unsplit."""
        mgr = PipManager('/usr/bin', pip_installed=True, options=['--bar=baz'])
        with patch.object(helpers, 'logged_exec') as mock:
            mgr.install('foo')
            mock.assert_called_with(['/usr/bin/pip', 'install', 'foo', '--bar=baz'])

    def test_install_raise_error(self):
        """An execution failure propagates and is logged."""
        mgr = PipManager('/usr/bin', pip_installed=True)
        with patch.object(helpers, 'logged_exec') as mock:
            mock.side_effect = Exception("Kapow!")
            with self.assertRaises(Exception):
                mgr.install('foo')
            self.assertLoggedError("Error installing foo: Kapow!")

    def test_install_without_pip(self):
        """When pip is missing it gets bootstrapped before installing."""
        mgr = PipManager('/usr/bin', pip_installed=False)
        with patch.object(helpers, 'logged_exec') as mocked_exec:
            with patch.object(mgr, '_brute_force_install_pip') as mocked_install_pip:
                mgr.install('foo')
                self.assertEqual(mocked_install_pip.call_count, 1)
                mocked_exec.assert_called_with(['/usr/bin/pip', 'install', 'foo'])
#!/usr/bin/python
# coding=utf-8
# Simple Steam profile checker Telegram bot
# Copyright (c) 2017 EasyCoding Team
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from html import unescape
from re import sub
from urllib.request import Request as request, urlopen
from xml.dom import minidom
class SteamChecker:
@staticmethod
def striptags(gtstr, gtrep=''):
"""
Strip HTML tags from string.
:param gtstr: String to strip tags
:param gtrep: Replacement for tags
:return: String without HTML tags
"""
return sub('<[^<]+?>', gtrep, unescape(gtstr))
def __fetchxml(self):
"""
Format query to API, fetch results and return them as string.
:return: API check results
"""
apiuri = 'https://check.team-fortress.su/api.php?action=check&token=%s&id=%s' % (self.__token, self.__id)
req = request(apiuri, data=None, headers={'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; rv:52.0.0)'
'Gecko/20100101 Firefox/52.0.0'})
with urlopen(req) as xmlres:
return xmlres.read().decode('utf-8')
@property
def sitestatus(self):
"""
TEAM-FORTRESS.SU user friendly status of checked user profile.
:return: TEAM-FORTRESS.SU check results
"""
# Set dictionary with API return codes...
stv = {
'1': 'гарант',
'2': 'в белом списке',
'3': 'в чёрном списке',
'4': 'нет в базе',
'5': 'в чёрном списке аукциона',
'6': 'сотрудник сайта',
'7': 'донатер',
'8': 'ненадёжный'
}
# Return result using dictionary...
return stv[self.__sitestatus]
@property
def vacstatus(self):
"""
VAC status of checked user profile.
:return: VAC status
"""
stv = {
'0': 'чист',
'1': 'забанен'
}
return stv[self.__vacstatus]
@property
def f2pstatus(self):
"""
Free-to-Play status (has no purchased games) of checked user profile.
:return: Free-to-Play status
"""
stv = {
'0': 'нет',
'1': 'да'
}
return stv[self.__f2pstatus]
@property
def tradestatus(self):
"""
Current trade status of checked user profile.
:return: Trade status
"""
stv = {
'0': 'нет ограничений',
'1': 'заблокирована',
'2': 'испытательный срок'
}
return stv[self.__tradestatus]
@property
def gamebanstatus(self):
"""
Current game bans on checked user profile.
:return: Game bans status and their count
"""
return 'нет' if self.__gamebans == '0' else 'есть (%s)' % self.__gamebans
@property
def description(self):
"""
Formatted custom description of checked user profile.
:return: Custom description with markup
"""
return '`%s`' % self.striptags(self.__description, ' ') if self.__description else '*отсутствует.*'
    def __init__(self, tid, token):
        """
        Main SteamChecker constructor. Fetches and parses the profile check
        results immediately; raises if the API does not answer with 'OK'.
        :param tid: Profile link, username or SteamID
        :param token: API token
        :raises Exception: when the API qstatus element is not 'OK'
        """
        # Setting token and unique identifier to pseudo-private properties...
        self.__id = tid
        self.__token = token
        # Fetching XML from API (performs the network request)...
        rxml = self.__fetchxml()
        # Parsing received XML...
        xmlp = minidom.parseString(rxml)
        # Checking API result: anything other than 'OK' means the check failed.
        if xmlp.getElementsByTagName('qstatus')[0].firstChild.data != 'OK':
            raise Exception('Incorrect API return code')
        # Setting public fields...
        self.steamid32 = xmlp.getElementsByTagName('steamID')[0].firstChild.data
        self.steamid64 = xmlp.getElementsByTagName('steamID64')[0].firstChild.data
        self.steamidv3 = xmlp.getElementsByTagName('steamIDv3')[0].firstChild.data
        self.nickname = xmlp.getElementsByTagName('nickname')[0].firstChild.data
        self.avatar = xmlp.getElementsByTagName('avatar')[0].firstChild.data
        self.permalink = xmlp.getElementsByTagName('permalink')[0].firstChild.data
        self.srstatus = self.striptags(xmlp.getElementsByTagName('steamrep')[0].firstChild.data)
        # Setting private fields (raw API codes decoded by the properties)...
        self.__sitestatus = xmlp.getElementsByTagName('sitestatus')[0].firstChild.data
        self.__vacstatus = xmlp.getElementsByTagName('isbanned')[0].firstChild.data
        self.__f2pstatus = xmlp.getElementsByTagName('isf2p')[0].firstChild.data
        self.__tradestatus = xmlp.getElementsByTagName('istrbanned')[0].firstChild.data
        self.__premium = xmlp.getElementsByTagName('ispremium')[0].firstChild.data
        self.__gamebans = xmlp.getElementsByTagName('gamebans')[0].firstChild.data
        # Fetching custom description; an empty tag has firstChild == None.
        dcs = xmlp.getElementsByTagName('customdescr')[0].firstChild
        self.__description = dcs.data if dcs else ''
| xvitaly/stmbot | stmbot/checker.py | Python | gpl-3.0 | 5,895 |
import gensim
import numpy as np
from gensim.models import word2vec
import jieba
from TextSta_v2 import TextSta
from gensim.corpora.dictionary import Dictionary
# Path to one document of the Fudan classification corpus (GBK encoded).
path = u"C:\\Users\\xiangrufan\\Desktop\\NLP\\Astro_NLP\\resource\\复旦分类语料\\answer\\C3-Art\\C3-Art0002.txt"
text = TextSta(path,encoding="GBK")
# Read all sentences of the document back as a single string.
sentense_file = text.sen(all_return=True)
# Tokenize with jieba; lcut returns a plain list of words.
word_list = jieba.lcut(sentense_file)
tmp_dic = Dictionary()
# NOTE(review): gensim's Dictionary is not callable -- the next line likely
# raises TypeError; tmp_dic.add_documents([word_list]) looks like the intent.
tmp_dic(word_list)
# sentences = word2vec.Text8Corpus()  # load corpus
# model = word2vec.Word2Vec(sentences, size=10) # 默认window=5 | xiangrufan/astro-NLP | hobby2vec/gensim_test.py | Python | gpl-3.0 | 558 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2009-2021 Satpy developers
#
# This file is part of satpy.
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Tests for the hrpt reader."""
import os
import unittest
from contextlib import suppress
from tempfile import NamedTemporaryFile
from unittest import mock
import numpy as np
import xarray as xr
from satpy.readers.hrpt import HRPTFile, dtype
from satpy.tests.reader_tests.test_avhrr_l1b_gaclac import PygacPatcher
from satpy.tests.utils import make_dataid
NUMBER_OF_SCANS = 10
SWATH_WIDTH = 2048
class TestHRPTWithFile(unittest.TestCase):
    """Test base class with writing a fake file."""

    def setUp(self) -> None:
        """Write a synthetic HRPT minor-frame file to a temporary location."""
        scan_data = np.ones(NUMBER_OF_SCANS, dtype=dtype)
        # First five scans carry channel 3a (891), the remainder channel 3b (890).
        scan_data["id"]["id"][:5] = 891
        scan_data["id"]["id"][5:] = 890
        with NamedTemporaryFile(mode='w+', suffix='.hmf', delete=False) as hrpt_file:
            self.filename = hrpt_file.name
            scan_data.tofile(hrpt_file)

    def tearDown(self) -> None:
        """Remove the temporary file, tolerating it being gone already."""
        with suppress(OSError):
            os.remove(self.filename)

    def _get_dataset(self, dataset_id):
        """Open the fake file and read the requested dataset from it."""
        handler = HRPTFile(self.filename, {}, {})
        return handler.get_dataset(dataset_id, {})
class TestHRPTReading(TestHRPTWithFile):
    """Test case for reading hrpt data."""

    def test_reading(self):
        """Test that data is read."""
        handler = HRPTFile(self.filename, {}, {})
        assert handler._data is not None
class TestHRPTGetUncalibratedData(TestHRPTWithFile):
    """Test case for reading uncalibrated hrpt data."""

    def _get_channel_1_counts(self):
        """Read channel 1 as raw counts."""
        return self._get_dataset(make_dataid(name='1', calibration='counts'))

    def test_get_dataset_returns_a_dataarray(self):
        """Test that get_dataset returns a dataarray."""
        counts = self._get_channel_1_counts()
        assert isinstance(counts, xr.DataArray)

    def test_platform_name(self):
        """Test that the platform name is correct."""
        counts = self._get_channel_1_counts()
        assert counts.attrs['platform_name'] == 'NOAA 19'

    def test_no_calibration_values_are_1(self):
        """Test that the values of non-calibrated data is 1."""
        counts = self._get_channel_1_counts()
        assert (counts.values == 1).all()
def fake_calibrate_solar(data, *args, **kwargs):
    """Fake solar calibration: fixed affine transform of the counts."""
    del args, kwargs
    return 25.43 * data + 3
def fake_calibrate_thermal(data, *args, **kwargs):
    """Fake thermal calibration: fixed affine transform of the counts."""
    del args, kwargs
    return 35.43 * data + 3
class CalibratorPatcher(PygacPatcher):
    """Patch pygac."""

    def setUp(self) -> None:
        """Patch pygac's calibration."""
        super().setUp()
        # Import things to patch here to make them patchable. Otherwise another function
        # might import it first which would prevent a successful patch.
        from pygac.calibration import Calibrator, calibrate_solar, calibrate_thermal
        self.Calibrator = Calibrator
        # The imported calibration callables accept side_effect assignment,
        # i.e. they are mocks after PygacPatcher.setUp(); wire in the
        # deterministic fake calibrations defined above.
        self.calibrate_thermal = calibrate_thermal
        self.calibrate_thermal.side_effect = fake_calibrate_thermal
        self.calibrate_solar = calibrate_solar
        self.calibrate_solar.side_effect = fake_calibrate_solar
class TestHRPTWithPatchedCalibratorAndFile(CalibratorPatcher, TestHRPTWithFile):
    """Test case with patched calibration routines and a synthetic file."""

    def setUp(self) -> None:
        """Set up the test case."""
        # Both bases define setUp/tearDown and neither chains via super(),
        # so call each explicitly to get both fixtures.
        CalibratorPatcher.setUp(self)
        TestHRPTWithFile.setUp(self)

    def tearDown(self):
        """Tear down the test case."""
        CalibratorPatcher.tearDown(self)
        TestHRPTWithFile.tearDown(self)
class TestHRPTGetCalibratedReflectances(TestHRPTWithPatchedCalibratorAndFile):
    """Test case for reading calibrated reflectances from hrpt data."""

    def _get_channel_1_reflectance(self):
        """Get the channel 1 reflectance."""
        did = make_dataid(name='1', calibration='reflectance')
        return self._get_dataset(did)

    def test_calibrated_reflectances_values(self):
        """Test the calibrated reflectance values."""
        # Counts are all 1, so the fake solar calibration gives 1*25.43 + 3.
        reflectance = self._get_channel_1_reflectance()
        np.testing.assert_allclose(reflectance.values, 28.43)
class TestHRPTGetCalibratedBT(TestHRPTWithPatchedCalibratorAndFile):
    """Test case for reading calibrated brightness temperature from hrpt data."""

    def _get_channel_4_bt(self):
        """Get the channel 4 bt."""
        dataset_id = make_dataid(name='4', calibration='brightness_temperature')
        return self._get_dataset(dataset_id)

    def test_calibrated_bt_values(self):
        """Test the calibrated brightness temperature values."""
        # Counts are all 1, so the fake thermal calibration gives 1*35.43 + 3.
        result = self._get_channel_4_bt()
        np.testing.assert_allclose(result.values, 38.43)
class TestHRPTChannel3(TestHRPTWithPatchedCalibratorAndFile):
    """Test case for channel 3a/3b splitting in hrpt data."""

    def _get_channel_3b_bt(self):
        """Get the channel 3b brightness temperature."""
        dataset_id = make_dataid(name='3b', calibration='brightness_temperature')
        return self._get_dataset(dataset_id)

    def _get_channel_3a_reflectance(self):
        """Get the channel 3a reflectance."""
        dataset_id = make_dataid(name='3a', calibration='reflectance')
        return self._get_dataset(dataset_id)

    def _get_channel_3a_counts(self):
        """Get the channel 3a counts."""
        dataset_id = make_dataid(name='3a', calibration='counts')
        return self._get_dataset(dataset_id)

    def test_channel_3b_masking(self):
        """Test that channel 3b is split correctly."""
        # setUp marks scans 0-4 as 3a and 5-9 as 3b.
        result = self._get_channel_3b_bt()
        assert np.isnan(result.values[:5]).all()
        assert np.isfinite(result.values[5:]).all()

    def test_channel_3a_masking(self):
        """Test that channel 3a is split correctly."""
        result = self._get_channel_3a_reflectance()
        assert np.isnan(result.values[5:]).all()
        assert np.isfinite(result.values[:5]).all()

    def test_uncalibrated_channel_3a_masking(self):
        """Test that uncalibrated channel 3a is split correctly."""
        result = self._get_channel_3a_counts()
        assert np.isnan(result.values[5:]).all()
        assert np.isfinite(result.values[:5]).all()
class TestHRPTNavigation(TestHRPTWithFile):
    """Test case for computing HRPT navigation."""

    def setUp(self) -> None:
        """Set up the test case."""
        super().setUp()
        # Distinct constant fields so lons and lats can be told apart.
        self.fake_lons = np.ones((NUMBER_OF_SCANS, SWATH_WIDTH))
        self.fake_lats = np.ones((NUMBER_OF_SCANS, SWATH_WIDTH)) * 2

    def _prepare_mocks(self, Orbital, SatelliteInterpolator, get_lonlatalt):
        """Prepare the mocks so navigation resolves to the fake lon/lat fields."""
        Orbital.return_value.get_position.return_value = mock.MagicMock(), mock.MagicMock()
        get_lonlatalt.return_value = (mock.MagicMock(), mock.MagicMock(), mock.MagicMock())
        SatelliteInterpolator.return_value.interpolate.return_value = self.fake_lons, self.fake_lats

    @mock.patch.multiple('satpy.readers.hrpt',
                         Orbital=mock.DEFAULT,
                         compute_pixels=mock.DEFAULT,
                         get_lonlatalt=mock.DEFAULT,
                         SatelliteInterpolator=mock.DEFAULT)
    def test_longitudes_are_returned(self, Orbital, compute_pixels, get_lonlatalt, SatelliteInterpolator):
        """Check that longitudes are returned properly."""
        self._prepare_mocks(Orbital, SatelliteInterpolator, get_lonlatalt)
        dataset_id = make_dataid(name='longitude')
        result = self._get_dataset(dataset_id)
        assert (result == self.fake_lons).all()

    @mock.patch.multiple('satpy.readers.hrpt',
                         Orbital=mock.DEFAULT,
                         compute_pixels=mock.DEFAULT,
                         get_lonlatalt=mock.DEFAULT,
                         SatelliteInterpolator=mock.DEFAULT)
    def test_latitudes_are_returned(self, Orbital, compute_pixels, get_lonlatalt, SatelliteInterpolator):
        """Check that latitudes are returned properly."""
        self._prepare_mocks(Orbital, SatelliteInterpolator, get_lonlatalt)
        dataset_id = make_dataid(name='latitude')
        result = self._get_dataset(dataset_id)
        assert (result == self.fake_lats).all()
| pytroll/satpy | satpy/tests/reader_tests/test_avhrr_l0_hrpt.py | Python | gpl-3.0 | 8,951 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
vivopump -- module of helper functions for the pump
"""
import sys
import csv
import string
import random
import logging
__author__ = "Michael Conlon"
__copyright__ = "Copyright (c) 2016 Michael Conlon"
__license__ = "New BSD license"
__version__ = "0.8.7"
logger = logging.getLogger(__name__)
class DefNotFoundException(Exception):
    """
    Raised when the update definition file can not be found.
    """

    def __init__(self, value):
        self.value = value
        Exception.__init__(self)

    def __str__(self):
        return repr(self.value)
class InvalidDefException(Exception):
    """
    Raised when the update definition contains values that can not be processed.
    """

    def __init__(self, value):
        self.value = value
        Exception.__init__(self)

    def __str__(self):
        return repr(self.value)
class InvalidSourceException(Exception):
    """
    Raised when the update data contains values that can not be processed.
    """

    def __init__(self, value):
        self.value = value
        Exception.__init__(self)

    def __str__(self):
        return repr(self.value)
class PathLengthException(Exception):
    """
    Raised when the update def has a path length greater than supported.
    """

    def __init__(self, value):
        self.value = value
        Exception.__init__(self)

    def __str__(self):
        return repr(self.value)
class UnicodeCsvReader(object):
    """
    From http://stackoverflow.com/questions/1846135/python-csv-
    library-with-unicode-utf-8-support-that-just-works. Added errors='ignore'
    to handle cases when the input file misrepresents itself as utf-8.

    NOTE(review): Python 2 only -- relies on the ``unicode`` builtin and the
    ``next()`` (not ``__next__``) iterator protocol; will not run on Python 3.
    """

    def __init__(self, f, encoding="utf-8", **kwargs):
        # Wrap a plain csv.reader; decoding to unicode happens per row in next().
        self.csv_reader = csv.reader(f, **kwargs)
        self.encoding = encoding

    def __iter__(self):
        return self

    def next(self):
        """
        Read and split the csv row into fields
        """
        row = self.csv_reader.next()
        # now decode, dropping bytes invalid in the claimed encoding
        return [unicode(cell, self.encoding, errors='ignore') for cell in row]

    @property
    def line_num(self):
        """
        Return line number of the underlying reader
        """
        return self.csv_reader.line_num
class UnicodeDictReader(csv.DictReader):
    """
    A Unicode CSV Reader
    """
    def __init__(self, f, encoding="utf-8", fieldnames=None, **kwds):
        csv.DictReader.__init__(self, f, fieldnames=fieldnames, **kwds)
        # Swap the underlying row reader for the Unicode-aware one so the
        # DictReader machinery sees decoded cells.
        self.reader = UnicodeCsvReader(f, encoding=encoding, **kwds)
def read_csv(filename, skip=True, delimiter='|'):
    """
    Read a CSV file, return dictionary object
    :param filename: name of file to read
    :param skip: should lines with invalid number of columns be skipped? False=Throw Exception
    :param delimiter: The delimiter for CSV files
    :return: Dictionary object
    """
    # Use a context manager so the file is closed even when read_csv_fp
    # raises (the original leaked the handle on a RowError).
    # 'rU' (universal newlines) kept for Python 2 compatibility.
    with open(filename, 'rU') as fp:
        return read_csv_fp(fp, skip, delimiter)
def read_csv_fp(fp, skip=True, delimiter="|"):
    """
    Given a filename, read the CSV file with that name. We use "|" as a
    separator in CSV files to allow commas to appear in values.
    CSV files read by this function follow these conventions:
    -- use delimiter as a separator. Defaults to vertical bar.
    -- have a first row that contains column headings.
    -- all elements must have values. To specify a missing value, use
    the string "None" or "NULL" between separators, that is |None| or |NULL|
    -- leading and trailing whitespace in values is ignored. | The | will be
    read as "The"
    -- if skip=True, rows with too many or too few data elements are skipped.
    if skip=False, a RowError is thrown
    CSV files processed by read_csv will be returned as a dictionary of
    dictionaries, one dictionary per row keyed by an integer row number. This supports
    maintaining the order of the data input, which is important for some applications
    """
    class RowError(Exception):
        """
        Thrown when the number of data elements on a row in a CSV is not equal to the number of header elements
        """
        pass
    heading = []
    row_number = 0
    data = {}
    for row in UnicodeCsvReader(fp, delimiter=delimiter):
        i = 0
        for r in row:
            # remove white space fore and aft; NOTE(review): .encode() here
            # assumes Python 2, where the result is a byte string.
            row[i] = r.strip(string.whitespace).encode("utf-8")
            i += 1
        if len(heading) == 0:
            heading = row  # the first row is the heading
            continue
        row_number += 1
        if len(row) == len(heading):
            # well-formed row: store values keyed by their column heading
            data[row_number] = {}
            i = 0
            for r in row:
                data[row_number][heading[i]] = r
                i += 1
        elif not skip:
            raise RowError("On row " + str(row_number) + ", expecting " +
                           str(len(heading)) + " data values. Found " +
                           str(len(row)) + " data values. Row contents = " +
                           str(row))
        else:
            pass  # row has wrong number of columns and skip is True
    logger.debug("loader returns {} rows".format(len(data)))
    return data
def write_csv_fp(fp, data, delimiter='|'):
    """
    Write a CSV to a file pointer. Used to support stdout.
    :param fp: File pointer. Could be stdout.
    :param data: data to be written
    :param delimiter: field delimiter for output
    :return:
    """
    assert(len(data.keys()) > 0)
    # create a list of var_names from the first row
    # NOTE(review): data.keys()[0] relies on Python 2 where keys() returns a
    # list; Python 3 dict views are not indexable.
    var_names = data[data.keys()[0]].keys()
    fp.write(delimiter.join(var_names).encode('utf-8') + '\n')
    # rows are emitted in ascending row-number order, preserving input order
    for key in sorted(data.keys()):
        fp.write(delimiter.join([data[key][x] for x in var_names]) + '\n')
def write_csv(filename, data, delimiter='|'):
    """
    Given a filename, a data structure as produced by read_csv and an optional
    delimiter, write a file that can be read by read_csv
    The data structure is a dictionary keyed by an integer of "row numbers"
    preserving the natural order of the data. Each element is in turn a
    dictionary of name value pairs. All values are strings.
    :param filename: name of file to write
    :param data: data structure to be written to the file
    :param delimiter: field delimiter. Popular choices are '|', '\t' and ','
    :return:
    """
    with open(filename, 'w') as f:
        # NOTE(review): keys()[0] indexing and relying on keys()/values()
        # alignment are Python 2 idioms; dict views in Python 3 differ.
        f.write(delimiter.join(data[data.keys()[0]].keys()).encode('utf-8') + '\n')
        for key in sorted(data.keys()):
            f.write(delimiter.join(data[key].values()).encode('utf-8') + '\n')
def replace_initials(s):
    """
    For a string s, find all occurrences of A. B. etc and replace them with A B etc
    :param s: input string, possibly containing dotted initials
    :return: string with replacements made
    """
    import re
    # Raw string avoids the invalid '\.' escape warning and makes the intent
    # explicit: any capital letter immediately followed by a period loses
    # the period. Inline lambda replaces the former nested helper.
    return re.sub(r'[A-Z]\.', lambda m: m.group(0)[0], s)
def key_string(s):
    """
    Given a string s, return a string with a bunch of punctuation and special
    characters removed and then everything lower cased. Useful for matching
    strings in which case, punctuation and special characters should not be
    considered in the match
    """
    # NOTE(review): Python 2 only -- str.translate(None, deletechars) deletes
    # the listed characters from a byte string; Python 3's str.translate has
    # a different signature, so this would raise there.
    k = s.encode("utf-8", "ignore").translate(None,
                                              """ \t\n\r\f!@#$%^&*()_+:"<>?-=[]\\;'`~,./""")
    k = k.lower()
    return k
def get_vivo_types(selector, parms, separator=';'):
    """
    Query VIVO using the selector and return a dictionary with keys of all uri satisfying the selector and
    data of all the types for each uri, separated by the separator
    :param: selector: query fragment for selecting the entities whose types will be returned
    :param: parms: vivo_query parms
    :return: dictionary of types keyed by uri
    """
    query = """
    select ?uri (GROUP_CONCAT(?type; separator="{{separator}}") AS ?types)
    where {
      {{selector}}
      ?uri rdf:type ?type .}
    GROUP BY ?uri
    """
    q = query.replace("{{separator}}", separator)
    q = q.replace("{{selector}}", selector)
    bindings = vivo_query(q, parms)['results']['bindings']
    return {row['uri']['value']: row['types']['value'] for row in bindings}
def get_vivo_ufid(parms):
    """
    Query VIVO and return a list of all the ufid found in VIVO
    :param: parms: vivo_query parameters
    :return: dictionary of uri keyed by ufid
    """
    query = "select ?uri ?ufid where {?uri uf:ufid ?ufid .}"
    bindings = vivo_query(query, parms)['results']['bindings']
    return {row['ufid']['value']: row['uri']['value'] for row in bindings}
def get_vivo_publishers(parms):
    """
    Query VIVO and return a list of all the publishers found in VIVO
    :param: parms: vivo_query parameters
    :return: dictionary of uri keyed by simplified publisher name
    """
    query = "select ?uri ?label where {?uri a vivo:Publisher . ?uri rdfs:label ?label .}"
    bindings = vivo_query(query, parms)['results']['bindings']
    # key_string normalizes case/punctuation so near-identical names collide
    return {key_string(row['label']['value']): row['uri']['value'] for row in bindings}
def get_vivo_journals(parms):
    """
    Query VIVO and return a list of all the journals.
    @see uf_examples/publications/filters/journal_match_filter.py
    :param: parms: vivo_query params
    :return: dictionary of uri keyed by ISSN
    """
    query = "select ?uri ?issn where {?uri bibo:issn ?issn .}"
    bindings = vivo_query(query, parms)['results']['bindings']
    return {row['issn']['value']: row['uri']['value'] for row in bindings}
def get_vivo_ccn(parms):
    """
    Query VIVO and return a list of all the ccn found in VIVO.
    @see uf_examples/courses/merge_filter.py
    :param: parms: vivo_query parms
    :return: dictionary of uri keyed by ccn
    """
    query = "select ?uri ?ccn where {?uri uf:ccn ?ccn .}"
    bindings = vivo_query(query, parms)['results']['bindings']
    return {row['ccn']['value']: row['uri']['value'] for row in bindings}
def get_vivo_sponsorid(parms):
    """
    Query VIVO and return a list of all the sponsorid found in VIVO
    :param: parms: vivo_query parms
    :return: dictionary of uri keyed by sponsorid
    """
    query = "select ?uri ?sponsorid where {?uri a vivo:FundingOrganization . ?uri ufVivo:sponsorID ?sponsorid .}"
    bindings = vivo_query(query, parms)['results']['bindings']
    return {row['sponsorid']['value']: row['uri']['value'] for row in bindings}
def get_vivo_authors(parms):
    """
    Query VIVO and return a list of all the authors found in VIVO. Authors are people connected to
    publications through authorships
    :param: parms: vivo_query parms
    :return: dictionary of author uri keyed by display_name (that won't work!)
    """
    query = """
    SELECT ?uri ?display_name
    WHERE
    {
        ?art a bibo:AcademicArticle .
        ?art bibo:doi ?doi .
        ?art vivo:relatedBy ?a .
        ?a a vivo:Authorship .
        ?a vivo:relates ?author .
        ?uri a foaf:Person .
        ?uri rdfs:label ?display_name .
    }
    """
    bindings = vivo_query(query, parms)['results']['bindings']
    # Later rows with the same display name overwrite earlier ones.
    return {row['display_name']['value']: row['uri']['value'] for row in bindings}
def get_vivo_positions(parms):
    """
    Query VIVO and return a list of all the UF positions found in VIVO. UF positions will
    have an hrTitle. Non UF positions will not have this property
    :param: parms: vivo_query parameters
    :return: dictionary of position uri keyed by ufid, deptid, hr_title, start_date
    """
    query = """
    select ?uri ?ufid ?deptid ?hr_title ?start_date
    where {
      ?uri a vivo:Position .
      ?uri vivo:relates ?x . ?x uf:ufid ?ufid .
      ?uri vivo:relates ?y . ?y uf:deptid ?deptid .
      ?uri uf:hrTitle ?hr_title .
      ?uri vivo:dateTimeInterval ?dti . ?dti vivo:start ?start . ?start vivo:dateTimeValue ?start_date .
    }
    """
    bindings = vivo_query(query, parms)['results']['bindings']
    positions = {}
    for row in bindings:
        # Composite key: ufid;deptid;hr_title;start_date
        key = ';'.join((row['ufid']['value'], row['deptid']['value'],
                        row['hr_title']['value'], row['start_date']['value']))
        positions[key] = row['uri']['value']
    return positions
def read_update_def(filename, prefix):
    """
    Read an update_def in JSON format, from a file
    :param filename: name of file to read
    :param prefix: text prefix for sparql queries
    :rtype: dict
    :return: JSON-like object from file, replacing all URI strings with URIRef objects
    """
    def make_prefix_dict(prefix_text):
        """
        Given prefix text, return a prefix dictionary with tags as keys and url strings as values
        :param prefix_text:
        :return: dictionary
        :rtype: dict
        """
        prefix_dictionary = {}
        prefix_list = prefix_text.split()
        # scan for the token pattern: PREFIX <tag> <url>
        for i in range(len(prefix_list) - 2):
            if prefix_list[i].upper() == "PREFIX":
                prefix_dictionary[prefix_list[i + 1]] = prefix_list[i + 2].replace('<', '').replace('>', '')
        return prefix_dictionary

    def cast_to_rdflib(t):
        """
        Given a string t containing the name of an rdflib object, return the rdflib object. For now
        this is returns xsd data types
        Will throw a KeyValue error if t is not a known data type
        :param t:
        :return: an xsd data type
        """
        from rdflib import XSD
        cast_table = {
            'xsd:integer': XSD.integer,
            'xsd:string': XSD.string,
            'xsd:datetime': XSD.datetime,
            'xsd:boolean': XSD.boolean,
            'xsd:decimal': XSD.decimal,
            'xsd:anyURI': XSD.anyURI
        }
        r = cast_table[t]
        return r

    def fixit(current_object, prefix_dictionary):
        """
        Read the def data structure and replace all string URIs with URIRef entities
        :param current_object: the piece of the data structure to be fixed
        :return current_object: the piece repaired in place
        """
        from rdflib import URIRef
        if isinstance(current_object, dict):
            for k in current_object.keys():
                current_object[k] = fixit(current_object[k], prefix_dictionary)
        elif isinstance(current_object, list):
            for i in range(0, len(current_object)):
                current_object[i] = fixit(current_object[i], prefix_dictionary)
        elif isinstance(current_object, basestring):
            # NOTE(review): basestring makes this Python 2 only.
            if current_object.startswith("http://"):
                current_object = URIRef(current_object)
            elif current_object.startswith("xsd:"):
                current_object = cast_to_rdflib(current_object)
            elif ':' in current_object:
                # expand a known prefix tag into a full URIRef
                k = current_object.find(':')
                tag = str(current_object[0:k + 1])
                if tag in prefix_dictionary:
                    current_object = URIRef(str(current_object).replace(tag, prefix_dictionary[tag]))
        return current_object

    def add_order(a, b):
        """
        Given an update_def (a) and the string of the input file containing the update_def (b),
        add an "order" parameter to the entity_def, specifying the column_def ordering. This
        is used in subsequent processing to insure that the order in the input file is preserved
        when output is created.
        :param a: update_def
        :param b: string of update_def from file
        :return a new update_def dictionary with an order list in the entity def
        """
        defn = a
        loc = []
        var_list = []
        # order is recovered from the textual position of each name in the file
        k = b.find("column_defs")
        b = b[k:]
        for var in defn['column_defs'].keys():
            var_list.append(var)
            loc.append(b.find(var + '": ['))
        seq = sorted(loc)
        order = [var_list[loc.index(v)] for v in seq]
        defn['entity_def']['order'] = order
        return defn

    def validate_update_def(a):
        """
        Validate the update_def. Throw InvalidDef if errors
        :param a: update_def
        :return None
        """
        col_names = a['column_defs'].keys()
        # Test that each closure_def name can be found in the column_def names
        for name in a.get('closure_defs', {}).keys():
            if name not in col_names:
                # NOTE(review): message lacks a space after the name
                raise InvalidDefException(name + 'in closure_def, not in column_def.')
        # Test for agreement between closure_def and column_def last step object type and datatype
        if 'closure_defs' in a:
            for name in a.get('closure_defs').keys():
                col_object = a['column_defs'][name][-1]['object']  # last object in the column_def
                clo_object = a['closure_defs'][name][-1]['object']  # last object in the closure_def
                # NOTE(review): 'dataype' looks like a typo for 'datatype';
                # as written the column side of the comparison is always ''.
                # Confirm intent before fixing.
                if col_object.get('dataype', '') == clo_object.get('datatype', '') and \
                        col_object.get('type', '') == clo_object.get('type', ''):
                    continue
                else:
                    raise InvalidDefException(name + ' has inconsistent datatype or type in closure')
        # Test for paths having more than one multiple predicate
        for name in col_names:
            multiple = 0
            for step in a['column_defs'][name]:
                if step['predicate']['single'] == False:
                    multiple += 1
                    if multiple > 1:
                        raise InvalidDefException(name + ' has more than one multiple predicate')
        # Test for presence of required boolean value
        for name in col_names:
            for step in a['column_defs'][name]:
                if step['predicate']['single'] == 'boolean' and 'value' not in step['object']:
                    # NOTE(review): message lacks a space after the name
                    raise InvalidDefException(name + 'is boolean with no value')
        return None

    def add_object_names_and_step_attributes(a):
        """
        handed an update_def structure a, return an improved structure b in which each object has a generated name
        attribute based on the column_def or closure_def name
        Assign multiple to each object. Object is multiple if any preceding predicate is not single
        """
        b = dict(a)
        for name, path in b['column_defs'].items():
            multiple = False
            for i in range(len(path)):
                # once any predicate on the path is non-single, all later
                # objects are marked multiple
                multiple = multiple or (b['column_defs'][name][i]['predicate']['single'] == False)
                b['column_defs'][name][i]['closure'] = False
                b['column_defs'][name][i]['column_name'] = name
                b['column_defs'][name][i]['object']['multiple'] = multiple
                if i==len(path) - 1:
                    b['column_defs'][name][i]['object']['name'] = name
                    b['column_defs'][name][i]['last'] = True
                else:
                    b['column_defs'][name][i]['object']['name'] = name + '_' + str(len(path) - i - 1)
                    b['column_defs'][name][i]['last'] = False
        if 'closure_defs' in b:
            for name, path in b['closure_defs'].items():
                multiple = False
                for i in range(len(path)):
                    multiple = multiple or (b['closure_defs'][name][i]['predicate']['single'] == False)
                    b['closure_defs'][name][i]['closure'] = True
                    b['closure_defs'][name][i]['column_name'] = name
                    b['closure_defs'][name][i]['object']['multiple'] = multiple
                    if i==len(path) - 1:
                        b['closure_defs'][name][i]['object']['name'] = name
                        b['closure_defs'][name][i]['last'] = True
                    else:
                        b['closure_defs'][name][i]['object']['name'] = name + '_' + str(len(path) - i - 1)
                        b['closure_defs'][name][i]['last'] = False
        return b

    import json
    with open(filename, "r") as my_file:
        data = my_file.read()
        prefix_dict = make_prefix_dict(prefix)
        update_def = fixit(json.loads(data), prefix_dict)
        update_def = add_order(update_def, data)
        update_def = add_object_names_and_step_attributes(update_def)
        validate_update_def(update_def)
        return update_def
def add_qualifiers(input_path):
    """
    Given an update_def input_path, generate the SPARQL fragment to express the qualifiers in the path, if any
    :param input_path: list of path steps
    :return: qualifier SPARQL string
    """
    fragments = []
    for step in input_path:
        fragments.append(step['object'].get('qualifier', ''))
    return ' '.join(fragments)
def gather_types(input_step, varname):
    """
    Given and input step, return a SPARQL fragment to gather the types for the step
    :param input_step: a single path step
    :param varname: SPARQL variable name to bind the type to
    :return: SPARQL fragment as string
    """
    # Literals have no rdf:type to gather; only object steps produce a clause.
    if input_step['object']['literal']:
        return ''
    return ' ?' + input_step['object']['name'] + ' a ?' + varname + ' . '
def make_update_query(entity_sparql, path):
    """
    Given a path from an update_def data structure, generate the query needed to pull the triples from VIVO that might
    be updated. Here's what the queries look like (pseudo code) by path length
    Path length 1 example:
        select ?uri (vivo:subOrganizationWithin as ?p) (?column_name as ?o)
        where {
            ... entity sparql goes here ...
            ?uri vivo:subOrganizationWithin ?column_name . # ?uri ?p ?o
        }
    Path Length 2 example:
        select ?uri (vivo:webpage as ?p1) (?column_name_1 as ?o1) (vivo:linkURI as ?p) (?column_name as ?o)
        where {
            ... entity sparql goes here ...
            ?uri vivo:webpage ?column_name_1 . # ?uri ?p1 ?o1
            ?column_name_1 vivo:linkURI ?column_name . # ?o1 ?p ?o
        }
    Path length 3 example:
        select ?uri (vivo:dateTimeInterval as ?p2) (?column_name_2 as ?o2) (vivo:end as ?p1)
                    (?column_name_1 as ?o1) (vivo:dateTime as ?p)
                    (?column_name as ?o)
        where {
            ... entity sparql goes here ...
            ?uri vivo:dateTimeInterval ?column_name_2 . # ?uri ?p2 ?o2
            ?column_name_2 vivo:end ?column_name_1 . # ?o2 ?p1 ?o1
            ?column_name_1 vivo:dateTime ?column_name . # ?o1 ?p ?o
        }
    :return: a sparql query string (empty string for unsupported path lengths)
    """
    query = ""
    if len(path) == 1:
        query = 'select ?uri (<' + str(path[0]['predicate']['ref']) + '> as ?p) (?' + path[0]['object']['name'] + \
            ' as ?o) ?t\n' + \
            '    where { ' + entity_sparql + '\n    ?uri <' + str(path[0]['predicate']['ref']) + '> ?' + \
            path[0]['object']['name'] + \
            ' . ' + gather_types(path[0], 't') + add_qualifiers(path) + ' \n}'
    elif len(path) == 2:
        query = 'select ?uri (<' + str(path[0]['predicate']['ref']) + '> as ?p1) ' + \
            '(?' + path[0]['object']['name'] + ' as ?o1) ?t1 (<' + \
            str(path[1]['predicate']['ref']) + '> as ?p) (?' + path[1]['object']['name'] + ' as ?o) ?t\n' + \
            '    where { ' + entity_sparql + '\n    ?uri <' + str(path[0]['predicate']['ref']) + '> ?' + \
            path[0]['object']['name'] + ' . ' + gather_types(path[0], 't1') + '?' + \
            path[0]['object']['name'] + ' <' + str(path[1]['predicate']['ref']) + '> ?' + \
            path[1]['object']['name'] + ' . ' + gather_types(path[1], 't') + add_qualifiers(path) + ' \n}'
    elif len(path) == 3:
        query = 'select ?uri (<' + str(path[0]['predicate']['ref']) + '> as ?p2) ' + \
            '(?' + path[0]['object']['name'] + ' as ?o2) ?t2 (<' + str(path[1]['predicate']['ref']) + \
            '> as ?p1) (?' + path[1]['object']['name'] + ' as ?o1) ?t1 (<' + str(path[2]['predicate']['ref']) + \
            '> as ?p) (?' + path[2]['object']['name'] + ' as ?o) ?t\n' + \
            'where { ' + entity_sparql + '\n    ?uri <' + \
            str(path[0]['predicate']['ref']) + '> ?' + path[0]['object']['name'] + ' . ' + \
            gather_types(path[0], 't2') + ' ?' + \
            path[0]['object']['name'] + ' <' + str(path[1]['predicate']['ref']) + '> ?' + \
            path[1]['object']['name'] + ' . ' + gather_types(path[1], 't1') + ' ?' + \
            path[1]['object']['name'] + ' <' + \
            str(path[2]['predicate']['ref']) + '> ?' + path[2]['object']['name'] + ' . ' + \
            gather_types(path[2], 't') + add_qualifiers(path) + ' \n}'
    return query
def make_rdf_term(row_term):
    """
    Given a row term from a JSON object returned by a SPARQL query (whew!) return a corresponding
    rdflib term -- either a Literal or a URIRef
    :param row_term: dict with 'type' and 'value' keys from SPARQL JSON results
    :return: an rdf_term, either Literal or URIRef
    """
    from rdflib import Literal, URIRef
    if row_term['type'] in ('literal', 'typed-literal'):
        return Literal(row_term['value'], datatype=row_term.get('datatype', None),
                       lang=row_term.get('xml:lang', None))
    return URIRef(row_term['value'])
def get_graph(update_def, query_parms):
    """
    Given the update def, get a graph from VIVO of the triples eligible for updating
    :return: graph of triples
    """
    from rdflib import Graph, URIRef, RDF
    a = Graph()
    # Seed the graph with one (uri, rdf:type, entity type) triple per entity
    # matched by the entity_sparql.
    entity_query = 'select ?uri (<http://www.w3.org/1999/02/22-rdf-syntax-ns#type> as ?p) (<' + \
        str(update_def['entity_def']['type']) + '> as ?o)\nwhere {\n    ' + \
        update_def['entity_def']['entity_sparql'] + '\n}'
    result = vivo_query(entity_query, query_parms)
    for row in result['results']['bindings']:
        s = URIRef(row['uri']['value'])
        p = URIRef(row['p']['value'])
        o = make_rdf_term(row['o'])
        a.add((s, p, o))
    # NOTE(review): concatenating .items() results is Python 2 only (lists);
    # Python 3 dict views do not support '+'.
    for column_name, path in update_def['column_defs'].items() + \
            update_def.get('closure_defs', {}).items():
        update_query = make_update_query(update_def['entity_def']['entity_sparql'], path)
        if len(update_query) == 0:
            continue
        result = vivo_query(update_query, query_parms)
        # Branch on path length: p2/o2 present => 3 steps, p1/o1 => 2, p/o => 1.
        for row in result['results']['bindings']:
            if 'p2' in row and 'o2' in row:
                uri = URIRef(row['uri']['value'])
                p2 = URIRef(row['p2']['value'])
                o2 = make_rdf_term(row['o2'])
                a.add((uri, p2, o2))
                if 't2' in row:
                    a.add((o2, RDF.type, make_rdf_term(row['t2'])))
                p1 = URIRef(row['p1']['value'])
                o1 = make_rdf_term(row['o1'])
                a.add((o2, p1, o1))
                if 't1' in row:
                    a.add((o1, RDF.type, make_rdf_term(row['t1'])))
                p = URIRef(row['p']['value'])
                o = make_rdf_term(row['o'])
                a.add((o1, p, o))
                if 't' in row:
                    a.add((o, RDF.type, make_rdf_term(row['t'])))
            elif 'p1' in row and 'o1' in row:
                uri = URIRef(row['uri']['value'])
                p1 = URIRef(row['p1']['value'])
                o1 = make_rdf_term(row['o1'])
                a.add((uri, p1, o1))
                if 't1' in row:
                    a.add((o1, RDF.type, make_rdf_term(row['t1'])))
                p = URIRef(row['p']['value'])
                o = make_rdf_term(row['o'])
                a.add((o1, p, o))
                if 't' in row:
                    a.add((o, RDF.type, make_rdf_term(row['t'])))
            elif 'p' in row and 'o' in row:
                uri = URIRef(row['uri']['value'])
                p = URIRef(row['p']['value'])
                o = make_rdf_term(row['o'])
                a.add((uri, p, o))
                if 't' in row:
                    a.add((o, RDF.type, make_rdf_term(row['t'])))
    logger.debug(u"Triples in original graph {}".format(len(a)))
    return a
def new_uri(parms):
    """
    Find an unused VIVO URI in the VIVO defined by the parms
    :param parms: dictionary with queryuri, username, password and uriprefix
    :return: a URI not in VIVO
    """
    while True:
        # Propose a random candidate, keep it only if VIVO holds no triples about it
        test_uri = parms['uriprefix'] + str(random.randint(1, 9999999999))
        query = """
    SELECT (COUNT(?z) AS ?count) WHERE {
    <""" + test_uri + """> ?y ?z
    }"""
        response = vivo_query(query, parms)
        if int(response["results"]["bindings"][0]['count']['value']) == 0:
            return test_uri
def vivo_query(query, parms):
    """
    A new VIVO query function using SPARQLWrapper.  Tested with Stardog, UF VIVO and Dbpedia
    :param query: SPARQL query.  VIVO PREFIX will be added
    :param parms: dictionary with query parms: queryuri, username and password
    :return: result object, typically JSON
    :rtype: dict
    """
    from SPARQLWrapper import SPARQLWrapper, JSON
    logger.debug(u"in vivo_query\n{}".format(parms))
    full_query = parms['prefix'] + '\n' + query
    logger.debug(full_query)
    sparql = SPARQLWrapper(parms['queryuri'])
    sparql.setQuery(full_query)
    sparql.setReturnFormat(JSON)
    # The VIVO SPARQL API authenticates via request parameters, not HTTP auth
    sparql.addParameter("email", parms['username'])
    sparql.addParameter("password", parms['password'])
    # sparql.setCredentials(parms['username'], parms['password'])
    return sparql.query().convert()
def write_update_def(update_def, filename):
    """
    Write update_def to a json file
    :param update_def: the update definition (a JSON-serializable dict) to write
    :param filename: name of file to write
    :return: None.  A file is written
    """
    import json
    # BUGFIX: use a context manager so the file handle is closed even if
    # json.dump raises (the original leaked the handle on error).
    with open(filename, "w") as out_file:
        json.dump(update_def, out_file, indent=4)
    return
def parse_pages(pages):
    """
    Given a string possibly containing a start and end page, return the start and end page if any
    :param pages: page string such as '101-110', '101' or ''
    :return: list with start and end pages
    """
    # str.partition splits on the FIRST '-', matching the original find()-based
    # slicing; with no '-' present the end page is the empty string.
    start, _, end = pages.partition('-')
    return [start, end]
def parse_date_parts(month, year):
    """
    Given a month string and a year string from publisher data, parse apart the month, day and year and create
    a standard date string that can be used as input to VIVO
    :param month: string from publisher data.  May be text such as 'JUN' or 'Jun 15' with day number included
    :param year: string of year such as '2015'
    :return: date string in isoformat
    """
    month_numbers = {'JAN': 1, 'FEB': 2, 'MAR': 3, 'APR': 4, 'MAY': 5, 'JUN': 6,
                     'JUL': 7, 'AUG': 8, 'SEP': 9, 'OCT': 10, 'NOV': 11, 'DEC': 12,
                     'SUM': 6, 'FAL': 9, 'WIN': 12, 'SPR': 3, '': 1}
    from datetime import datetime
    # Default to the first of the month; a day may follow the name after a space
    month_name, month_day = month, '1'
    if ' ' in month:
        month_name, month_day = month.split(' ', 1)
    elif '-' in month:
        # Ranges such as 'JUL-AUG' resolve to the first month of the range
        month_name = month.split('-', 1)[0]
    date_value = datetime(int(year), month_numbers[month_name.upper()], int(month_day))
    return date_value.isoformat()
def get_args():
    """
    Get the args specified by the user.  Arg values are determined:
    1. from hard coded values (see below)
    2. Overridden by values in a specified config file (see below)
    3. Overridden by values on the command line
    Set the logging level based on args
    :return: args structure as defined by argparser
    """
    import argparse
    # Import compatibility: the module was renamed in Python 3.
    try:
        import configparser as ConfigParser
    except ImportError:
        import ConfigParser
    program_defaults = {
        'action': 'summarize',
        'defn': 'pump_def.json',
        'inter': '\t',
        'intra': ';',
        'username': 'vivo_root@school.edu',
        'password': 'password',
        'prefix':
        'PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n'
        'PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n'
        'PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>\n'
        'PREFIX owl: <http://www.w3.org/2002/07/owl#>\n'
        'PREFIX vitro: <http://vitro.mannlib.cornell.edu/ns/vitro/0.7#>\n'
        'PREFIX bibo: <http://purl.org/ontology/bibo/>\n'
        'PREFIX event: <http://purl.org/NET/c4dm/event.owl#>\n'
        'PREFIX foaf: <http://xmlns.com/foaf/0.1/>\n'
        'PREFIX obo: <http://purl.obolibrary.org/obo/>\n'
        'PREFIX skos: <http://www.w3.org/2004/02/skos/core#>\n'
        'PREFIX uf: <http://vivo.school.edu/ontology/uf-extension#>\n'
        'PREFIX vitrop: <http://vitro.mannlib.cornell.edu/ns/vitro/public#>\n'
        'PREFIX vivo: <http://vivoweb.org/ontology/core#>\n',
        'rdfprefix': 'pump',
        'queryuri': 'http://localhost:8080/vivo/api/sparqlQuery',
        'uriprefix': 'http://vivo.school.edu/individual/n',
        'src': 'pump_data.txt',
        'config': 'sv.cfg',
        'verbose': logging.WARNING,
        'debug': logging.WARNING,
        'nofilters': False
    }
    parser = argparse.ArgumentParser(description="Get or update row and column data from and to VIVO",
                                     epilog="For more info, see http://github.com/mconlon17/vivo-pump")
    parser.add_argument("-a", "--action", help="desired action. get = get data from VIVO. update = update VIVO "
                        "data from a spreadsheet. summarize = show def summary. serialize = serial version of the pump"
                        ". test = test pump configuration.",
                        nargs='?')
    parser.add_argument("-d", "--defn", help="name of definition file", nargs="?")
    parser.add_argument("-i", "--inter", help="interfield delimiter", nargs="?")
    parser.add_argument("-j", "--intra", help="intrafield delimiter", nargs="?")
    parser.add_argument("-u", "--username", help="username for API", nargs="?")
    parser.add_argument("-p", "--password", help="password for API", nargs="?")
    parser.add_argument("-q", "--queryuri", help="URI for API", nargs="?")
    parser.add_argument("-r", "--rdfprefix", help="RDF prefix", nargs="?")
    parser.add_argument("-x", "--uriprefix", help="URI prefix", nargs="?")
    parser.add_argument("-s", "--src", help="name of source file containing data to be updated in VIVO", nargs='?')
    parser.add_argument("-c", "--config", help="name of file containing config data. Config data overrides program "
                        "defaults. Command line overrides config file values", nargs='?')
    parser.add_argument("-v", "--verbose", action="store_const", dest='loglevel', const=logging.INFO,
                        help="write informational messages to the log")
    parser.add_argument("-b", "--debug", action="store_const", dest='loglevel', const=logging.DEBUG,
                        default=logging.WARNING, help="write debugging messages to the log")
    parser.add_argument("-n", "--nofilters", action="store_true", help="turn off filters")
    args = parser.parse_args()
    if args.config is None:
        args.config = program_defaults['config']
        config_required = False    # falling back to the hard coded defaults is fine
        logger.debug(u"No config file specified -- using hardcoded defaults")
    else:
        config_required = True     # the user explicitly named a file; it must exist
        logger.debug(u"Reading config file: {}".format(args.config))

    # Read the config parameters from the file specified in the command line.
    # BUGFIX: ConfigParser.read() never raises IOError for a missing file -- it
    # silently skips it and returns the list of files actually parsed -- so the
    # original try/except IOError was dead code.  Check the return value, and
    # only fail when the user explicitly requested a config file.
    config = ConfigParser.ConfigParser()
    if not config.read(args.config) and config_required:
        logger.error(u"Config file {} not found.".format(args.config))
        sys.exit(1)

    # Config file values overwrite program defaults
    for section in config.sections():
        for name, val in config.items(section):
            program_defaults[name] = val
            if 'prefix' != name:    # the prefix block is too long to log usefully
                logger.debug(u"Param {} = {}".format(name, val))

    # Non null command line values overwrite the config file values
    for name, val in vars(args).items():
        if val is not None:
            program_defaults[name] = val

    # Put the final values back in args
    for name, val in program_defaults.items():
        if val == 'tab':
            val = '\t'    # allow the word 'tab' in config files, where a literal tab is awkward
        vars(args)[name] = val

    # Set the level of logging if verbose and/or debug args were used
    if args.loglevel:
        logging.basicConfig(level=args.loglevel)
    return args
def get_parms():
    """
    Use get_args to get the args, and return a dictionary of the args ready for
    use in pump software.
    @see get_args()
    :return: dict: parms
    """
    # Drop args whose value is None; everything else becomes a parm
    return {name: val for name, val in vars(get_args()).items() if val is not None}
def add_type_restriction(step):
    """
    For a given step, look for an object type and construct a SPARQL fragment to restrict the graph
    to objects of that type.  If the object does not have a type restriction, return an empty string.
    :param step: The step for which an object restriction is requested
    :return: the SPARQL fragment for the restriction, or an empty string if no type is specified
    """
    obj = step['object']
    if 'type' not in obj:
        return ""
    return '?' + obj['name'] + ' a <' + str(obj['type']) + '> . '
def make_get_query(update_def):
    """
    Given an update_def, return the sparql query needed to produce a spreadsheet of the data to be managed.
    See do_get
    :param update_def: update definition providing entity_def and column_defs
    :return: a sparql query string
    """
    # SELECT one result variable per column, plus the entity ?uri itself
    front_query = 'SELECT ?uri ?' + ' ?'.join(update_def['column_defs'].keys()) + '\nWHERE {\n ' + \
        update_def['entity_def']['entity_sparql'] + '\n'
    # Fake recursion here to depth 3. Could be replaced by real recursion to arbitrary path length
    # Each column contributes one OPTIONAL group that follows its path of
    # predicates from ?uri; the final step of the path binds the column's
    # variable, and add_qualifiers() appends any filters/ordering the path defines.
    middle_query = ""
    for name, path in update_def['column_defs'].items():
        middle_query += ' OPTIONAL { ?uri <' + str(path[0]['predicate']['ref']) + '> ?'
        if len(path) == 1:
            middle_query += name + ' . ' + add_type_restriction(path[0]) + add_qualifiers(path) + ' }\n'
        else:
            middle_query += path[0]['object']['name'] + ' . ' + add_type_restriction(path[0]) + '?' + \
                path[0]['object']['name'] + ' <' + str(path[1]['predicate']['ref']) + '> ?'
            if len(path) == 2:
                middle_query += name + ' . ' + add_type_restriction(path[1]) + add_qualifiers(path) + ' }\n'
            else:
                middle_query += path[1]['object']['name'] + ' . ' + add_type_restriction(path[1]) + '?' + \
                    path[1]['object']['name'] + ' <' + str(path[2]['predicate']['ref']) + '> ?'
                if len(path) == 3:
                    middle_query += name + ' . ' + add_type_restriction(path[2]) + add_qualifiers(path) + ' }\n'
                else:
                    raise PathLengthException('Path length >3 not supported in do_get')
    # Optional ORDER BY taken from the entity definition
    if 'order_by' in update_def['entity_def']:
        back_query = '}\nORDER BY ?' + update_def['entity_def']['order_by']
    else:
        back_query = '}\n'
    return front_query + middle_query + back_query
def unique_path(path):
    """
    Given a path, determine if all its elements are single-valued predicates.  If so, the path is unique,
    regardless of length.  If any one of the steps in the path has a non single-valued predicate, the path is not
    unique.
    :param path: a definition path
    :return: True if path is unique
    :rtype: boolean
    """
    # 'single' may hold non-boolean markers (e.g. 'boolean'), which do not
    # count as single-valued here -- hence the explicit comparison to True.
    return all(step['predicate']['single'] == True for step in path)
def make_get_data(update_def, result_set):
    """
    Given a query result set, produce a dictionary keyed by uri with values of dictionaries keyed by column
    names.  Where columns have multiple values, create sets of values.
    :param update_def: update definition providing column_defs
    :param result_set: SPARQL result set
    :return: dictionary
    :rtype: dict
    """
    data = {}
    # BUGFIX (py3 compat): dict.keys() is a view in Python 3 and cannot be
    # concatenated to a list with '+'; wrap it in list() explicitly.
    names = ['uri'] + list(update_def['column_defs'].keys())
    for binding in result_set['results']['bindings']:
        uri = str(binding['uri']['value'])
        if uri not in data:
            data[uri] = {}
        for name in names:
            if name != 'uri' and \
                    update_def['column_defs'][name][-1]['predicate']['single'] == 'boolean':
                # Boolean columns collapse to '1' (configured value seen) or
                # '0' (never seen for this uri) -- never a set of values.
                last_step = update_def['column_defs'][name][-1]
                if name in binding and (str(last_step['object']['value']) == binding[name]['value']):
                    data[uri][name] = '1'
                elif name not in data[uri]:
                    data[uri][name] = '0'
            else:
                # Regular columns (including 'uri' itself) accumulate into sets
                if name in binding:
                    if name in data[uri]:
                        data[uri][name].add(binding[name]['value'])
                    else:
                        data[uri][name] = {binding[name]['value']}
    return data
def make_rdf_term_from_source(value, step):
    """
    Given a text string value and a step definition, return the rdflib term as defined by the step def
    :param: value: string from source
    :param: step: step definition from update_def
    :return: rdf_term: an rdf_term from rdflib -- either Literal or URIRef
    """
    from rdflib import Literal, URIRef
    obj_def = step["object"]
    if not obj_def["literal"]:
        return URIRef(value)
    # Expand the conventional xsd: prefix to the full XML Schema namespace
    datatype = obj_def.get('datatype', None)
    if datatype is not None and datatype[:4] == 'xsd:':
        datatype = datatype.replace('xsd:', 'http://www.w3.org/2001/XMLSchema#')
    return Literal(value, datatype=datatype, lang=obj_def.get('lang', None))
def prepare_column_values(update_string, intra, step_def, enum, row, column_name):
    """
    Given the string of data from the update file, the step definition, the row and column name of the
    update_string in the update file, enumerations and filters, prepare the column values and return them
    as a list of rdflib terms
    :param update_string: raw cell text from the update file
    :param intra: intra-field delimiter separating multiple values within one cell
    :param step_def: the (final) step definition of the column's path
    :param enum: enumeration structure (see load_enum)
    :param row: row identifier, used only in error messages
    :param column_name: column identifier, used only in error messages
    :return: column_values a list of rdflib terms
    :rtype: list
    """
    # Three cases: boolean, single valued and multiple valued
    if step_def['predicate']['single'] == 'boolean':
        # Booleans normalize to '1'/'0'; an empty cell stays empty (no change)
        update_string = update_string.strip()
        if update_string == '':
            column_values = ['']
        elif update_string == '0' or update_string == 'None' or update_string.lower() == 'false' or \
                update_string.lower() == 'n' or update_string.lower() == 'no':
            column_values = ['0']
        else:
            column_values = ['1']
    elif not step_def['object']['multiple']:
        column_values = [update_string.strip()]
    else:
        column_values = update_string.split(intra)
        if 'include' in step_def['predicate']:
            # Values the definition requires to always be present
            column_values += step_def['predicate']['include']
        for i in range(len(column_values)):
            column_values[i] = column_values[i].strip()
    # Check column values for consistency with single and multi-value paths
    if step_def['object']['multiple'] != True and len(column_values) > 1:
        raise InvalidSourceException(str(row) + str(column_name) +
                                     'Path is single-valued, multiple values in source.')
    # Empty strings carry no information once the boolean case is handled above
    while '' in column_values:
        column_values.remove('')
    # 'None' (remove-all marker) cannot be combined with other values
    if 'None' in column_values and len(column_values) > 1:
        raise InvalidSourceException(str(row) + str(column_name) +
                                     'None value in multi-valued predicate set')
    # Handle enumerations -- map each short form to its VIVO value
    if 'enum' in step_def['object']:
        for i in range(len(column_values)):
            try:
                column_values[i] = enum[step_def['object']['enum']]['update'][column_values[i]]
            except KeyError:
                logger.error(u"{} not found in enumeration. Blank value substituted.".format(column_values[i]))
                column_values[i] = ''
    # Convert to rdflib terms
    column_terms = [make_rdf_term_from_source(column_value, step_def) for column_value in column_values]
    return column_terms
def load_enum(update_def):
    """
    Find all enumerations in the update_def. for each, read the corresponding enum file and build the corresponding
    pair of enum dictionaries.
    The two columns in the tab delimited input file must be called "short" and "vivo". "vivo" is the value to put in
    vivo (update) or get from vivo. short is the human usable short form.
    The input file name appears as the 'enum' value in update_def
    :param update_def: update definition whose column paths are scanned for 'enum' entries
    :return enumeration structure. Pairs of dictionaries, one pair for each enumeration. short -> vivo, vivo -> short
    """
    enum = {}
    for path in update_def['column_defs'].values():
        for step in path:
            if 'object' in step and 'enum' in step['object']:
                enum_name = step['object']['enum']
                if enum_name not in enum:
                    # First sighting of this enum file: read it once and build
                    # both directions of the mapping
                    enum[enum_name] = {}
                    enum[enum_name]['get'] = {}
                    enum[enum_name]['update'] = {}
                    enum_data = read_csv(enum_name, delimiter='\t')
                    for enum_datum in enum_data.values():
                        try:
                            enum[enum_name]['get'][enum_datum['vivo']] = enum_datum['short']
                        except KeyError:
                            logger.error(
                                u"Enumeration {} does not have required columns named short and vivo".format(enum_name))
                            # NOTE(review): this raises a fresh bare KeyError,
                            # discarding the original missing-key detail
                            raise KeyError
                        enum[enum_name]['update'][enum_datum['short']] = enum_datum['vivo']
    return enum
def create_enum(filename, query, parms, trim=0, skip=0):
    """
    Given, query, parms and a filename, execute the query and write the enum into the file
    :param: filename: name of the file to contain the enumeration
    :param: query: the query to be used to create the columns for the enumeration
    :param: parms: dictionary of VIVO SPARQL API parameters
    :param: trim: If 0, no trim. If k, return the first k characters as a trimmed value for short
    :param: skip: If 0, no skip. If k, skip the first k characters as a trimmed value for short
    :return: None
    """
    import codecs
    data = vivo_query(query, parms)
    # BUGFIX: use a context manager so the file is closed even if a write fails
    with codecs.open(filename, mode='w', encoding='utf_8', errors='xmlcharrefreplace') as outfile:
        outfile.write("short\tvivo\n")
        for item in data['results']['bindings']:
            short = item["short"]["value"]
            vivo = item["vivo"]["value"]
            # NOTE(review): with skip == 0 a non-zero trim keeps the FIRST trim
            # characters ([:trim]), but with both non-zero it drops the LAST
            # trim characters ([skip:-trim]).  That asymmetry is preserved from
            # the original code -- confirm it is intended.
            if trim == 0 and skip == 0:
                outfile.write(short + "\t" + vivo + "\n")
            elif trim != 0 and skip == 0:
                outfile.write(short[:trim] + "\t" + vivo + "\n")
            elif trim == 0 and skip != 0:
                outfile.write(short[skip:] + "\t" + vivo + "\n")
            else:
                outfile.write(short[skip:-trim] + "\t" + vivo + "\n")
"""
optimisation.py: Optimisations for worldview solving
Copyright (C) 2014 Michael Kelly
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
# def evaluation_skip(optimisation, stat_struct, valuator_string, debug=0):
# """
# Retrofitting for statistic structure
# Evaluation Skip: Looks at possible optimisations from previous function
# and sorts through epistemic atoms and the current valuation string
# and determines if it is a valuation worth pursuing, here valuations
# containing conflicting values will be considered removable
# Pre :- * epistemic atoms and their negation status need to be processed.
# * The valuation binary string must be calculated.
# * An evaluation of optimisations to check for must also be looked
# for.
# Post :- * A binary value will be outputted deciding whether this set of
# atoms is worth persuing as an interpretation of the original
# subjective program.
# """
# temp = 0
# if self.modal_operator_count(stat_struct) == 1 or \
# not self.modal_operator_count(stat_struct):
# return True
# # make a copy of the original queue to not lose original value set
# copysStat = copy.copy(stat_struct)
# count = self.modOpCount(stat_struct)
# countb = len(stat_struct.keys())
# while countb:
# counta = len(stat_struct[stat_struct.keys()[countb-1]])
# while counta:
# temp = valuator_string & 0x1
# valuator_string >>= 1
# if not temp:
# remove_item = stat_struct[stat_struct.keys()[countb-1]][counta-1]
# stat_struct[stat_struct.keys()[countb-1]].remove(remove_item)
# counta -= 1
# countb -= 1
# count = len(stat_struct)
# while count:
# if not stat_struct[stat_struct.keys()[count-1]]:
# del stat_struct[count]
# count -= 1
# for linea in stat_struct:
# for lineb in stat_struct:
# comparison_modals = combinations(stat_struct[lineb] + stat_struct[linea], 2)
# for modal_pair in comparison_modals:
# mod_a, mod_b = comparison_modals
# if mod_a.label != mod_b.label:
# continue
# if not check_optimisation(optimisation, mod_a, mod_b):
# return False
# return True
# def check_optimisation(optimisation, mod_a, mod_b):
# """
# Analysing old code
# 0: epistemic negation
# 1: modality K|B
# 2: atom negation
# """
# if optimisation & 0x1 == 1:
# if ((mod_a[1] & 0x2) != (mod_b[1] & 0x2)) and \
# ((mod_a[1] & 0x1) != (mod_b[1] & 0x1)) and \
# ((mod_a[1] & 0x4) == (mod_b[1] & 0x4)) and \
# ((mod_a[1] & 0x4) == 0):
# # if modal operators are different
# # atom negation is different, and there is no atom negation
# return False
# elif (optimisation & 0x2) == 2:
# if ((mod_a[1] & 0x6) == (mod_b[1] & 0x6)) and \
# ((mod_a[1] & 0x2) == 1) and \
# ((mod_a[1] & 0x4) == 0) and \
# ((mod_a[1] & 0x1) != (mod_b[1] & 0x1)):
# # if both mod negation and mod are the same (K and no negation)
# return False
# elif (optimisation & 0x4) == 4:
# if ((mod_a[1] & 0x6) != (mod_b[1] & 0x6)) and \
# ((mod_a[1] & 0x1) == (mod_b[1] & 0x1)) and \
# ((mod_a[1] & 0x4) != (mod_a[1] & 0x2)) and \
# ((mod_b[1] & 0x4) != (mod_b[1] & 0x2)):
# return False
# elif (optimisation & 0x8) == 8:
# # look for cases where the epistemic atoms are the same.
# if (mod_a[1] == mod_b[1]):
# return False
# elif (optimisation & 0x10) == 16:
# if ((mod_a[1] & 0x2) != (mod_b[1] & 0x2)) and \
# ((mod_a[1] & 0x2) == 1) and \
# ((mod_a[1] & 0x1) == (mod_b[1] & 0x1)) and \
# ((mod_a[1] & 0x4) == (mod_b[1] & 0x4)) and \
# ((mod_a[1] & 0x4) == 0):
# return False
# elif (optimisation & 0x20) == 32:
# if ((mod_a[1] & 0x3) == (mod_b[1] & 0x3)) and \
# ((mod_b[1] & 0x4) != (mod_a[1] & 0x4)):
# return False
# def optimisation_present(e_atom_a, e_atom_b):
# if e_atom_a == e_atom_b:
# return True
# # if different modality but same negations on same label
# # optimisation exists
# if not e_atom_a.same_modal_token(e_atom_b) and e_atom_a.know() and \
# e_atom_b.same_atom_negation(e_atom_b) and \
# e_atom_b.same_epistemic_negation(e_atom_b)
# not e_atom_a.atom_negation:
# return True
# if e_atom_a.same_modal(e_atom_b) and \
# not e_atom_a.same_atom_negation(e_atom_b):
# return True
# if e_atom_a.same_modal(e_atom_b) and e_atom_a.know()
# elif (optimisation & 0x2) == 2:
# if ((mod_a[1] & 0x6) == (mod_b[1] & 0x6)) and \
# ((mod_a[1] & 0x2) == 1) and \
# ((mod_a[1] & 0x4) == 0) and \
# ((mod_a[1] & 0x1) != (mod_b[1] & 0x1)):
| galactose/wviews | optimisation.py | Python | gpl-3.0 | 5,702 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
'''
Copyright 2016,暗夜幽灵 <darknightghost.cn@gmail.com>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import curses
import traceback
import locale
from data import *
from menu import *
class screen:
    '''
    This class is used to show a menu in console
    Example:
        from ui.ui import *
        scr = screen.screen()
        scr.screen_main(menu)
    The menu is a list in the format of
        [[type,text,value],
        [type,text,value],
        [type,text,value],
        ...
        [type,text,value]]
    Current support types:
        Type            Value                                       Description
        "lable"         None                                        Static text
        "submenu"       menu                                        Sub Menu Entery
        "checkbox"      True or False                               CheckBox
        "textbox"       string                                      TextBox
        "listcontrol"   [[text1,text2,text3...],selected-index]     Show a list and select one
    '''

    def __init__(self):
        # Honour the user's locale so curses can render non-ASCII characters
        locale.setlocale(locale.LC_ALL, '')
        self.stdscr = None
        self.width = 0
        self.height = 0

    def screen_main(self, menu_list, title):
        '''
        Initialize curses, draw the title bar and run the menu described by
        menu_list.  The terminal is restored on exit even when an exception is
        raised inside the curses session.
        '''
        success = True
        try:
            # Begin GUI
            self.stdscr = curses.initscr()
            self.height = self.stdscr.getmaxyx()[0]
            self.width = self.stdscr.getmaxyx()[1]
            curses.noecho()
            curses.cbreak()
            self.stdscr.keypad(1)
            color = color_t()
            color.init_color()
            self.stdscr.nodelay(0)
            # Draw background
            self.stdscr.bkgd(' ', color.get_color(0, color_t.BLUE))
            curses.curs_set(0)
            e = encoder()
            self.stdscr.addstr(0, 0, e.convert(title),
                               color.get_color(color_t.WHITE, color_t.BLUE) | curses.A_BOLD)
            self.update()
            # Create menu window
            m = menu(self, title)
            m.show_menu(menu_list)
        except:
            success = False
        finally:
            # End GUI.  BUGFIX: if curses.initscr() itself failed, stdscr is
            # still None and curses mode was never entered; calling keypad()/
            # echo()/endwin() would then raise inside finally and mask the
            # original error, so only tear down when setup succeeded.
            if self.stdscr is not None:
                self.stdscr.keypad(0)
                curses.echo()
                curses.nocbreak()
                curses.endwin()
            if not success:
                traceback.print_exc()

    def update(self):
        # Flush pending changes to the physical terminal
        self.stdscr.refresh()

    def get_size(self):
        '''
        Return the size of screen as a rect_t(width, height) object.
        '''
        return rect_t(self.width, self.height)
| darknightghost/AntiPkgLoss | ui/screen.py | Python | gpl-3.0 | 2,542 |
####################################################################################################
# Copyright 2013 John Crawford
#
# This file is part of PatchCorral.
#
# PatchCorral is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PatchCorral is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PatchCorral. If not, see <http://www.gnu.org/licenses/>.
####################################################################################################
## @file
# Defines a base class for MIDI devices.
# @date 03/08/2013 Created file. -jc
# @author John Crawford
import re #For user-defined iteration filters.
import rtmidi
import threading
import time
import yaml
##
# Returns a list of the available MIDI Input Devices.
# @return List of tuples "(portNum, portName)".
def getMIDIInDevices():
    midi = rtmidi.RtMidiIn()
    ports = range(midi.getPortCount())
    return [(p, midi.getPortName(p)) for p in ports]
##
# Returns a list of the available MIDI Output Devices.
# @return List of tuples "(portNum, portName)".
def getMIDIOutDevices():
    midi = rtmidi.RtMidiOut()
    ports = range(midi.getPortCount())
    return [(p, midi.getPortName(p)) for p in ports]
##
# Class representing a specific MIDI voice.
class MIDIVoice():
    ## Attribute paths exposed through the mapping interface, in display order.
    tags = [
        'name',
        'msb',
        'lsb',
        '_pc',
        'device.portNum',
        'device.portName',
        'channel',
        'category',
        'voiceNum',
    ]

    ##
    # Class constructor.
    # @param name String
    # @param device MIDIOutDevice object
    # @param channel MIDI Channel (1-16)
    # @param msb Most Significant Bit
    # @param lsb Least Significant Bit
    # @param pc Program Change value
    # @param category Category of the voice
    # @param voiceNum Number of the voice as displayed on the device
    def __init__(self, name, device, channel, msb, lsb, pc, category=None, voiceNum=None):
        self.name = name
        self.device = device
        self.channel = channel
        self.msb = msb
        self.lsb = lsb
        self._pc = pc
        self.category = category
        self.voiceNum = voiceNum

    ##
    # Dotted-path attribute lookup, e.g. voice['device.portName'].
    def __getitem__(self, key):
        value = self
        for part in key.split('.'):
            try:
                value = getattr(value, part)
            except AttributeError:
                raise KeyError('Unable to find key {}.'.format(key))
        return value

    def __iter__(self):
        return iter(self.tags)

    def items(self):
        for key in self:
            yield key, self[key]

    def keys(self):
        return iter(self)

    ##
    # Sends the MIDI messages that will select this voice on the given device.
    # @return None.
    def pc(self):
        self.device.sendMessage(rtmidi.MidiMessage.controllerEvent(self.channel, 0x00, self.msb))
        self.device.sendMessage(rtmidi.MidiMessage.controllerEvent(self.channel, 0x20, self.lsb))
        self.device.sendMessage(rtmidi.MidiMessage.programChange(self.channel, self._pc))

    ##
    # For use by PyYAML.
    def __repr__(self):
        attrs = ', '.join('{}={}'.format(attr, val) for attr, val in self.items())
        return '{}({})'.format(self.__class__.__name__, attrs)

    ##
    # Dotted-path attribute assignment, e.g. voice['device.portNum'] = 3.
    def __setitem__(self, key, val):
        parts = key.split('.')
        target = self
        for part in parts[:-1]:
            try:
                target = getattr(target, part)
            except AttributeError:
                raise KeyError('Unable to find key {}.'.format(key))
        setattr(target, parts[-1], val)

    ##
    # Method for converting this object to string.  Prints out essential information.
    def __str__(self):
        return '\n'.join('{}: {}'.format(key, val) for key, val in self.items())

    def values(self):
        for key in self:
            yield self[key]
##
# Class representing a MIDI Device. This is an abstract base class that doesn't do anything on its
# own.
class MIDIDevice():
    ##
    # Class initializer.  Resolves the port number / port name pair and opens the port.
    # @param port Integer port index, or "None" to look the port up by name.
    # @param name String port name, or "None" to look the name up by port index.
    # @pre "self.midi" has been initialized (done by the subclass).
    def __init__(self, port, name):
        # self.midi = None #INITIALIZE THIS IN THE SUBCLASS!
        #Resolve missing details.
        if port is None:
            if name is None:
                raise ValueError('Must provide at least the "name" or "port" to identify a MIDI device.')
            portNames = list(self.midi.getPortName(port) for port in range(self.midi.getPortCount()))
            for port, portName in enumerate(portNames):
                if portName == name:
                    self.portNum = port
                    # BUGFIX: portName was never stored on this branch, which
                    # broke getPortName() for devices constructed by name.
                    self.portName = portName
                    break
            else:
                raise ValueError('Unable to find device matching name "{}" in list "{}".'.format(name, portNames))
        else:
            portCount = self.midi.getPortCount()
            # BUGFIX: the original test "0 > port > portCount" could never be
            # True (port cannot be both negative and above portCount); check
            # the two bounds separately.  Valid indices are 0..portCount-1.
            if port < 0 or port >= portCount:
                raise ValueError('Given port "{}" is outside the expected range (0-{}).'.format(port, portCount))
            self.portNum = port
            if name is None:
                self.portName = self.midi.getPortName(port)
            else:
                self.portName = name
        #Open the MIDI port!
        self.midi.openPort(self.portNum)

    ##
    # Returns the name of the resolved port.
    def getPortName(self):
        return self.portName
##
# Class representing a MIDI Input Device.
class MIDIInDevice(MIDIDevice):
    ##
    # Class initializer.
    # @param id Identifier for the device input and output interfaces. Can be an integer (index) or
    # a string (name).
    def __init__(self, id):
        self.midi = rtmidi.RtMidiIn()
        self.midi.setCallback(self.onMIDIMsg)
        # BUGFIX: MIDIDevice.__init__ takes (port, name); passing "id" as the
        # only argument raised a TypeError.  Route it to the matching parameter
        # based on its type, per the documented contract above.
        if isinstance(id, str):
            MIDIDevice.__init__(self, None, id)
        else:
            MIDIDevice.__init__(self, id, None)
        self.midiOutDevice = None
        self.midiOutChannel = None
        self.forwardingLock = threading.Lock()

    ##
    # Enables/disables forwarding of incoming MIDI events to the given output device.
    # @param midiOutDevice MIDIOutDevice object. If "None", will disable forwarding.
    # @param channel If not "None", will change the channel of any incoming messages to this channel
    # before forwarding it to the output device.
    # @return None.
    def setMIDIOutDevice(self, midiOutDevice=None, channel=None):
        # BUGFIX: "None" is documented as disabling forwarding, so it must be
        # accepted rather than rejected by the type check.
        if midiOutDevice is not None and not isinstance(midiOutDevice, MIDIOutDevice):
            raise ValueError('Given midiOutDevice "{0}" is of type "{1}"; expected type "MIDIOutDevice".'.format(
                midiOutDevice,
                type(midiOutDevice),
            ))
        # BUGFIX: the original range test "0 > channel > 15" could never be
        # True; reject any value outside 0-15 explicitly.
        if channel is not None and not 0 <= channel <= 15:
            raise ValueError('Unexpected channel value "{0}". Expected integer 0-15 or "None".'.format(channel))
        with self.forwardingLock:
            self.midiOutDevice = midiOutDevice
            self.midiOutChannel = channel

    ##
    # Callback invoked by rtmidi for each incoming MIDI message; forwards the
    # message (possibly re-channeled) to the configured output device.
    def onMIDIMsg(self, data):
        # print 'id(self): onMIDIMsg: Recieved data "{0}".'.format(data)
        with self.forwardingLock:
            if self.midiOutChannel is not None:
                data.setChannel(self.midiOutChannel)
            if self.midiOutDevice is not None:
                self.midiOutDevice.sendMessage(data)
##
# Class representing a MIDI Output Device.
class MIDIOutDevice(MIDIDevice):
ID = 'Generic USB-MIDI Device'
## Number of the note "A0", usually the lowest supported note on the MIDI device.
noteNumA0 = 21
## Offsets for the different note letters
noteOffsets = {
'Ab': 11,
'A': 0,
'A#': 1,
'Bb': 1,
'B': 2,
'C': 3,
'C#': 4,
'Db': 4,
'D': 5,
'D#': 6,
'Eb': 6,
'E': 7,
'F': 8,
'F#': 9,
'Gb': 9,
'G': 10,
'G#': 11,
}
    ##
    # Class initializer.
    # @param port Integer port number for the MIDI device.
    # @param name String name of the MIDI device. If "None", will use this class's ID string.
    # @param voices List of MIDIVoice objects available from this MIDI Device.
    # @param defaultChannel If given, will use this channel by default for all outgoing commands.
    def __init__(self, port, name=None, voices=None, defaultChannel=None):
        if name is None:
            name = MIDIOutDevice.ID
        # Avoid the shared-mutable-default pitfall: each instance gets its own list
        if voices is None:
            voices = []
        # The rtmidi output must exist before MIDIDevice.__init__ runs, since
        # the base initializer uses self.midi to resolve and open the port.
        self.midi = rtmidi.RtMidiOut()
        super().__init__(port, name)
        self.voices = voices
        self._defaultChannel = defaultChannel
        # NOTE(review): presumably serializes outgoing sendMessage calls across
        # threads -- the consuming code is not visible here; confirm.
        self.sendLock = threading.Lock()
##
# Sets/Gets the default MIDI channel.
# @param defaultChannel Integer 0-15.
# @return Current default channel ("None" if a default hasn't been defined).
def defaultChannel(self, defaultChannel=None):
if defaultChannel is not None:
if self._defaultChannel is None:
raise ValueError('No default channel defined and no channel given.')
self._defaultChannel = defaultChannel
return self._defaultChannel
##
# Returns the full list of voices available from this device.
def getVoiceList(self):
return list(self.voices)
##
# Returns an iterator that steps over the voices. Supports filtering.
# @param filter Python statement that can be evaluated such that "v" stands for a MIDIVoice
# object.
# @return Iterator object that returns MIDIVoice objects.
def iter(self, filter='True'):
for v in self.voices:
if eval(filter):
yield v
##
# Converts the given note name to a MIDI note number.
# @param noteName String
# @return Integer
def noteName2Num(self, noteName):
m = re.match(r'^([A-Ga-g][#b]?)(-?\d)$', noteName)
if m is None:
raise ValueError('Unable to parse note name "{0}".'.format(noteName))
return self.noteNumA0 + self.noteOffsets[m.group(1)] + 12 * int(m.group(2))
##
# Plays the given note for the given number of seconds. Returns immediately (doesn't block).
# @param duration Seconds to play (float or int)
# @param noteNum Note to play
# @param vel Velocity to play at
# @param channel Channel to play on. If "None", will use default channel for object.
# @return None.
def playNote(self, duration, note, vel, channel=None):
if channel is None:
if self._defaultChannel is None:
raise ValueError('No default channel defined and no channel given.')
channel = self._defaultChannel
if isinstance(note, str):
note = self.noteName2Num(note)
onMsg = rtmidi.MidiMessage.noteOn(channel, note, vel)
offMsg = rtmidi.MidiMessage.noteOff(channel, note)
def play():
self.sendMessage(onMsg)
time.sleep(duration)
self.sendMessage(offMsg)
t = threading.Thread(target=play)
t.start()
##
# Sends the given message to the MIDI device.
# @param msg rtmidi.MidiMessage object
# @return None.
def sendMessage(self, msg):
with self.sendLock:
self.midi.sendMessage(msg)
##
# Sends the given messages to the MIDI device.
# @param msgs Any number of rtmidi.MidiMessage objects.
# @return None.
def sendMessages(self, *msgs):
for msg in msgs:
self.midi.sendMessage(msg)
| defcello/Children-of-Eden-Synth-Server | src/engine/mididevice.py | Python | gpl-3.0 | 10,822 |
#!/usr/bin/env python
"""
conference.py -- Udacity conference server-side Python App Engine API;
uses Google Cloud Endpoints
$Id: conference.py,v 1.25 2014/05/24 23:42:19 wesc Exp wesc $
created by wesc on 2014 apr 21
"""
__author__ = 'wesc+api@google.com (Wesley Chun)'
from datetime import datetime
import endpoints
from protorpc import messages
from protorpc import message_types
from protorpc import remote
from google.appengine.ext import ndb
from models import Profile
from models import ProfileMiniForm
from models import ProfileForm
from models import TeeShirtSize
from settings import WEB_CLIENT_ID
from utils import getUserId
EMAIL_SCOPE = endpoints.EMAIL_SCOPE
API_EXPLORER_CLIENT_ID = endpoints.API_EXPLORER_CLIENT_ID
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
@endpoints.api( name='conference',
                version='v1',
                allowed_client_ids=[WEB_CLIENT_ID, API_EXPLORER_CLIENT_ID],
                scopes=[EMAIL_SCOPE])
class ConferenceApi(remote.Service):
    """Conference API v0.1

    Cloud Endpoints service exposing user Profile read/update operations.
    """

    # - - - Profile objects - - - - - - - - - - - - - - - - - - -

    def _copyProfileToForm(self, prof):
        """Copy relevant fields from Profile to ProfileForm.

        Only fields present on both the entity and the form are copied; the
        teeShirtSize string stored in the datastore is converted back to its
        TeeShirtSize enum member for the protorpc message.
        """
        # copy relevant fields from Profile to ProfileForm
        pf = ProfileForm()
        for field in pf.all_fields():
            if hasattr(prof, field.name):
                # convert t-shirt string to Enum; just copy others
                if field.name == 'teeShirtSize':
                    setattr(pf, field.name, getattr(TeeShirtSize, getattr(prof, field.name)))
                else:
                    setattr(pf, field.name, getattr(prof, field.name))
        pf.check_initialized()
        return pf

    def _getProfileFromUser(self):
        """Return user Profile from datastore, creating new one if non-existent.

        Raises endpoints.UnauthorizedException when no user is authenticated.
        """
        user = endpoints.get_current_user()
        if not user:
            raise endpoints.UnauthorizedException('Authorization required')
        # TODO 1
        # step 1. copy utils.py from additions folder to this folder
        #  and import getUserId from it
        # step 2. get user id by calling getUserId(user)
        # step 3. create a new key of kind Profile from the id
        user_id = getUserId(user)
        p_key = ndb.Key(Profile, user_id)
        # TODO 3
        # get the entity from datastore by using get() on the key
        profile = p_key.get()
        if not profile:
            # First visit: create a Profile seeded from the Google account.
            profile = Profile(
                key = p_key,  # TODO 1 step 4. replace with the key from step 3
                displayName = user.nickname(),
                mainEmail= user.email(),
                teeShirtSize = str(TeeShirtSize.NOT_SPECIFIED),
            )
            # TODO 2
            # save the profile to datastore
            profile.put()
        return profile      # return Profile

    def _doProfile(self, save_request=None):
        """Get user Profile and return to user, possibly updating it first."""
        # get user Profile
        prof = self._getProfileFromUser()
        # if saveProfile(), process user-modifyable fields
        if save_request:
            for field in ('displayName', 'teeShirtSize'):
                if hasattr(save_request, field):
                    val = getattr(save_request, field)
                    if val:
                        # Store everything as strings (enum values included).
                        setattr(prof, field, str(val))
            # TODO 4
            # put the modified profile to datastore
            prof.put()
        # return ProfileForm
        return self._copyProfileToForm(prof)

    @endpoints.method(message_types.VoidMessage, ProfileForm,
            path='profile', http_method='GET', name='getProfile')
    def getProfile(self, request):
        """Return user profile."""
        return self._doProfile()

    @endpoints.method(ProfileMiniForm, ProfileForm,
            path='profile', http_method='POST', name='saveProfile')
    def saveProfile(self, request):
        """Update & return user profile."""
        return self._doProfile(request)
# registers API: create the WSGI application serving ConferenceApi over Cloud Endpoints
api = endpoints.api_server([ConferenceApi])
| SteveWooding/fullstack-nanodegee-conference | Lesson_3/00_Conference_Central/conference.py | Python | gpl-3.0 | 4,120 |
from django.utils.translation import ugettext_lazy as _
from oioioi.base.menu import MenuRegistry
# Registry of links shown in the navigation bar; other apps register their entries here.
navbar_links_registry = MenuRegistry(_("Navigation Bar Menu"))
| sio2project/oioioi | oioioi/problems/menu.py | Python | gpl-3.0 | 162 |
# -*- coding: utf-8 -*-
from flask import Flask,render_template,send_file,Response,flash,request,redirect,session
from werkzeug.utils import secure_filename
import json
import os.path
import os
import gzip
import urllib
from db import DbGetListOfDates,DbGet,DbGetComments,DbGetMulitple,DbGetNearbyPoints,DbPut,DbPutWithoutPassword,DbSearchWord,DbGetMapsOfUser,DbGetAllMaps,DbAddComment,CheckValidMapId,CheckValidFreetext,DbDelMap,DbChkPwd
import anydbm
import traceback
from progress import GetProgress,SetProgress
from users import CheckSession,Login,ActivateUser,SendActivationMail,ReserveUser,GetUserFromUserOrEmail,SendForgotPasswordMail
import sys
from orchestrator import BuildMap,ProcessTrkSegWithProgress,BuildMapFromTrack
from searchparser import SearchQueryParser
from sets import Set
from textutils import remove_accents
from log import Log
from mapparser import ParseMap
from model import Track
from options import options_default
from dem import GetEleFromLatLon
from computeprofile import ComputeProfile
from demize import Demize
from generate_id import uniqid
from config import keysnpwds, config
from flask_babel import Babel, gettext
from thumbnail import selectPointsForThumbnail, thumbnailUrlMapbox
# Create flask application
application = Flask(__name__)
application.config['UPLOAD_FOLDER'] = 'uploads'   # where submitted GPX files are saved
application.secret_key = keysnpwds['secret_key']  # signs the session cookie
## Internationalization (i18n)
babel = Babel(application)
# Languages offered by the UI; keys are the ISO 639-1 codes used in URLs and the session.
LANGUAGES = {
    'en': 'English',
    'fr': 'Francais',
    'es': 'Español'
}
@babel.localeselector
def get_locale():
    """Pick the UI language: an explicit session choice wins, otherwise the
    best match from the browser's Accept-Language header."""
    # Uncomment to force a specific language while testing:
    #return 'es'
    #return 'fr'
    lang = session.get('lang')
    if lang is not None:
        return lang
    return request.accept_languages.best_match(LANGUAGES.keys())
@application.route('/i18n.js/<item>')
def i18n_js(item):
    """Serve the translated javascript strings for one UI area."""
    # Whitelist the suffix so arbitrary templates cannot be rendered.
    assert(item in ('header','map','prepare'))
    return render_template('i18n_{0}.js'.format(item))
@application.route('/<lang>/testi18n.js')
def test_i18n_js(lang):
    """Debug page rendering the JS translations inline, because js escaping is
    not well handled by jinja2 -- check the browser console for errors."""
    session['lang'] = lang
    scripts = render_template('i18n_header.js') + render_template('i18n_map.js')
    return '<html><head></head><body>Press Ctrl+Maj+K and check no errors in console<script>' + scripts + '</script>'
## Index page
@application.route('/',defaults={'lang':None,'limit':10})
@application.route('/indexall',defaults={'lang':None,'limit':-1})
@application.route('/<lang>/',defaults={'limit':10})
@application.route('/<lang>/indexall',defaults={'limit':10})
def index(lang,limit):
    """Home page: list the latest maps (limit>-1) or every map (limit==-1)."""
    if lang!=None:
        session['lang']=lang
    maplist = DbGetListOfDates()   # {date: [mapid, ...]}
    cptr = 0
    mapsout = []
    # Walk dates newest-first and collect up to 'limit' maps for the template.
    for date in sorted(maplist.iterkeys(),reverse=True):
        maps = maplist[date]
        for mapid in maps:
            (lat,lon) = DbGet(mapid,'startpoint').split(',')
            trackdesc = DbGet(mapid,'trackdesc')
            trackuser = DbGet(mapid,'trackuser')
            desc=trackdesc.decode('utf8')
            mapsout.append({'mapid':mapid,'lat':lat,'lon':lon,'user':trackuser,'desc':desc,'date':date})
            cptr += 1
            if(limit>-1) and (cptr>limit):
                break
        if(limit>-1) and (cptr>limit):
            break
    return render_template('index.html',limit=limit,maps=mapsout,GMapsApiKey=keysnpwds['GMapsApiKey'])
## GPX Export
@application.route('/togpx/<mapid>')
def togpx(mapid):
    """Export the stored track points of a map as a minimal GPX 1.0 document."""
    # Load the gzipped point list produced at map build time.
    with gzip.open('data/mapdata/%s.json.gz'%mapid,'rb') as f:
        mapdata = json.load(f)
    trkpts = ''.join('<trkpt lat="%f" lon="%f"></trkpt>'%(p[0],p[1]) for p in mapdata['points'])
    return '<?xml version="1.0" encoding="UTF-8"?>\n<gpx version="1.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://www.topografix.com/GPX/1/0" xsi:schemaLocation="http://www.topografix.com/GPX/1/0 http://www.topografix.com/GPX/1/0/gpx.xsd"><trk><trkseg>' + trkpts + '</trkseg></trk></gpx>'
## Thumbnails
# Ensure the on-disk thumbnail cache directories exist at import time.
if not os.path.isdir('data'):
    os.mkdir('data')
if not os.path.isdir('data/thumbnail_cache'):
    os.mkdir('data/thumbnail_cache')
@application.route('/thumbnail/<mapid>')
@application.route('/thumbnail.php',defaults={'mapid':None})
def thumbnail(mapid):
    """Serve a PNG thumbnail for a map, rendering and caching it on first use."""
    if mapid==None:
        mapid = request.args.get('mapid')   # legacy .php URL passes it as a query arg
    filename = 'data/thumbnail_cache/%s.png'%mapid
    if os.path.isfile(filename):
        # Return image in cache
        return send_file(filename, mimetype='image/png')
    else:
        # Pick representative points and build the static-map image URL.
        ptlist = selectPointsForThumbnail(mapid)
        # Build map image url
        url = thumbnailUrlMapbox(ptlist)
        # Keep the source URL next to the cached image (debugging aid).
        furl = open('data/thumbnail_cache/%s.url'%(mapid),'w')
        furl.write(url)
        furl.close()
        # Download png, put it in cache and send it
        f = urllib.urlopen(url)
        fcache = open(filename,'wb')
        contents = f.read()
        fcache.write(contents)
        fcache.close()
        f.close()
        return contents
## Show map
@application.route('/<lang>/showmap/<mapid>', defaults={'map_type': None})
@application.route('/<lang>/showmap/<mapid>/<map_type>')
@application.route('/<lang>/showmap-flot.php',defaults={'mapid':None,'map_type': None})
@application.route('/<lang>/showmap.php',defaults={'mapid':None,'map_type': None})
@application.route('/showmap/<mapid>', defaults={'lang':None,'map_type': None})
@application.route('/showmap/<mapid>/<map_type>',defaults={'lang':None})
@application.route('/showmap-flot.php',defaults={'lang':None,'mapid':None,'map_type': None})
@application.route('/showmap.php',defaults={'lang':None,'mapid':None,'map_type': None})
def showmap(lang,mapid,map_type):
    """Map display page; map_type overrides the background stored with the map."""
    if lang!=None:
        session['lang']=lang
    if mapid==None:
        mapid=request.args.get('mapid')   # legacy .php URLs
    # Read map data
    f=gzip.open('data/mapdata/%s.json.gz'%mapid,'rb')
    mapdata=json.load(f)
    f.close()
    # Read map db
    mapdb = anydbm.open('data/maps/%s.db'%mapid, 'r')
    if map_type==None:
        map_type = mapdata['type']
    # Render
    _mapdb={}
    for key in mapdb:
        _mapdb[key] = mapdb[key].decode('utf-8') # We must convert each utf8 string into unicode for jinja2
    out = render_template('showmap.html',domain=config['domain'],mapid=mapid,type=map_type,mapdb=_mapdb,mapdata=mapdata,GMapsApiKey=keysnpwds['GMapsApiKey'],GeoPortalApiKey=keysnpwds['GeoPortalApiKey'])
    mapdb.close()
    return out
@application.route('/mapdata/<mapid>')
def mapdata(mapid):
    """Serve the map's point/chart data wrapped in a javascript template."""
    # Read map data
    f=gzip.open('data/mapdata/%s.json.gz'%mapid,'rb')
    mapfromfile=json.load(f)
    f.close()
    return Response(render_template('mapdata.js',mapdata=mapfromfile,chartdata=json.dumps(mapfromfile['chartdata'])), mimetype='text/javascript')
@application.route('/comments/<mapid>')
def comments(mapid):
    """Return all comments of a map as an XML document."""
    # Each comment tuple is (date, user, text).
    fragments = ['<comment user="%s" date="%s">%s</comment>' % (c[1],c[0],c[2]) for c in DbGetComments(mapid)]
    return Response('<?xml version="1.0" encoding="UTF-8"?><result>%s</result>' % ''.join(fragments), mimetype='text/xml')
@application.route('/sendcomment/<mapid>/<comment>')
def sendcomment(mapid,comment):
    """Attach a comment to a map; the author is the logged-in user if a valid
    session is posted, otherwise the client's IP address.  XML answer."""
    try:
        user = 'unknown'
        if 'user' in request.form:
            # BUG FIX: request.form has no getvalue() (that is the cgi.FieldStorage
            # API); Werkzeug MultiDict uses get().
            user = request.form.get('user')
            # NOTE(review): CheckValidUserName is not among the imports at the top
            # of this file -- confirm it exists in users.py and is imported.
            if not CheckValidUserName(user):
                raise Exception('Invalid user name')
            sess = request.form.get('sess')
            if CheckSession(user,sess):
                pass
            else:
                raise Exception(gettext('Invalid session, please re-login'))
        else:
            user = request.remote_addr
        if not CheckValidMapId(mapid):
            raise Exception(gettext('Invalid map id'))
        if not CheckValidFreetext(comment):
            # BUG FIX: the original reported 'Invalid map id' here as well.
            raise Exception(gettext('Invalid comment'))
        DbAddComment(mapid,user,comment)
        result = 'OK'
    except Exception as e:
        result = str(e)
    out = '<?xml version="1.0" encoding="UTF-8"?>\n<result>%s</result>'%result
    return Response(out, mimetype='text/xml')
@application.route('/nearmaps/<mapid>')
def nearmaps(mapid):
    """JSON dict of the maps starting near the given map (itself excluded)."""
    lat,lon = map(float,DbGet(mapid,'startpoint').split(','))
    # Hand-built JSON object: {"mapid": {startpoint, trackdesc, trackuser, date}, ...}
    return '{'+','.join(['"%s":%s' % (_mapid,json.dumps(DbGetMulitple(_mapid,('startpoint','trackdesc','trackuser','date')))) for _mapid in filter(lambda mid: mid!=mapid,DbGetNearbyPoints(lat,lon))])+'}'
@application.route('/dbget/<mapid>/<element>')
def dbget(mapid,element):
    """Read a single attribute of a map from the database; XML answer."""
    try:
        val = DbGet(mapid, element.encode('ascii'))
        message = 'OK'
    except Exception as e:
        message = 'Error: ' + str(e) + '\n' + traceback.format_exc()
        val = 'Error'
    xml = '<?xml version="1.0" encoding="UTF-8"?>\n<answer><message>%s</message><pageelementid>%s</pageelementid><value>%s</value></answer>' % (message, element, val)
    return Response(xml, mimetype='text/xml')
@application.route('/dbput/<mapid>/<pwd>/<ele>/<val>',defaults={'user':None,'sess':-1})
@application.route('/dbput/<mapid>/<pwd>/<ele>/<val>/<user>/<sess>')
def dbput(mapid,pwd,ele,val,user,sess):
    """Write one attribute of a map; XML answer.

    Two auth paths: a logged-in session (user/sess) that must own the map, or
    the per-map password (pwd) held in the client's cookies.
    """
    # BUG FIX: dropped the leftover 'defaults={...}' parameter from the original
    # signature -- Flask never passes it, and it was a mutable default argument.
    try:
        if user!=None and sess!=-1:
            if CheckSession(user,sess):
                map_user = DbGet(mapid,'trackuser')
                if len(map_user)>0 and map_user==user:
                    DbPutWithoutPassword(mapid,ele.encode('ascii'),val.encode('utf8'))
                    message = 'OK'
                else:
                    raise Exception(gettext('Map %s does not belong to user %s, but to user %s') % (mapid,user,map_user))
            else:
                raise Exception(gettext('Invalid session, please re-login'))
        else:
            # No session: fall back to the per-map password check done by DbPut.
            DbPut(mapid,pwd,ele.encode('ascii'),val.encode('utf8'))
            message = 'OK'
    except Exception as e:
        message = 'Error: ' + str(e)
        val = 'Error'
    out = '<?xml version="1.0" encoding="UTF-8"?>\n<answer><message>%s</message><pageelementid>%s</pageelementid><value>%s</value></answer>' % (message,ele,val)
    return Response(out, mimetype='text/xml')
## Send map
@application.route('/<lang>/submitform')
@application.route('/submitform',defaults={'lang':None})
def submitform(lang):
    """GPX upload form page."""
    if lang is not None:
        session['lang'] = lang
    return render_template('submitform.html', GMapsApiKey=keysnpwds['GMapsApiKey'])
@application.route('/upload', methods=['POST'])
def upload():
    """Handle a GPX submission: save the uploads, parse the build options and
    launch the map build.

    Returns an error string on failure, or a small JS snippet that stores the
    per-map password cookie and redirects to the new map page.
    """
    # Get submit_id (also used as the new map id and as the progress key)
    submit_id = request.form['submit_id'].encode('ascii')
    if not submit_id.isalnum():
        return 'Bad submitid'
    # Build inputfile array
    inputfile = []
    i=0
    for file in request.files.getlist("file[]"):
        # Save each uploaded file
        if not os.path.isdir(application.config['UPLOAD_FOLDER']):
            os.mkdir(application.config['UPLOAD_FOLDER'])
        p=os.path.join(application.config['UPLOAD_FOLDER'], secure_filename('%s_%s.gpx'%(submit_id,i)))
        Log('Saving file to %s'%p,submit_id)
        file.save(p)
        Log('File saved',submit_id)
        i+=1
        inputfile.append(file)
    # In case of import from URL
    if request.form.has_key('fromurl') and len(request.form['fromurl'])>0:
        inputfile.append(request.form.get('fromurl').encode('ascii'))
    if len(inputfile)<1:
        return gettext('Error while uploading file')
    # Track selection in case file contains several tracks
    if request.form.has_key('trk_select'):
        trk_id = int(request.form['trk_select'])
    else:
        trk_id = 0
    trk_seg_id = 0
    # Get track description
    Log('Get track desc',submit_id)
    desc = request.form['desc'].encode('utf8')
    Log('Check session',submit_id)
    # Check session; fall back to 'unknown' on any auth failure
    user = request.form['user']
    #sys.stderr.write('%s\n'%(request.form))
    if user=='NoUser' or user=='':
        user = 'unknown'
    else:
        sess = request.form['sess']
        if not CheckSession(user,sess):
            user = 'unknown'
    # Parse options (flat,wind,maptype,...)
    # BUG FIX: copy options_default instead of aliasing it -- the original
    # mutated the shared module-level dict, leaking one request's options
    # into every later request.
    options = options_default.copy()
    for key in options:
        if request.form.has_key(key):
            if type(options[key])==bool:
                if request.form.get(key):
                    options[key]=True
                else:
                    options[key]=False
                #options[key]=(request.form[key]=='yes')
            elif type(options[key])==int:
                options[key]=int(request.form[key])
            elif type(options[key])==str or type(options[key])==unicode:
                options[key]=request.form[key]
            else:
                raise Exception(gettext('type %s not handled')%type(options[key]))
    Log('options=%s'%options,submit_id)
    Log('start BuildMap',submit_id)
    try:
        pwd = BuildMap(inputfile,submit_id,trk_id,trk_seg_id,submit_id,desc,user,options)
    except Exception as e:
        Log(str(e))
        SetProgress(submit_id,str(e))
        return str(e)
    Log('end BuildMap',submit_id)
    # Store the map password in a 10-day cookie, then jump to the map page.
    return '''<script type="text/javascript">
var date = new Date();
date.setTime(date.getTime()+(10*24*60*60*1000));
var expires = "; expires="+date.toGMTString();
document.cookie = "pwd%(mapid)s=%(pwd)s"+expires+"; path=/";
location.href=\'/showmap/%(mapid)s\';
</script>'''% {'mapid':submit_id,'pwd':pwd}
@application.route('/getprogress/<submitid>')
def getprogress(submitid):
    # Plain-text progress string for the asynchronous map build 'submitid'.
    return GetProgress(submitid.encode('ascii')).decode('utf8')
## Search
class MapSeach(SearchQueryParser):
    """Search-query parser bound to the map-description word index.

    Only plain words are indexed; wildcard and quoted-phrase queries
    deliberately return empty sets.
    """
    def GetWord(self, word):
        # Map ids whose description contains the word.
        return Set(DbSearchWord('trackdesc',word))
    def GetWordWildcard(self, word):
        # Wildcards not supported by the index.
        return Set()
    def GetQuotes(self, search_string, tmp_result):
        # Exact-phrase search not supported.
        return Set()
def map_search_result(mapid):
    """Render one search hit as a '<map .../>' XML fragment."""
    try:
        (lat,lon) = DbGet(mapid,'startpoint').split(',')
    except Exception:
        (lat,lon)=(0.0,0.0)
    trackdesc = DbGet(mapid,'trackdesc')
    startdate = DbGet(mapid,'date')
    trackuser = DbGet(mapid,'trackuser')
    try:
        # Escape non-ascii characters as XML character references.
        desc = trackdesc.encode('ascii', 'xmlcharrefreplace')
    except Exception:
        desc = trackdesc
    # BUG FIX: escape '&' for XML -- the original replace('&','&') was a no-op
    # (an '&amp;' mangled somewhere along the line).
    desc = desc.replace('&','&amp;')
    return('<map mapid="%s" lat="%s" lon="%s" date="%s" user="%s">%s</map>' % (mapid,lat,lon,startdate,trackuser,desc))
@application.route('/search/<search_req>')
def search(search_req):
    """Full-text search over map descriptions; XML list of matching maps."""
    try:
        # Normalize the query (lowercase, accents stripped) to match the index.
        req = remove_accents(search_req.encode('utf8').lower(),'utf-8')
        mapids = MapSeach().Parse(req)
        out='<result><maps>%s</maps></result>'%''.join(map(map_search_result,mapids))
    except Exception, e:
        out='<error>Error: %s</error>'%e
    return Response(out, mimetype='text/xml')
## Show user
def map_retrieve_infos_showuser(mapid):
    """Fetch the per-map fields shown on a user's public page."""
    return {
        'mapid': mapid,
        'desc': DbGet(mapid,'trackdesc').decode('utf8'),
        'date': DbGet(mapid,'date'),
    }
@application.route('/<lang>/showuser/<user>')
@application.route('/showuser/<user>',defaults={'lang':None})
def showuser(lang,user):
    """Public page listing every map of a user."""
    if lang is not None:
        session['lang'] = lang
    mapids = DbGetMapsOfUser(user.encode('ascii'))
    maps = [map_retrieve_infos_showuser(m) for m in mapids]
    return render_template('showuser.html', user=user, maps=maps)
@application.route('/userinfo/<user>')
def userinfo(user):
    """All maps of a user as XML fragments."""
    fragments = [map_search_result(m) for m in DbGetMapsOfUser(user.encode('ascii'))]
    return Response('<maps>%s</maps>' % ''.join(fragments), mimetype='text/xml')
## Browse maps
@application.route('/<lang>/mapofmaps')
@application.route('/mapofmaps',defaults={'lang':None})
def mapofmaps(lang):
    """Browser page showing all maps grouped on a world map."""
    if lang is not None:
        session['lang'] = lang
    return render_template('mapofmaps.html', GMapsApiKey=keysnpwds['GMapsApiKey'])
def map_search_result2(lat,lon,mapid):
    """Render one map as a '<map .../>' XML fragment for the map-of-maps browser."""
    trackdesc = DbGet(mapid,'trackdesc')
    startdate = DbGet(mapid,'date')
    trackuser = DbGet(mapid,'trackuser')
    try:
        # BUG FIX: escape XML special characters -- the original replaces were
        # no-ops ('<'->'<', '>'->'>'), presumably mangled '&lt;'/'&gt;' entities.
        desc = trackdesc.encode('ascii', 'xmlcharrefreplace').replace('&','&amp;').replace('<','&lt;').replace('>','&gt;')
    except Exception:
        desc = trackdesc
    return('<map mapid="%s" lat="%s" lon="%s" date="%s" user="%s">%s</map>' % (mapid,lat,lon,startdate,trackuser,desc))
def latlonmapids2xml(latlonmapids):
    """Render a (lat, lon, [mapid...]) start-point group as a <maps> XML fragment."""
    lat, lon, mapids = latlonmapids
    inner = ''.join(map_search_result2(lat, lon, mid) for mid in mapids)
    return '<maps lat="%.4f" lon="%.4f">%s</maps>' % (lat, lon, inner)
@application.route('/getmaplist')
def getmaplist():
    """Every map grouped by start point, as XML for the map-of-maps page."""
    groups = DbGetAllMaps()
    body = ''.join(latlonmapids2xml(g) for g in groups)
    return Response('<results>%s</results>' % body, mimetype='text/xml')
## Map Tools
def auth(mapid,pwd,user,sess):
    """Authorize a map-modifying action; raises on failure.

    Either a valid (user, sess) session owning the map, or the per-map
    password (kept in the browser's cookies) must be supplied.
    """
    # Check rights
    if user!=None and sess!=None:
        if CheckSession(user,sess):
            map_user = DbGet(mapid,'trackuser')
            if len(map_user)>0 and map_user==user:
                pass
            else:
                raise Exception(gettext('Map %s does not belong to user %s, but to user %s') % (mapid,user,map_user))
        else:
            raise Exception(gettext('Invalid session, please re-login'))
    else:
        # Anonymous path: the per-map password must match.
        if not DbChkPwd(mapid,pwd):
            raise Exception(gettext('You do not have the map\'s password in your browser\'s cookies'))
@application.route('/delmap/<mapid>/<pwd>',defaults={'user':None,'sess':None})
@application.route('/delmap/<mapid>/<pwd>/<user>/<sess>')
def delmap(mapid,pwd,user,sess):
    """Delete a map (database entry and point data) after authorization."""
    try:
        auth(mapid,pwd,user,sess)
        # Delete map
        DbDelMap(mapid)
        mapfile = 'data/mapdata/%s.json.gz' % mapid
        os.remove(mapfile)
        message = gettext('Map deleted')
    except Exception, e:
        message = str(e)
    return render_template('map_deleted.html',message=message)
def modifymap(mapid,pwd,user,sess,modifyfunction):
    """Apply modifyfunction to the map's point list and rebuild the map.

    modifyfunction(ptlist) must return (new_ptlist, startpointchanged).
    On success redirects to the map page, otherwise renders an error page.
    """
    try:
        # Authentificate
        auth(mapid,pwd,user,sess)
        # Parse map
        options, ptlist = ParseMap(mapid)
        # Apply modifications
        ptlist,startpointchanged = modifyfunction(ptlist)
        # Rebuild map
        track = Track(ptlist)
        ProcessTrkSegWithProgress(track,mapid,mapid,True,options)
        # If start point has changed, then update the database
        if startpointchanged:
            DbPutWithoutPassword(mapid,'startpoint','%.4f,%.4f' % (track.ptlist[0].lat,track.ptlist[0].lon))
        # Recompute thumbnail (drop the cached PNG; it is lazily rebuilt)
        previewfile = 'data/thumbnail_cache/%s.png' % mapid
        if os.access(previewfile,os.F_OK):
            os.remove(previewfile)
        message = None
    except Exception, e:
        message = str(e)
    if message==None:
        return redirect('/showmap/%s'%mapid)
    else:
        return render_template('map_action_error.html',message=message,mapid=mapid)
@application.route('/map/crop/<mapid>/<pwd>/<int:pt1>/<int:pt2>',defaults={'user':None,'sess':None})
@application.route('/map/crop/<mapid>/<pwd>/<int:pt1>/<int:pt2>/<user>/<sess>')
def cropmap(mapid,pwd,pt1,pt2,user,sess):
    """Keep only the points [pt1:pt2] of the track."""
    def crop(ptlist):
        # The start point moves whenever the crop does not begin at index 0.
        return (ptlist[pt1:pt2], pt1 != 0)
    return modifymap(mapid, pwd, user, sess, crop)
@application.route('/map/clear/<mapid>/<pwd>/<int:pt1>/<int:pt2>',defaults={'user':None,'sess':None})
@application.route('/map/clear/<mapid>/<pwd>/<int:pt1>/<int:pt2>/<user>/<sess>')
def clearmap(mapid,pwd,pt1,pt2,user,sess):
    """Remove the points [pt1:pt2] from the track."""
    def clear(ptlist):
        # The start point changes when the removed range begins at index 0.
        return (ptlist[:pt1] + ptlist[pt2:], pt1 == 0)
    return modifymap(mapid, pwd, user, sess, clear)
def removepoints(ptlist,ptidxtodel):
    """Return (point list without the given indices, start-point-changed flag).

    NOTE(review): list.remove() raises ValueError if an index in ptidxtodel is
    out of range or duplicated -- confirm callers always pass valid indices.
    """
    l=range(0,len(ptlist))
    Log('removepoints: %s %s'%(ptidxtodel,len(ptlist)))
    for i in ptidxtodel:
        l.remove(i)
    # Start point changed iff index 0 was among the removed points.
    return ([ptlist[i] for i in l],0 in ptidxtodel)
@application.route('/map/clearlist/<mapid>/<pwd>/<ptliststr>',defaults={'user':None,'sess':None})
@application.route('/map/clearlist/<mapid>/<pwd>/<ptliststr>/<user>/<sess>')
def clearmaplist(mapid,pwd,ptliststr,user,sess):
    """Remove an arbitrary comma-separated list of point indices from the track."""
    indices = [int(s) for s in ptliststr.split(',')]
    return modifymap(mapid, pwd, user, sess, lambda ptlist: removepoints(ptlist, indices))
@application.route('/map/export/<mapid>')
def exportmap(mapid):
    # TODO: build it from client side
    # Placeholder: server-side export is intentionally unimplemented.
    pass
@application.route('/map/demize/<int:index>/<mapid>/<pwd>',defaults={'user':None,'sess':None})
@application.route('/map/demize/<int:index>/<mapid>/<pwd>/<user>/<sess>')
def demize(index,mapid,pwd,user,sess):
    """Incrementally replace track elevations with DEM values (chunked AJAX).

    'index' is the point index to resume from; answers XML with the next index
    and a completion percentage, or result 'Done' when finished.
    """
    try:
        # Authentificate
        auth(mapid,pwd,user,sess)
        # Start/continue/finish DEMization. index is current point index, l is total number of points in map
        index,l = Demize(index,mapid)
        # Format answer
        if index==0:
            answer = '<answer><result>Done</result></answer>'
        else:
            percent = index * 100 / l
            answer = '<answer><result>OK</result><nextindex>%s</nextindex><percent>%s</percent></answer>' % (index,percent)
    except Exception, e:
        answer = '<answer><result>%s</result></answer>' % e
    return Response('<?xml version="1.0" encoding="UTF-8"?>\n%s'%answer,mimetype='text/xml')
## User services
def CheckHumain(humaincheck):
    """Anti-bot check: the answer must be the localized 'earth'/'the earth'."""
    answer = humaincheck.strip().lower()
    return answer in (gettext('earth'), gettext('the earth'))
@application.route('/<lang>/registerform')
@application.route('/registerform',defaults={'lang':None})
def registerform(lang):
    """Display register form"""
    if lang is not None:
        session['lang'] = lang
    return render_template('register.html')
@application.route('/register', methods=['POST'])
def register():
    """Process the registration form: validate, reserve the user, mail the activation link."""
    mail = request.form['mail'].lower()
    user = request.form['user'].lower()
    pwd1 = request.form['pwd1']
    pwd2 = request.form['pwd2']
    humaincheck = request.form['humaincheck']
    if not CheckHumain(humaincheck):
        return render_template('register.html',error_message=gettext('Humain check error'))
    if pwd1!=pwd2:
        return render_template('register.html',error_message=gettext('The two password you entered are different. Please enter twice the same password'))
    # Reserve the account; it stays inactive until the mailed link is visited.
    activation_id,err_msg = ReserveUser(user.encode('ascii'),mail.encode('ascii'),pwd1.encode('utf8'))
    if activation_id==None:
        return render_template('register.html',error_message=err_msg)
    SendActivationMail(mail,user,activation_id)
    return render_template('user_registered.html',user=user)
@application.route('/activate/<user>/<activationid>')
def activate(user,activationid):
    """Activate user given it's activation_id"""
    try:
        ActivateUser(user, activationid)
    except Exception as e:
        return render_template('user_activate_error.html', message=str(e))
    return render_template('user_activated.html', user=user)
@application.route('/login/<user>/<pwd>')
def login(user,pwd):
    """Check login/password; XML answer carrying the user and a session id."""
    user = user.lower()
    try:
        (user,sessid) = Login(user,pwd)
    except Exception as e:
        return Response('<result><user>NoUser</user><sess>-1</sess><error>%s</error></result>'%e, mimetype='text/xml')
    if user is None:
        # BUG FIX: the original assigned a local 'sess = -1' that was never used
        # and then formatted the response with the raw sessid; answer the
        # documented NoUser/-1 pair instead.
        user = 'NoUser'
        sessid = -1
    out = '<result><user>%s</user><sess>%s</sess></result>' % (user,sessid)
    return Response(out, mimetype='text/xml')
@application.route('/chksess/<user>/<sess>')
def chksess(user,sess):
    """Check session_id for a given user; XML answer ('OK' or 'Expired')."""
    try:
        valid = CheckSession(user, sess)
    except Exception as e:
        out = '<answer><result>Error: %s</result><user>NoUser</user><sess>-1</sess></answer>' % str(e)
        return Response(out, mimetype='text/xml')
    result = 'OK' if valid else 'Expired'
    out = '<answer><result>%s</result><user>%s</user><sess>%s</sess></answer>' % (result, user, sess)
    return Response(out, mimetype='text/xml')
@application.route('/<lang>/forgotpwd')
@application.route('/forgotpwd',defaults={'lang':None})
def forgotpwd(lang):
    """Forgot-password form page."""
    if lang is not None:
        session['lang'] = lang
    return render_template('forgotpwd.html')
@application.route('/resendpwd', methods=['POST'])
def resendpwd():
    """Forgot-password handler: look up the account and mail the password."""
    user_mail = request.form['user_mail'].encode('ascii').lower()
    humaincheck = request.form['humaincheck']
    if not CheckHumain(humaincheck):
        return render_template('resendpwd_error.html',error_message=gettext('Humain check error'))
    # Accepts either the user name or the registered e-mail address.
    user,err_str = GetUserFromUserOrEmail(user_mail)
    if user==None:
        return render_template('resendpwd_error.html',error_message=err_str)
    mail = SendForgotPasswordMail(user)
    return render_template('resendpwd_ok.html',mail=mail)
def retrievemap(mapid):
    """Summary dict for one map, used by the user home page."""
    lat, lon = DbGet(mapid,'startpoint').split(',')
    return {
        'mapid': mapid,
        'lat': lat,
        'lon': lon,
        'desc': DbGet(mapid,'trackdesc').decode('utf8'),
        'date': DbGet(mapid,'date'),
        'user': DbGet(mapid,'trackuser'),
    }
@application.route('/<lang>/userhome/<user>')
@application.route('/userhome/<user>',defaults={'lang':None})
def userhome(lang,user):
    """Logged-in user's home page listing their maps."""
    if lang is not None:
        session['lang'] = lang
    mapids = DbGetMapsOfUser(user.encode('ascii'))
    return render_template('userhome.html',user=user,maps=map(retrievemap,mapids),GMapsApiKey=keysnpwds['GMapsApiKey'])
@application.route('/mergemaps/<mapidsliststr>/<user>/<sess>')
def mergemaps(mapidsliststr,user,sess):
    """Merge several maps of a user into one new map (points deduplicated by timestamp)."""
    if not CheckSession(user,sess):
        message = gettext('Cannot identify user %s %s')%(user,sess)
        # BUG FIX: the original fell through to the redirect below with
        # 'newmapid' undefined, raising NameError; report the message instead.
        return render_template('map_deleted.html',message=message)
    import datetime  # BUG FIX: used below but absent from the module imports
    mapids = mapidsliststr.split(',')
    ptlistmerged = {}
    # BUG FIX: generate the new map id once; the original called uniqid()
    # on every loop iteration, making the log ids inconsistent.
    newmapid = uniqid()
    for mapid in mapids:
        Log("MergeCgi: parse map %s" % mapid,newmapid)
        # Parse map
        options,ptlist = ParseMap(mapid)
        #TODO: merge options
        # set right day if needed (tracks without a valid GPS date)
        if ptlist[0].datetime.year<=1980:
            dfromdb = DbGet(mapid,'date')
            if dfromdb:
                d = datetime.datetime.strptime(dfromdb,'%Y-%m-%d')
                for pt in ptlist:
                    pt.datetime = pt.datetime.replace(year=d.year,month=d.month,day=d.day)
        # append to dict (points sharing a timestamp are deduplicated)
        for pt in ptlist:
            ptlistmerged[pt.datetime] = pt
    ptlistmerged = ptlistmerged.values()
    ptlistmerged.sort(key=lambda pt:pt.datetime)
    Log("MergeCgi: rebuild: Track len=%d" % len(ptlistmerged),newmapid)
    # Rebuild map
    track = Track(ptlistmerged)
    pwd = BuildMapFromTrack(track,newmapid,newmapid,'Result of merge',user,options)
    Log("MergeCgi: finished",newmapid)
    # Redirect to map
    return redirect('/showmap/%s'%newmapid)
@application.route('/delmaps/<mapidsliststr>/<user>/<sess>')
def delmaps(mapidsliststr,user,sess):
    """Delete several maps belonging to the logged-in user; renders a report page."""
    if not CheckSession(user,sess):
        message = gettext('Cannot identify user %s %s')%(user,sess)
    else:
        try:
            mapids = mapidsliststr.split(',')
            message = ''
            for mapid in mapids:
                map_user = DbGet(mapid,'trackuser')
                if len(map_user)>0 and map_user==user:
                    DbDelMap(mapid)
                    os.remove('data/mapdata/%s.json.gz'%mapid)
                    message += gettext('Map %s deleted. ')%mapid
                else:
                    # Stop at the first map not owned by the caller.
                    message += gettext('Map %s do not belong to you')%mapid
                    break
        except Exception, e:
            message += gettext('Error: %s')%e
    return render_template('map_deleted.html',message=message)
## Prepare
@application.route('/<lang>/prepare',defaults={'map_type':'GeoPortal','pts':[],'names':[]})
@application.route('/<lang>/prepare/<map_type>',defaults={'pts':[],'names':[]})
@application.route('/<lang>/prepare/<map_type>/<pts>',defaults={'names':None})
@application.route('/<lang>/prepare/<map_type>/<pts>/<names>')
@application.route('/prepare',defaults={'lang':None,'map_type':'GeoPortal','pts':[],'names':[]})
@application.route('/prepare/<map_type>',defaults={'lang':None,'pts':[],'names':[]})
@application.route('/prepare/<map_type>/<pts>',defaults={'lang':None,'names':None})
@application.route('/prepare/<map_type>/<pts>/<names>',defaults={'lang':None})
def prepare(lang,map_type,pts,names):
    """Trip-preparation page; points/names are consumed client-side."""
    if lang is not None:
        session['lang'] = lang
    return render_template('prepare.html',domain=config['domain'],map_type=map_type,GMapsApiKey=keysnpwds['GMapsApiKey'],GeoPortalApiKey=keysnpwds['GeoPortalApiKey'])
# Backward compatibility
# BUG FIX: Flask URL rules cannot contain a query string, so the original rule
# '/prepare.php?ptlist=<ptlist>' never matched any request; register the bare
# path and read the legacy arguments from request.args.
@application.route('/prepare.php',defaults={'lang':None})
#@application.route('/fr/prepare.php',defaults={'lang':'fr'})
def prepare_php(lang):
    """Legacy prepare.php entry point mapping query-string args onto prepare()."""
    pts=request.args.get('ptlist')
    maptype=request.args.get('maptype')
    names=request.args.get('names')
    return prepare(lang,maptype,pts,names)
@application.route('/ele/<float:lat>/<float:lon>')
def getele(lat,lon):
    """Elevation at the given location from the DEM data, as plain text."""
    elevation = GetEleFromLatLon(lat, lon)
    return Response('%d' % elevation, mimetype='text/plain')
def PtStr2FloatArray(ptstr):
    """Parse a 'lat,lon' string into a (float, float) tuple.

    Components beyond the first two are ignored, as before.
    """
    parts = ptstr.split(',')
    return (float(parts[0]), float(parts[1]))
@application.route('/profile/<ptliststr>/<width>/<height>')
def profile(ptliststr,width,height):
    """Elevation profile for a '~'-separated list of 'lat,lon' points (plain text)."""
    ptlist = map(PtStr2FloatArray,ptliststr.split('~'))
    if(len(ptlist)<2):
        return Response(gettext('Error: Cannot compute profile for only one point'), mimetype='text/plain')
    nbpts = 400   # resampling resolution along the path
    return Response('\n'.join(map(str,ComputeProfile(ptlist,nbpts,width,height))), mimetype='text/plain')
@application.route('/prepare/export/<format>/<ptlist>/<names>')
def prepare_export(format,ptlist,names):
    # Stub endpoint: export is expected to be implemented client-side.
    # Returning None here makes Flask raise if the route is ever hit.
    # TODO: build it from client side
    pass
## Misc
@application.route('/<lang>/mobile')
@application.route('/mobile',defaults={'lang':None})
def mobile(lang):
    """Serve the mobile page; an explicit <lang> URL switches the session language."""
    if lang is not None:
        session['lang'] = lang
    return render_template('mobile.html')
@application.route('/<lang>/tour')
@application.route('/tour',defaults={'lang':None})
def tour(lang):
    """Serve the guided-tour page; an explicit <lang> URL switches the session language."""
    if lang is not None:
        session['lang'] = lang
    return render_template('tour.html')
## Add .min.js in all templates if debug mode is true
@application.context_processor
def inject_min_js():
    """Expose a 'minify' template variable: '' while debugging (unminified
    assets), '.min' in production."""
    suffix = '' if application.debug else '.min'
    return {'minify': suffix}
## Program entry point
if __name__ == '__main__':
    # Start web server
    # Optional single argument: the IP address to bind to (default: loopback).
    if len(sys.argv)==2:
        if sys.argv[1] in ('-h','--help'):
            # Python 2 print statement -- consistent with the rest of this file.
            print 'Usage: %s [bindingip]' % sys.argv[0]
            exit()
        else:
            host = sys.argv[1]
    else:
        # Listen on localhost only by default.
        host = "127.0.0.1"
    application.run(port=8080,debug=True,host=host)
| fparrel/regepe | vps/regepe_flask_server.py | Python | gpl-3.0 | 30,280 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-06-02 20:34
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated initial migration: creates the Credencial, Ferramenta,
    # Linguagem and Projeto models, then wires Credencial to Ferramenta.
    # Do not hand-edit field definitions; regenerate with makemigrations.
    initial = True
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='Credencial',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('user_name', models.CharField(max_length=60, unique=True)),
                ('password', models.CharField(max_length=255)),
                ('token', models.CharField(blank=True, max_length=60, unique=True)),
                ('agente', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Ferramenta',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('nome', models.CharField(max_length=60, unique=True)),
                ('link', models.URLField()),
            ],
        ),
        migrations.CreateModel(
            name='Linguagem',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('nome', models.CharField(max_length=60, unique=True)),
            ],
        ),
        migrations.CreateModel(
            name='Projeto',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('nome', models.CharField(max_length=60, unique=True)),
                ('dono', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='dono', to=settings.AUTH_USER_MODEL)),
                ('ferramentas', models.ManyToManyField(related_name='ferramentas', to='project_manager.Ferramenta')),
                ('linguagem', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='linguagem', to='project_manager.Linguagem')),
                ('participantes', models.ManyToManyField(related_name='participantes', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # ForeignKey added after both models exist to avoid ordering issues.
        migrations.AddField(
            model_name='credencial',
            name='ferramenta',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='project_manager.Ferramenta'),
        ),
    ]
| gabriellmb05/trabalho-les | src/project_manager/migrations/0001_initial.py | Python | gpl-3.0 | 2,659 |
# encoding: utf8
from sympy import Add
from uncertainties import __version_info__ as uncert_version
from uncertainties import ufloat, ufloat_fromstr
from uncertainties.core import Variable, AffineScalarFunc
# Abort import on the pre-3.0 'uncertainties' API, which is incompatible.
# NOTE(review): raising a Warning *instance* does stop execution (Warning is an
# Exception subclass); warnings.warn would only print. Aborting looks intentional.
if uncert_version < (3, 0):
    raise Warning("Your version of uncertanties is not supported. Try\n"
                  "$ sudo pip install uncertainties --upgrade")
class Series:
    """
    The class that provides the expansion in powers of g up to the n-th order,
    taking the error into account.
    """
    def __init__(self, n, d={0: 0}, name='g', analytic=None):
        """
        Example:
        `z2 = Series(3, {0: ufloat(-1, 0.4), 1: ufloat(-2, .004), 2: ufloat(999, .1)})`
        will give:
        Z₂(g) = -1.0(4) - 2.000(4) g + 999.00(10) g²
        :param n: number of the "known" orders, `int`
        :param d: dictionary with k=powers, v=`ufloat`s
        :param name: name of the series variable, arbitrary character, default is `'g'`
        :param analytic: boolean
        """
        # NOTE(review): `d={0: 0}` is a mutable default argument, and
        # `self.gSeries = d` aliases it; the loops below then mutate it in
        # place, so instances built without an explicit `d` share (and leak)
        # coefficient state. Worth fixing with a `d=None` sentinel.
        self.n = n
        self.gSeries = d
        self.name = name
        # Coerce supported coefficient types to ufloats; bare ints mark the
        # series as exact ("analytic", no error propagation).
        for k, v in d.items():
            if isinstance(v, AffineScalarFunc):
                self.gSeries[k] = v
            elif isinstance(v, (list, tuple)):
                self.gSeries[k] = ufloat(v[0], v[1])
            elif isinstance(v, str):
                self.gSeries[k] = ufloat_fromstr(v)
            elif isinstance(v, int):
                self.gSeries[k] = v
                self.analytic = True
            else:
                raise TypeError("Series constructor warning: Type(v)={}".format(type(v)))
        if analytic is not None:
            # XXX: if defined explicitly:
            self.analytic = bool(analytic)
        else:
            # XXX: if all values are ints assume analytic
            self.analytic = all(map(lambda x: type(x) == int, d.values()))
        # Fill any missing orders below n with (exact or uncertain) zeros.
        for i in range(0, n):
            if i not in d.keys():
                if self.analytic:
                    self.gSeries[i] = 0
                else:
                    self.gSeries[i] = ufloat(0, 0)
    def __lt__(self, other):
        # Ordered by the number of stored coefficients, not by value.
        return len(self.gSeries) < len(other.gSeries)
    def __add__(self, other):
        # Coefficient-wise sum. Orders above the smaller of the two known
        # truncations are dropped (unless one truncation is 0).
        tmp = dict(self.gSeries)
        # print "From __add__:",self.analytic," + ",other.pprint() ## FIXME
        if isinstance(other, Series):
            stop = min(self.n, other.n)
            if stop == 0:
                stop = max(self.n, other.n)
            for g in other.gSeries.keys():
                if g <= stop:
                    try:
                        tmp[g] += other.gSeries[g]
                    except KeyError:
                        tmp[g] = other.gSeries[g]
        elif isinstance(other, (int, float)):
            # Scalars shift only the constant term.
            tmp[0] += other
        else:
            print("{} {}".format(type(self), type(other)))
            raise NotImplementedError
        return Series(len(tmp), tmp, name=self.name, analytic=self.analytic)
    def __radd__(self, other):
        return self + other
    def __sub__(self, other):
        return self + (-1) * other
    def __mul__(self, other):
        # Cauchy product, truncated at the smaller known order; scalar-like
        # operands multiply every coefficient.
        tmp = {}
        if isinstance(other, Series):
            stop = min(self.n, other.n)
            for i in self.gSeries.keys():
                for j in other.gSeries.keys():
                    if (i + j) <= stop:
                        try:
                            tmp[i + j] += self.gSeries[i] * other.gSeries[j]
                        except KeyError:
                            tmp[i + j] = self.gSeries[i] * other.gSeries[j]
            res = Series(max(self.n, other.n), tmp, name=self.name, analytic=self.analytic)
        elif isinstance(other, (int, float, Variable, AffineScalarFunc, Add)):
            for i in self.gSeries.keys():
                tmp[i] = self.gSeries[i] * other
            res = Series(self.n, tmp, name=self.name, analytic=self.analytic)
        elif other == 0 or sum(map(lambda v: v == 0, self.gSeries.values())) == len(self.gSeries):
            # Either operand identically zero -> plain scalar 0, not a Series.
            return 0
        # elif isinstance(other, sympy.core.add.Add):
        #     print "\n\nself=",self
        #     print "other=",other
        #     return 0
        else:
            print("\nself = {}, type(self) = {}".format(self.gSeries, type(self)))
            print("\nother = {}, type(other) = {}".format(other, type(other)))
            raise NotImplementedError
        return res
    def __rmul__(self, other):
        return self * other
    def __neg__(self):
        return self * (-1)
    def __invert__(self):
        """ Z.__invert__() = 1/Z
        1/(1+x)=Sum_i (-1)^i x^i
        """
        # Geometric-series inversion: normalise so the constant term is 1,
        # expand 1/(1+x), then undo the normalisation with factor c.
        res = Series(self.n, {}, self.name, analytic=self.analytic)
        if self.gSeries[0] == 1:
            c = 1.
            normed_series = self + Series(self.n, {0: -1}, self.name, analytic=self.analytic)  # <-- it's -1!
        elif self.gSeries[0] != 0:
            c = 1. / self.gSeries[0]
            normed_series = self / self.gSeries[0] + Series(self.n, {0: -1}, self.name,
                                                            analytic=self.analytic)  # <-- it's -1!
        else:
            raise NotImplementedError("no constant term in series: %s" % self.gSeries)
        # if self.gSeries[0] == 1:
        #     tmp = Series(self.gSeries[1:], n = self.n-1, name=self.name)
        # for i in range(tmp.n):
        for i in range(len(self.gSeries)):
            res += (-1) ** i * normed_series ** i
        return res * c
    def __div__(self, other):
        """ For now we assume all the powers of g as non-negative
        """
        # NOTE(review): Python 2 operator name; under Python 3 division would
        # need __truediv__/__rtruediv__ as well.
        if isinstance(other, Series):
            return self * other.__invert__()
        elif isinstance(other, (int, float, Variable, AffineScalarFunc)):
            return self * (1. / other)
        else:
            raise NotImplementedError("type: {}; {}".format(type(other), other.__repr__()))
    def __rdiv__(self, other):
        return other * self.__invert__()
    def __pow__(self, power, modulo=None):
        # Only non-negative integer powers are supported.
        # NOTE(review): relies on the Python 2 builtin `reduce`; Python 3
        # would need `from functools import reduce`.
        if isinstance(power, int) and power > 1:
            return reduce(lambda x, y: x * y, [self] * power)
        elif isinstance(power, int) and power == 1:
            return self
        elif isinstance(power, int) and power == 0:
            if self.analytic:
                return Series(self.n, {0: 1}, self.name, analytic=self.analytic)
            else:
                return Series(self.n, {0: ufloat(1, 0)}, self.name, analytic=self.analytic)
        else:
            print("power = {}, type(power) = {}".format(power, type(power)))
            raise NotImplementedError
    def diff(self):
        """
        Differentiation of the polynomial in g
        """
        res = {}
        for i in range(len(self.gSeries) - 1):
            res[i] = (i + 1) * self.gSeries[i + 1]
        return Series(self.n, res, analytic=self.analytic)
    def __repr__(self):
        return self.pprint()
    ## FIXME
    def _approx(self, other):
        # Strict element-wise comparison; despite the name there is no
        # tolerance involved.
        for k, v in self.gSeries.items():
            if v != other.gSeries[k]:
                return False
        return True
    def __str__(self):
        """
        The result is truncated according to the error, indicating the accuracy of the least significant digit
        """
        res = ''
        for g, c in self.gSeries.items():
            if c != 0 and g == 0 and isinstance(c, int):
                res += " %d + " % (c)
            elif c != 0 and g == 0:
                res += " %s + " % (c.format('S'))
            elif c != 0 and g <= self.n and isinstance(c, (Variable, AffineScalarFunc)):
                # Suppress the "(error)" suffix for negligible uncertainties.
                if c.s < 1e-14:
                    res += "%s * %s**%s + " % (str(c.n), self.name, str(g))
                else:
                    res += " %s * %s**%s + " % (c.format('S'), self.name, str(g))
            elif c != 0 and g <= self.n and isinstance(c, (int, float)):
                res += "%s * %s**%s + " % (str(c), self.name, str(g))
        # Strip the trailing " + "; an all-zero series prints as '0'.
        return res[:-3] or '0'
    def coeffs(self):
        """
        """
        # Nominal values of the first n+1 coefficients.
        # NOTE(review): relies on Python 2 map() returning a sliceable list.
        return map(lambda x: float(x.format('S').split("(")[0]), self.gSeries.values())[:self.n + 1]
    def pprint(self):
        # Verbose form: "(nominal ± error) * g**k + ..." (or exact values
        # when the series is analytic).
        res = ""
        for g, c in self.gSeries.items():
            if c != 0 and g <= self.n and not self.analytic:
                res += "(%s ± %s) * %s**%s + " % (str(c.n), str(c.s), self.name, str(g))
            elif c != 0 and g <= self.n and self.analytic:
                try:
                    this_term = c.format('S')
                except AttributeError:
                    this_term = str(c)
                res += "(%s) * %s**%s + " % (this_term, self.name, str(g))
        return res[:-3] or '0'
    def __len__(self):
        return len(self.gSeries)
    def subs(self, point):
        # Substitute another Series for the expansion variable (composition).
        # NOTE(review): mutable default dict in the Series() call below has the
        # same aliasing caveat as __init__.
        res = Series(n=self.n, d={0: ufloat(0, 0)}, name=point.name, analytic=self.analytic)
        for i, c in self.gSeries.items():
            res += c * (point ** i)
        return res
    def save(self):
        """Save value to file"""
        # NOTE(review): despite the docstring this only *prints* a constructor
        # call suitable for copy-pasting; nothing is written to disk.
        slov = ''
        for k, v in self.gSeries.items():
            slov += "%d: '%s', " % (k, v)
        print("Series({}, {}, '{}')".format(self.n, slov, self.name))
if __name__ == "__main__":
    # Smoke-test demo: exercise construction, differentiation, subtraction,
    # inversion, multiplication and powers of Series.
    Z1 = Series(1)
    Z2 = Series(2, {0: ufloat(-4, 0.3), 1: ufloat(2, .002)})
    print("Z1 = {}".format(Z1))
    print("Z2 = {}".format(Z2))
    print("Z2.diff() = {}".format(Z2.diff()))
    print("Z2 - Z2 = {}".format(Z2-Z2))
    print("1/Z2 = {}".format(1 / Z2))
    print("Z1*Z2 = {}".format(Z1 * Z2))
    print("Z2**2 = {}".format(Z2 ** 2))
| kirienko/unseries | unseries.py | Python | gpl-3.0 | 9,529 |
from __future__ import with_statement
from sympy import Symbol, exp, Integer, Float, sin, cos, log, Poly, Lambda, \
Function, I, S, sqrt, srepr, Rational, Tuple, Matrix, Interval
from sympy.abc import x, y
from sympy.core.sympify import sympify, _sympify, SympifyError, kernS
from sympy.core.decorators import _sympifyit
from sympy.utilities.pytest import XFAIL, raises
from sympy.utilities.decorator import conserve_mpmath_dps
from sympy.geometry import Point, Line
from sympy.functions.combinatorial.factorials import factorial, factorial2
from sympy.abc import _clash, _clash1, _clash2
from sympy.core.compatibility import HAS_GMPY
from sympy import mpmath
def test_439():
    # Issue 439: sympify("exp(x)") must round-trip to the exact exp(x) object.
    v = sympify("exp(x)")
    assert v == exp(x)
    assert type(v) == type(exp(x))
    assert str(type(v)) == str(type(exp(x)))
def test_sympify1():
    # Basic string sympification: symbols, signs, repeating decimals
    # (".[3]" notation) and the rational=True conversion of floats.
    assert sympify("x") == Symbol("x")
    assert sympify(" x") == Symbol("x")
    assert sympify(" x ") == Symbol("x")
    # 1778
    n1 = Rational(1, 2)
    assert sympify('--.5') == n1
    assert sympify('-1/2') == -n1
    assert sympify('-+--.5') == -n1
    assert sympify('-.[3]') == Rational(-1, 3)
    assert sympify('.[3]') == Rational(1, 3)
    assert sympify('+.[3]') == Rational(1, 3)
    assert sympify('+0.[3]*10**-2') == Rational(1, 300)
    assert sympify('.[052631578947368421]') == Rational(1, 19)
    assert sympify('.0[526315789473684210]') == Rational(1, 19)
    assert sympify('.034[56]') == Rational(1711, 49500)
    # options to make reals into rationals
    assert sympify('1.22[345]', rational=True) == \
        1 + Rational(22, 100) + Rational(345, 99900)
    assert sympify('2/2.6', rational=True) == Rational(10, 13)
    assert sympify('2.6/2', rational=True) == Rational(13, 10)
    assert sympify('2.6e2/17', rational=True) == Rational(260, 17)
    assert sympify('2.6e+2/17', rational=True) == Rational(260, 17)
    assert sympify('2.6e-2/17', rational=True) == Rational(26, 17000)
    assert sympify('2.1+3/4', rational=True) == \
        Rational(21, 10) + Rational(3, 4)
    assert sympify('2.234456', rational=True) == Rational(279307, 125000)
    assert sympify('2.234456e23', rational=True) == 223445600000000000000000
    assert sympify('2.234456e-23', rational=True) == \
        Rational(279307, 12500000000000000000000000000)
    assert sympify('-2.234456e-23', rational=True) == \
        Rational(-279307, 12500000000000000000000000000)
    assert sympify('12345678901/17', rational=True) == \
        Rational(12345678901, 17)
    assert sympify('1/.3 + x', rational=True) == Rational(10, 3) + x
    # make sure longs in fractions work
    assert sympify('222222222222/11111111111') == \
        Rational(222222222222, 11111111111)
    # ... even if they come from repetend notation
    assert sympify('1/.2[123456789012]') == Rational(333333333333, 70781892967)
    # ... or from high precision reals
    assert sympify('.1234567890123456', rational=True) == \
        Rational(19290123283179, 156250000000000)
def test_sympify_Fraction():
    # stdlib fractions.Fraction converts to Rational (skipped if unavailable).
    try:
        import fractions
    except ImportError:
        pass
    else:
        value = sympify(fractions.Fraction(101, 127))
        assert value == Rational(101, 127) and type(value) is Rational
def test_sympify_gmpy():
    # gmpy/gmpy2 mpz and mpq convert to Integer and Rational respectively.
    if HAS_GMPY:
        if HAS_GMPY == 2:
            import gmpy2 as gmpy
        elif HAS_GMPY == 1:
            import gmpy
        value = sympify(gmpy.mpz(1000001))
        assert value == Integer(1000001) and type(value) is Integer
        value = sympify(gmpy.mpq(101, 127))
        assert value == Rational(101, 127) and type(value) is Rational
@conserve_mpmath_dps
def test_sympify_mpmath():
    # mpmath mpf/mpc convert to Float (at the current mpmath precision) and
    # complex Float expressions; the decorator restores mp.dps afterwards.
    value = sympify(mpmath.mpf(1.0))
    assert value == Float(1.0) and type(value) is Float
    mpmath.mp.dps = 12
    assert sympify(
        mpmath.pi).epsilon_eq(Float("3.14159265359"), Float("1e-12")) is True
    assert sympify(
        mpmath.pi).epsilon_eq(Float("3.14159265359"), Float("1e-13")) is False
    mpmath.mp.dps = 6
    assert sympify(
        mpmath.pi).epsilon_eq(Float("3.14159"), Float("1e-5")) is True
    assert sympify(
        mpmath.pi).epsilon_eq(Float("3.14159"), Float("1e-6")) is False
    assert sympify(mpmath.mpc(1.0 + 2.0j)) == Float(1.0) + Float(2.0)*I
def test_sympify2():
    # Objects exposing _sympy_() are converted via that hook.
    class A:
        def _sympy_(self):
            return Symbol("x")**3
    a = A()
    assert _sympify(a) == x**3
    assert sympify(a) == x**3
    assert a == x**3
def test_sympify3():
    # sympify accepts strings (including '^' power); strict _sympify does not.
    assert sympify("x**3") == x**3
    assert sympify("x^3") == x**3
    assert sympify("1/2") == Integer(1)/2
    raises(SympifyError, lambda: _sympify('x**3'))
    raises(SympifyError, lambda: _sympify('1/2'))
def test_sympify_keywords():
    # Python keywords are not valid sympy expressions.
    raises(SympifyError, lambda: sympify('if'))
    raises(SympifyError, lambda: sympify('for'))
    raises(SympifyError, lambda: sympify('while'))
    raises(SympifyError, lambda: sympify('lambda'))
def test_sympify_float():
    # Tiny magnitudes must not underflow to zero (arbitrary-precision Float).
    assert sympify("1e-64") != 0
    assert sympify("1e-20000") != 0
def test_sympify_bool():
    """Test that sympify accepts boolean values
    and that output leaves them unchanged"""
    assert sympify(True) is True
    assert sympify(False) is False
def test_sympyify_iterables():
    # Containers are sympified element-wise; lists stay lists, tuples become
    # Tuple, sets stay sets, dicts sympify keys and values.
    # NOTE(review): "sympyify" in the name looks like a typo, but renaming
    # would change the collected test id.
    ans = [Rational(3, 10), Rational(1, 5)]
    assert sympify(['.3', '.2'], rational=True) == ans
    assert sympify(set(['.3', '.2']), rational=True) == set(ans)
    assert sympify(tuple(['.3', '.2']), rational=True) == Tuple(*ans)
    assert sympify(dict(x=0, y=1)) == {x: 0, y: 1}
    assert sympify(['1', '2', ['3', '4']]) == [S(1), S(2), [S(3), S(4)]]
def test_sympify4():
    # _sympy_() hook returning a bare Symbol also supports arithmetic.
    class A:
        def _sympy_(self):
            return Symbol("x")
    a = A()
    assert _sympify(a)**3 == x**3
    assert sympify(a)**3 == x**3
    assert a == x
def test_sympify_text():
    # Plain words become Symbols; known sympy names resolve to the objects.
    assert sympify('some') == Symbol('some')
    assert sympify('core') == Symbol('core')
    assert sympify('True') is True
    assert sympify('False') is False
    assert sympify('Poly') == Poly
    assert sympify('sin') == sin
def test_sympify_function():
    # Function calls inside the string are evaluated during sympification.
    assert sympify('factor(x**2-1, x)') == -(1 - x)*(x + 1)
    assert sympify('sin(pi/2)*cos(pi)') == -Integer(1)
def test_sympify_poly():
    # Poly instances pass through unchanged (same object, no copy).
    p = Poly(x**2 + x + 1, x)
    assert _sympify(p) is p
    assert sympify(p) is p
def test_sympify_factorial():
    # Postfix '!' and '!!' parse to factorial and factorial2; malformed
    # placements raise SympifyError.
    assert sympify('x!') == factorial(x)
    assert sympify('(x+1)!') == factorial(x + 1)
    assert sympify('(1 + y*(x + 1))!') == factorial(1 + y*(x + 1))
    assert sympify('(1 + y*(x + 1)!)^2') == (1 + y*factorial(x + 1))**2
    assert sympify('y*x!') == y*factorial(x)
    assert sympify('x!!') == factorial2(x)
    assert sympify('(x+1)!!') == factorial2(x + 1)
    assert sympify('(1 + y*(x + 1))!!') == factorial2(1 + y*(x + 1))
    assert sympify('(1 + y*(x + 1)!!)^2') == (1 + y*factorial2(x + 1))**2
    assert sympify('y*x!!') == y*factorial2(x)
    assert sympify('factorial2(x)!') == factorial(factorial2(x))
    raises(SympifyError, lambda: sympify("+!!"))
    raises(SympifyError, lambda: sympify(")!!"))
    raises(SympifyError, lambda: sympify("!"))
    raises(SympifyError, lambda: sympify("(!)"))
    raises(SympifyError, lambda: sympify("x!!!"))
def test_sage():
    # how to effectivelly test for the _sage_() method without having SAGE
    # installed?
    # Only checks the hook exists on core object types.
    assert hasattr(x, "_sage_")
    assert hasattr(Integer(3), "_sage_")
    assert hasattr(sin(x), "_sage_")
    assert hasattr(cos(x), "_sage_")
    assert hasattr(x**2, "_sage_")
    assert hasattr(x + y, "_sage_")
    assert hasattr(exp(x), "_sage_")
    assert hasattr(log(x), "_sage_")
def test_bug496():
    # Underscore-prefixed/suffixed names are valid Symbols.
    assert sympify("a_") == Symbol("a_")
    assert sympify("_a") == Symbol("_a")
@XFAIL
def test_lambda():
    # Expected failure: lambda strings are not (yet) sympified to Lambda.
    x = Symbol('x')
    assert sympify('lambda: 1') == Lambda((), 1)
    assert sympify('lambda x: 2*x') == Lambda(x, 2*x)
    assert sympify('lambda x, y: 2*x+y') == Lambda([x, y], 2*x + y)
def test_lambda_raises():
    # Strict _sympify rejects lambda strings outright.
    with raises(SympifyError):
        _sympify('lambda: 1')
def test_sympify_raises():
    # Syntactically invalid input raises SympifyError.
    raises(SympifyError, lambda: sympify("fx)"))
def test__sympify():
    # _sympify accepts sympy objects, numbers and _sympy_() hooks, but
    # rejects strings and containers (unlike the public sympify).
    x = Symbol('x')
    f = Function('f')
    # positive _sympify
    assert _sympify(x) is x
    assert _sympify(f) is f
    assert _sympify(1) == Integer(1)
    assert _sympify(0.5) == Float("0.5")
    assert _sympify(1 + 1j) == 1.0 + I*1.0
    class A:
        def _sympy_(self):
            return Integer(5)
    a = A()
    assert _sympify(a) == Integer(5)
    # negative _sympify
    raises(SympifyError, lambda: _sympify('1'))
    raises(SympifyError, lambda: _sympify([1, 2, 3]))
def test_sympifyit():
    # The _sympifyit decorator converts the named argument; with a fallback
    # value it returns that on failure, otherwise it raises SympifyError.
    x = Symbol('x')
    y = Symbol('y')
    @_sympifyit('b', NotImplemented)
    def add(a, b):
        return a + b
    assert add(x, 1) == x + 1
    assert add(x, 0.5) == x + Float('0.5')
    assert add(x, y) == x + y
    assert add(x, '1') == NotImplemented
    @_sympifyit('b')
    def add_raises(a, b):
        return a + b
    assert add_raises(x, 1) == x + 1
    assert add_raises(x, 0.5) == x + Float('0.5')
    assert add_raises(x, y) == x + y
    raises(SympifyError, lambda: add_raises(x, '1'))
def test_int_float():
    # Conversion precedence for objects defining __int__/__float__/_sympy_():
    # _sympy_() wins, then __float__ (even when __int__ is also present),
    # then __int__ alone.
    class F1_1(object):
        def __float__(self):
            return 1.1
    class F1_1b(object):
        """
        This class is still a float, even though it also implements __int__().
        """
        def __float__(self):
            return 1.1
        def __int__(self):
            return 1
    class F1_1c(object):
        """
        This class is still a float, because it implements _sympy_()
        """
        def __float__(self):
            return 1.1
        def __int__(self):
            return 1
        def _sympy_(self):
            return Float(1.1)
    class I5(object):
        def __int__(self):
            return 5
    class I5b(object):
        """
        This class implements both __int__() and __float__(), so it will be
        treated as Float in SymPy. One could change this behavior, by using
        float(a) == int(a), but deciding that integer-valued floats represent
        exact numbers is arbitrary and often not correct, so we do not do it.
        If, in the future, we decide to do it anyway, the tests for I5b need to
        be changed.
        """
        def __float__(self):
            return 5.0
        def __int__(self):
            return 5
    class I5c(object):
        """
        This class implements both __int__() and __float__(), but also
        a _sympy_() method, so it will be Integer.
        """
        def __float__(self):
            return 5.0
        def __int__(self):
            return 5
        def _sympy_(self):
            return Integer(5)
    i5 = I5()
    i5b = I5b()
    i5c = I5c()
    f1_1 = F1_1()
    f1_1b = F1_1b()
    f1_1c = F1_1c()
    assert sympify(i5) == 5
    assert isinstance(sympify(i5), Integer)
    assert sympify(i5b) == 5
    assert isinstance(sympify(i5b), Float)
    assert sympify(i5c) == 5
    assert isinstance(sympify(i5c), Integer)
    assert abs(sympify(f1_1) - 1.1) < 1e-5
    assert abs(sympify(f1_1b) - 1.1) < 1e-5
    assert abs(sympify(f1_1c) - 1.1) < 1e-5
    assert _sympify(i5) == 5
    assert isinstance(_sympify(i5), Integer)
    assert _sympify(i5b) == 5
    assert isinstance(_sympify(i5b), Float)
    assert _sympify(i5c) == 5
    assert isinstance(_sympify(i5c), Integer)
    assert abs(_sympify(f1_1) - 1.1) < 1e-5
    assert abs(_sympify(f1_1b) - 1.1) < 1e-5
    assert abs(_sympify(f1_1c) - 1.1) < 1e-5
def test_issue1034():
    # Constructor calls inside the string evaluate to the real object.
    a = sympify('Integer(4)')
    assert a == Integer(4)
    assert a.is_Integer
def test_issue883():
    # Mixed int/float containers sympify element-wise with correct types.
    a = [3, 2.0]
    assert sympify(a) == [Integer(3), Float(2.0)]
    assert sympify(tuple(a)) == Tuple(Integer(3), Float(2.0))
    assert sympify(set(a)) == set([Integer(3), Float(2.0)])
def test_S_sympify():
    # S(...) shorthand is equivalent to sympify(...).
    assert S(1)/2 == sympify(1)/2
    assert (-2)**(S(1)/2) == sqrt(2)*I
def test_issue1689():
    # Complex with zero imaginary part collapses to a plain Float.
    assert srepr(S(1.0 + 0J)) == srepr(S(1.0)) == srepr(Float(1.0))
def test_issue1699_None():
    # None passes through sympify unchanged.
    assert S(None) is None
def test_issue3218():
    # Line continuations inside the input string are tolerated.
    assert sympify("x+\ny") == x + y
def test_issue1889_builtins():
    # User-supplied locals must shadow sympy's own names (here sympy.C).
    C = Symbol('C')
    vars = {}
    vars['C'] = C
    exp1 = sympify('C')
    assert exp1 == C  # Make sure it did not get mixed up with sympy.C
    exp2 = sympify('C', vars)
    assert exp2 == C  # Make sure it did not get mixed up with sympy.C
def test_geometry():
    # Geometry entities survive sympification with their concrete types.
    p = sympify(Point(0, 1))
    assert p == Point(0, 1) and type(p) == Point
    L = sympify(Line(p, (1, 0)))
    assert L == Line((0, 1), (1, 0)) and type(L) == Line
def test_kernS():
    # kernS sympifies without letting automatic simplification restructure
    # the expression (constants must not be distributed into a Mul).
    s = '-1 - 2*(-(-x + 1/x)/(x*(x - 1/x)**2) - 1/(x*(x - 1/x)))'
    # when 1497 is fixed, this no longer should pass: the expression
    # should be unchanged
    assert -1 - 2*(-(-x + 1/x)/(x*(x - 1/x)**2) - 1/(x*(x - 1/x))) == -1
    # sympification should not allow the constant to enter a Mul
    # or else the structure can change dramatically
    ss = kernS(s)
    assert ss != -1 and ss.simplify() == -1
    s = '-1 - 2*(-(-x + 1/x)/(x*(x - 1/x)**2) - 1/(x*(x - 1/x)))'.replace(
        'x', '_kern')
    ss = kernS(s)
    assert ss != -1 and ss.simplify() == -1
    # issue 3588
    assert kernS('Interval(-1,-2 - 4*(-3))') == Interval(-1, 10)
    assert kernS('_kern') == Symbol('_kern')
    assert kernS('E**-(x)') == exp(-x)
    e = 2*(x + y)*y
    assert kernS(['2*(x + y)*y', ('2*(x + y)*y',)]) == [e, (e,)]
    assert kernS('-(2*sin(x)**2 + 2*sin(x)*cos(x))*y/2') == \
        -y*(2*sin(x)**2 + 2*sin(x)*cos(x))/2
def test_issue_3441_3453():
    # Nested containers and constructor calls inside strings sympify cleanly.
    assert S('[[1/3,2], (2/5,)]') == [[Rational(1, 3), 2], (Rational(2, 5),)]
    assert S('[[2/6,2], (2/4,)]') == [[Rational(1, 3), 2], (Rational(1, 2),)]
    assert S('[[[2*(1)]]]') == [[[2]]]
    assert S('Matrix([2*(1)])') == Matrix([2])
def test_issue_2497():
    # The _clash* locals dicts let single-letter sympy names (Q, C, pi) be
    # treated as plain symbols/functions instead of sympy builtins.
    # NOTE: the exec statement below is Python 2 syntax, consistent with this file.
    assert str(S("Q & C", locals=_clash1)) == 'And(C, Q)'
    assert str(S('pi(x)', locals=_clash2)) == 'pi(x)'
    assert str(S('pi(C, Q)', locals=_clash)) == 'pi(C, Q)'
    locals = {}
    exec "from sympy.abc import Q, C" in locals
    assert str(S('C&Q', locals)) == 'And(C, Q)'
| lidavidm/mathics-heroku | venv/lib/python2.7/site-packages/sympy/core/tests/test_sympify.py | Python | gpl-3.0 | 14,157 |
import collections
import os
import unittest
from ansible.modules.identity.keycloak.keycloak_group import *
class KeycloakGroupTestCase(unittest.TestCase):
    """Integration tests for the keycloak_group Ansible module.

    These tests require a running Keycloak server on http://localhost:18081
    reachable with admin/admin credentials. Each test uses its own group name
    so the tests stay independent of one another.
    """

    @staticmethod
    def _group_payload(name, with_roles=True):
        """Build the module argument dict for a group called *name*.

        :param name: group name (unique per test).
        :param with_roles: include the realmRoles/clientRoles keys
            (test_delete_group historically omitted them).
        """
        payload = {
            "username": "admin",
            "password": "admin",
            "realm": "master",
            "url": "http://localhost:18081",
            "name": name,
            "attributes": {
                "attr1": ["value1"],
                "attr2": ["value2"]
            },
            "state": "present",
            "force": False
        }
        if with_roles:
            # NOTE: "uma_athorization" reproduces the original payload verbatim.
            payload["realmRoles"] = ["uma_athorization"]
            payload["clientRoles"] = {"master-realm": ["manage-users"]}
        return payload

    def _assert_group_matches(self, results, expected):
        """Assert the group facts returned by the module match *expected*.

        Only name and attributes are checked; the realmRoles/clientRoles
        assertions were disabled in the original tests and remain unverified.
        """
        group_facts = results["ansible_facts"]["group"]
        self.assertEqual(group_facts["name"], expected["name"],
                         "name: {} : {}".format(group_facts["name"], expected["name"]))
        self.assertDictEqual(group_facts["attributes"], expected["attributes"],
                             "attributes: {} : {}".format(group_facts["attributes"], expected["attributes"]))

    def test_create_group(self):
        # Creating a new group must report changed and echo the definition.
        to_create = self._group_payload("test1")
        results = group(to_create)
        print (str(results))
        self.assertTrue(results['changed'])
        self._assert_group_matches(results, to_create)

    def test_group_not_changed(self):
        # Running the module twice with identical input must be idempotent.
        to_do_not_change = self._group_payload("test2")
        group(to_do_not_change)
        results = group(to_do_not_change)
        self.assertFalse(results['changed'])
        self._assert_group_matches(results, to_do_not_change)

    def test_group_modify_force(self):
        # force=True must re-apply (and report changed) even without diffs.
        payload = self._group_payload("test3")
        group(payload)
        payload["force"] = True
        results = group(payload)
        self.assertTrue(results['changed'])
        self._assert_group_matches(results, payload)

    def test_modify_group(self):
        # Changing the attributes of an existing group must be applied.
        to_change = self._group_payload("test4")
        group(to_change)
        to_change["attributes"] = {
            "attr3": ["value3"]
        }
        results = group(to_change)
        self.assertTrue(results['changed'])
        self._assert_group_matches(results, to_change)

    def test_delete_group(self):
        # state=absent on an existing group must delete it.
        to_delete = self._group_payload("test1", with_roles=False)
        group(to_delete)
        to_delete["state"] = "absent"
        results = group(to_delete)
        self.assertTrue(results['changed'])
        self.assertEqual(results['stdout'], 'deleted', 'group has been deleted')
| Inspq/ansible | test/units/modules/identity/keycloak/test_keycloak_group.py | Python | gpl-3.0 | 7,158 |
# -*- coding: utf-8 -*-
#
# GromacsWrapper documentation build configuration file, created by
# sphinx-quickstart on Tue Jun 23 19:38:56 2009.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# The contents of this file are pickled, so don't put values in the namespace
# that aren't pickleable (module imports are okay, they're removed automatically).
#
# All configuration values have a default value; values that are commented out
# serve to show the default value.
import sys, os
# If your extensions are in another directory, add it here. If the directory
# is relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
#sys.path.append(os.path.abspath('some/directory'))
# General configuration
# ---------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
# Add any paths that contain templates here, relative to this directory.
# -- Sphinx build configuration for the GromacsWrapper documentation ---------
# NOTE(review): this chunk appears to start mid-file; the usual preamble
# (sys.path manipulation, the `extensions` list, ...) is not visible here.

# Directories (relative to this file) that contain page templates.
templates_path = ['.templates']
# The suffix of source filenames.
source_suffix = '.txt'
# The master toctree document.
master_doc = 'index'
# General substitutions.
project = u'GromacsWrapper'
copyright = u'2009-2018, The Authors of GromacsWrapper (see AUTHORS)'
# The default replacements for |version| and |release|, also used in various
# other places throughout the built documents.
#
# Dynamically calculate the version (uses versioneer)
# NOTE: importing the package here means the docs can only be built in an
# environment where `gromacs` itself is importable.
packageversion = __import__('gromacs').__version__
# The short X.Y version.
version = '.'.join(packageversion.split('.')[:2])
# The full version, including alpha/beta/rc tags.
release = packageversion
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# List of directories, relative to source directories, that shouldn't be searched
# for source files.
exclude_trees = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# Options for HTML output
# -----------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinxdoc'
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
html_logo = "logos/GromacsWrapper_logo_200x200.png"
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
html_favicon = "logos/GromacsWrapper_logo_32x32.ico"
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['.static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, the reST sources are included in the HTML build as _sources/<name>.
#html_copy_source = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'GromacsWrapperdoc'

# Options for LaTeX output
# ------------------------

# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, document class [howto/manual]).
latex_documents = [
    ('index', 'GromacsWrapper.tex', u'GromacsWrapper Documentation',
     u'Oliver Beckstein', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True

# Options for ext.intersphinx
# ---------------------------

# intersphinx: reference standard lib and RecSQL
# http://sphinx.pocoo.org/latest/ext/intersphinx.html
# NOTE(review): the docs.scipy.org URLs are the legacy locations for the
# NumPy/SciPy docs -- consider updating to numpy.org/doc/stable et al.
intersphinx_mapping = {'https://docs.python.org/': None,
                       'https://docs.scipy.org/doc/numpy/': None,
                       'https://docs.scipy.org/doc/scipy/reference/': None,
                       }

# Options for ext.autodoc
# -----------------------

# see http://sphinx.pocoo.org/ext/autodoc.html
# This value selects what content will be inserted into the main body of an autoclass directive.
# "class", "init", "both"
autoclass_content = "both"
| PicoCentauri/GromacsWrapper | doc/sphinx/source/conf.py | Python | gpl-3.0 | 6,547 |
import time
from menu.ncolor import *
from menu.showMainMenu import *
from command.shell import *
from write.dnsmasq_write import *
class Sub_Menu():
    """Interactive sub-menus used by QuickAp to configure the access point:
    SSID, security type, password, DHCP, DNS redirection and the network
    interfaces.  All methods run a blocking input() loop and return the
    (possibly updated) configuration value(s)."""

    dns_message = """ you can add a redirect entry in this menu or edit the dnsmasq configuration
    file located in""" + color.BLEU + """ '/etc/redirect/dnsmasq.host'\n """ + color.ENDC

    #the user choose a new name. the input of the user will be put in the user
    #object
    def nameMenu(ssid):
        """Show the SSID menu; return the new (or unchanged) access-point name."""
        while True:
            print ("\nthe current name of the access point is " + color.VERT + "'" + ssid + "'" + color.ENDC)
            print("")
            print("%49s" % ("current options" + color.ENDC))
            print("%58s" % (color.DARKCYAN + "-----------------------" + color.ENDC))
            print("%48s" % ("(1) choose a new name."))
            print("%41s" % ("(5) main menu.\n"))
            while True:
                NameChoice = input(color.BLEU + "name > " + color.ENDC)
                if NameChoice == "1":
                    print(color.DARKYELLOW + "enter the new name of the ap..." + color.ENDC)
                    ssid = input(color.BLEU + "name > " + color.DARKROUGE + "new name > " + color.ENDC)
                    print (color.VERT + "[+]" + color.ENDC + " changing the name for " + color.VERT + "'" + ssid + "'" + color.ENDC)
                    time.sleep(1)
                    return ssid
                elif NameChoice == "5":
                    print(color.VERT + "[+]" + color.ENDC + " going back to main menu.")
                    time.sleep(0.3)
                    return ssid
                else:
                    print(color.ROUGE + "[*]" + color.ENDC + " please enter a valid option!")

    #taking the crypt variable object to check if an encryption have been chosen. If not
    #the user is ask to choose an encryption type. The PassHandle function will be called
    #to verify if the password respect the security exigence
    def PassordMenu(crypt, password):
        """Show the password menu; return the new (or unchanged) password.

        A password may only be chosen when a security type (``crypt``) other
        than "N/A" is already selected; the candidate is validated with
        :meth:`PassHandle`.
        """
        while True:
            if crypt != "N/A":
                print("")
                print("%48s" % ("current options" + color.ENDC))
                print("%56s" % (color.DARKCYAN + "-----------------------" + color.ENDC))
                print("%48s" % ("(1) choose new password."))
                print("%39s" % ("(5) main menu.\n"))
                while True:
                    PasswordChoice = input(color.BLEU + "password > " + color.ENDC)
                    if PasswordChoice == "1":
                        print(color.DARKYELLOW + "enter the new password for the ap..." + color.ENDC)
                        error = False
                        # loop until PassHandle accepts the candidate password
                        while not error:
                            password = input(color.BLEU + "password > " + color.DARKROUGE + "new password > " + color.ENDC)
                            error = Sub_Menu.PassHandle(crypt, password)
                        print (color.VERT + "[+]" + color.ENDC + " changing the password to " + color.VERT + "'" + password + "'" + color.ENDC)
                        time.sleep(1)
                        return password
                    elif PasswordChoice == "5":
                        print(color.VERT + "[+]" + color.ENDC + " going back to main menu.")
                        time.sleep(0.3)
                        return password
                    else:
                        print(color.ROUGE + "[*]" + color.ENDC + " please enter a valid option!")
            else:
                print(color.ROUGE + "[*]" + color.ENDC + " please select a security type if you want to choose a password.")
                time.sleep(1.5)
                return password

    #take the security type and password in parameter. If a new password is chosen the old
    #password gonna be reset to zero.
    def securityMenu(crypt, password):
        """Show the security-type menu; return (crypt, password).

        Choosing a new security type prompts for a matching password via
        :meth:`AskPassword`; choosing "no security" resets both to "N/A".
        """
        while True:
            security_text = color.BLEU + color.BOLD + """
    -WPA2 """ + color.ENDC + """is the most advanced wifi security protocol curently used by most
    router by default. The passphrase must have a minimum of 8 character.""" + color.BLEU + color.BOLD + """\n
    -WPA""" + color.ENDC + """ wpa is older and less secure than wpa2. it is using an older
    encryption (TKIP). Like wpa2 you need to put at least 8 charactere. """ + color.BLEU + color.BOLD + """\n
    -WEP""" + color.ENDC + """ wep is deprecated and can be very easely cracked. your wep key must
    be at least 10 charactere and only contain hexadecimal character."""
            print(security_text)
            print ("\n - the current security of the access point is " + color.VERT + "'" + crypt + "'" + color.ENDC)
            print("")
            print("%53s" % ("current options" + color.ENDC))
            print("%61s" % (color.DARKCYAN + "-----------------------" + color.ENDC))
            print("%38s" % ("(1) WPA2."))
            print("%44s" % ("(2) WPA (TKIP)."))
            print("%47s" % ("(3) WEP (64 bits)."))
            print("%45s" % ("(4) no security."))
            print("%44s" % ("(5) main menu.\n"))
            while True:
                NameChoice = input(color.BLEU + "security > " + color.ENDC)
                pwd = ""
                if NameChoice == "1":
                    Sec = "WPA2"
                    crypt, password = Sub_Menu.AskPassword(Sec, pwd)
                    return crypt, password
                elif NameChoice == "2":
                    Sec = "WPA"
                    crypt, password = Sub_Menu.AskPassword(Sec, pwd)
                    return crypt, password
                elif NameChoice == "3":
                    Sec = "WEP"
                    crypt, password = Sub_Menu.AskPassword(Sec, pwd)
                    return crypt, password
                elif NameChoice == "4":
                    print (color.VERT + "[+]" + color.ENDC + " deleting the " + color.VERT + crypt + color.ENDC + " security.")
                    time.sleep(1)
                    crypt = "N/A"
                    password = "N/A"
                    return crypt, password
                elif NameChoice == "5":
                    print(color.VERT + "[+]" + color.ENDC + " going back to main menu.")
                    time.sleep(0.3)
                    return crypt, password
                else:
                    print(color.ROUGE + "[*]" + color.ENDC + " please enter a valid option!")

    #giving the option to decide if the dhcp server will be on or off. It will also
    #give the option to change the dhcp pool adresse.
    def dhcpMenu(dhcp):
        """Show the DHCP menu; return the new dhcp state ("ON" or "N/A")."""
        while True:
            #putting some information for the dhcp in variable
            couleur = color.Color_check(dhcp)
            dhcpPool = "10.0.0.10-250"
            dhcpLease = "12h"
            # show the appropriate option in the menu
            if dhcp == "N/A":
                dhcpOPTION = "(1) set dhcp server to" + color.VERT + " 'on'" + color.ENDC
            else:
                dhcpOPTION = "%47s" % " (1) set dhcp server to" + color.ROUGE + " 'off'" + color.ENDC
            print ("""\n the dhcp server should always be on. If the dhcp is set to 'N/A' the client
    will need to have is adresse, gateway and dns set manualy.\n""")
            print (color.BOLD + " dhcp status: " + color.ENDC + couleur + "'" + dhcp + "'" + color.ENDC)
            print (color.BOLD + " dhcp pool: " + color.ENDC + color.BLEU + dhcpPool + color.ENDC)
            print (color.BOLD + " dhcp lease: " + color.ENDC + color.BLEU + dhcpLease + color.ENDC)
            print("")
            print("%49s" % ("current options" + color.ENDC))
            print("%57s" % (color.DARKCYAN + "-----------------------" + color.ENDC))
            print("%61s" % ( dhcpOPTION))
            print("%40s" % ("(5) main menu.\n"))
            while True:
                DhcpChoice = input(color.BLEU + "dhcp > " + color.ENDC)
                #check the last dhcp value and take the decision to put it to on or off
                if DhcpChoice == "1":
                    if dhcp == "N/A":
                        dhcp = "ON"
                    else:
                        dhcp = "N/A"
                    print (color.VERT + "[+]" + color.ENDC + " changing dhcp status to " + color.VERT + "'" + dhcp + "'" + color.ENDC)
                    time.sleep(1)
                    return dhcp
                #if this option is chosen to go back to main menu
                elif DhcpChoice == "5":
                    print(color.VERT + "[+]" + color.ENDC + " going back to main menu.")
                    time.sleep(0.3)
                    return dhcp
                else:
                    print(color.ROUGE + "[*]" + color.ENDC + " please enter a valid option!")

    #show the menu for chosing dns option. The dns object can be change to on or N/A.
    # I am planing to give the user the choice to put their dns redirect entry directly
    # in the program and in the config file.
    def dnsMenu(dns):
        """Show the DNS menu; return the new dns state ("ON" or "N/A").

        Also lets the user add redirect entries to dnsmasq.host (option 2)
        or delete existing ones (option 3).
        """
        while True:
            couleur = color.Color_check(dns)
            # show the appropriate option in the menu
            if dns == "N/A":
                dnsOPTION = "(1) set dns server to" + color.VERT + " 'on' " + color.ENDC
            else:
                dnsOPTION = "(1) set dns server to" + color.ROUGE + " 'off'" + color.ENDC
            print ("""\n if dns fowarding is set to 'on' dnsmasq will start the dns server and
    start fowarding all the request to the google dns server. When the dns
    server is active its possible to redirect the client to the ip adresse
    of your choice """)
            print (color.BOLD + "\n dns status:" + color.ENDC + couleur + " '" + dns + "'" + color.ENDC)
            print("%51s" % ("current options" + color.ENDC))
            print("%59s" % (color.DARKCYAN + "-----------------------" + color.ENDC))
            print("%63s" % (dnsOPTION))
            print("%47s" % ("(2) redirect client."))
            print("%46s" % ("(3) cleaning entry."))
            print("%42s" % ("(5) main menu.\n"))
            while True:
                DnsChoice = input(color.BLEU + "dns > " + color.ENDC)
                # chained elif keeps the "please enter a valid option" fallback
                # attached to the whole choice set, not just option 5
                if DnsChoice == "1":
                    if dns == "N/A":
                        dns = "ON"
                    else:
                        dns = "N/A"
                    print (color.VERT + "[+]" + color.ENDC + " changing dns status to " + color.VERT + "'" + dns + "'" + color.ENDC)
                    time.sleep(1)
                    return dns
                elif DnsChoice == "2":
                    while True:
                        # read the dnsmasq.host file and print the message.
                        print(Sub_Menu.dns_message)
                        read_dnsmasq_host()
                        # give the user de choice to do a new entry.
                        print(color.DARKYELLOW + "\ndo you want to write an entry in the file? (y/n)" + color.ENDC)
                        choice = input(color.BLEU + "dns > " + color.ENDC)
                        # if choice is yes, we ask the user to enter the entry withthe spicified format.
                        if choice == "y":
                            error = False
                            print (color.DARKCYAN + "enter the new entry with the adresse and the domain separated only by a single")
                            print("space. Example: (192.168.1.60 www.google.com)")
                            # if an error is detected in the checkup of the pattern, we stay in the loop.
                            while not error:
                                entry = input(color.BLEU + "dns > " + color.DARKROUGE + "entry > " + color.ENDC)
                                error = Entry_handeling(entry)
                        else:
                            break
                    break
                elif DnsChoice == "3":
                    # handle the delete of the entry.
                    delete_handeling()
                    break
                elif DnsChoice == "5":
                    print(color.VERT + "[+]" + color.ENDC + " going back to main menu.")
                    time.sleep(0.3)
                    return dns
                else:
                    print(color.ROUGE + "[*]" + color.ENDC + " please enter a valid option!")

    #this function is allowing the user to chose the in and out interface. When the
    #interface will be chosen it gonna allow the user to see the status. A refresh
    #option will be included
    def interfaceMenu(inside, outside):
        """Show the interface-selection menu; return (inside, outside).

        ``inside`` is the wifi card used by hostapd, ``outside`` the uplink
        interface used to share the connection.
        """
        print("""\n Quick ap will use the interface that you have selected to apply the ip tables
    rules on them and make the hotspot working. The inside interface is the wifi
    card that will be use whith hostapd for creating the hotspot. The outside
    interface will be use to share the connection with the victims. You need to
    make sure that the outside interface have an addresse if you want to share
    the Internet. \n""")
        while True:
            #put genral status of the interface in the variables and return false if interface is down.
            addresse_in, addresse_out, check_in, check_out = command.nic_selectedStatus(inside, outside)
            #color status of the interface are put into varirables.
            color_in = color.color_checkINT(inside, check_in)
            color_out = color.color_checkINT(outside, check_out)
            #show the status of the selected interface with the help of the method nic_selected
            print("%50s" % (" interface status" + color.ENDC))
            print("%59s" % (color.DARKCYAN + "=======================" + color.ENDC))
            print("\t\t\t [" + color_in + inside + color.ENDC + "]" + " <-> " + addresse_in)
            print("\t\t\t [" + color_out + outside + color.ENDC + "]" + " <-> " + addresse_out + "\n")
            print("%50s" % ("current options" + color.ENDC))
            print("%59s" % (color.DARKCYAN + "-----------------------" + color.ENDC))
            print("%48s" % ("(1) choose interface."))
            print("%39s" % ("(2) refresh."))
            print("%41s" % ("(5) main menu."))
            #first menu choice.
            interfaceChoiceFirst = input(color.BLEU + "\nnetwork > " + color.ENDC)
            if interfaceChoiceFirst == "1":
                Menu = True
                while Menu:
                    print("%52s" % (" available interface" + color.ENDC))
                    print("%59s" % (color.VERT + "=======================" + color.ENDC))
                    #looping to all interface disponible and show their status and if they are selected
                    interface = command.nic_status(inside, outside)
                    print("")
                    print("%51s" % ("current options" + color.ENDC))
                    print("%59s" % (color.VERT + "-----------------------" + color.ENDC))
                    print("%49s" % ("(1) choose inside nic."))
                    print("%50s" % ("(2) choose outside nic."))
                    print("%48s" % ("(3) deselect all nic."))
                    print("%39s" % ("(4) refresh."))
                    print("%41s" % ("(5) main menu."))
                    interfaceChoice = input(color.BLEU + "\nnetwork > " + color.ENDC + color.DARKROUGE + "nic > " + color.ENDC)
                    if interfaceChoice == "1":
                        print(color.DARKYELLOW + "enter the name of the inside interface that you want to select..." + color.ENDC)
                        insideChoice = input(color.BLEU + "network > " + color.ENDC + color.DARKROUGE + "inside > " + color.ENDC)
                        #checking in the list of interface to see if the interface is in the choice
                        interface_check = command.choice_check(insideChoice, interface)
                        #make sure that the interface selected is not the same has the outside interface.
                        duplicate = command.nic_duplicate("inside", insideChoice, "", inside, outside)
                        # if the duplicate is detected the statement continu make the program skip the conditions
                        # and go back to the start of the loop
                        if duplicate:
                            continue
                        #if interface_check return false, return the user to main menu.
                        elif not interface_check:
                            print(color.ROUGE + "[*]" + color.ENDC + " please enter a valid interface. Press 'refresh' to scan interface again.")
                            time.sleep(1.5)
                            print("\n")
                        #run sevral check to see if the choice is wireless compactible etc... If the choice is
                        #not accepted the last_choice is returned by wifi check
                        else:
                            last_choice_in = inside
                            inside = command.wifi_check(insideChoice, last_choice_in)
                    elif interfaceChoice == "2":
                        print(color.DARKYELLOW + "enter the name of the outside interface that you want to select..." + color.ENDC)
                        outsideChoice = input(color.BLEU + "network > " + color.ENDC + color.DARKROUGE + "outside > " + color.ENDC)
                        interface_check = command.choice_check(outsideChoice, interface)
                        duplicate = command.nic_duplicate("outside", "", outsideChoice, inside, outside)
                        if duplicate:
                            continue
                        elif not interface_check:
                            print(color.ROUGE + "[*]" + color.ENDC + " please enter a valid interface. Press 'refresh' to scan interface again.")
                            time.sleep(1.5)
                            print("\n")
                        else:
                            outside = command.out_check(outsideChoice)
                    elif interfaceChoice == "3":
                        inside = "N/A"
                        outside = "N/A"
                        print(color.VERT + "[+]" + color.ENDC + " unselecting all network interface!")
                        time.sleep(1)
                        print("\n")
                    elif interfaceChoice == "4":
                        print (color.VERT + "[+] " + color.ENDC + "refreshing!")
                        time.sleep(0.3)
                    elif interfaceChoice == "5":
                        print (color.VERT + "[+] " + color.ENDC + "main menu.")
                        time.sleep(0.3)
                        return inside, outside
                    else:
                        print (color.ROUGE + "[-] " + color.ENDC + "please enter a valid option!\n")
                        time.sleep(0.3)
            elif interfaceChoiceFirst == "2":
                print (color.VERT + "[+] " + color.ENDC + "refreshing!")
                time.sleep(0.3)
            elif interfaceChoiceFirst == "5":
                print (color.VERT + "[+] " + color.ENDC + "main menu")
                time.sleep(0.3)
                return inside, outside
            else:
                print(color.ROUGE + "[-] " + color.ENDC + "please enter a valid choice!\n")
                time.sleep(0.3)
        return inside, outside

    #this function take in parameter the security type and the actual password. If the
    #wpa key or the wep key is incorrect it gonna show an error message and send true or
    #false depending on the situation
    def PassHandle(handleSEC, handlePASS):
        """Validate ``handlePASS`` against the rules of security type ``handleSEC``.

        Returns True when the password is acceptable, False otherwise.
        WPA/WPA2 passphrases need at least 8 characters; WEP keys need
        exactly 10 hexadecimal digits (case-insensitive).
        """
        pass_length = len(handlePASS)
        # full hexadecimal alphabet -- the previous set was missing '0',
        # which wrongly rejected any valid WEP key containing a zero
        allowed = set("0123456789abcdef")
        if handleSEC == "WPA2" or handleSEC == "WPA":
            if pass_length < 8:
                print (color.ROUGE + "[*]" + color.ENDC + " the wpa password must be at least 8 charactere!")
                return False
            else:
                return True
        elif handleSEC == "WEP":
            # lower-case before the check so upper-case hex digits are accepted too
            if set(handlePASS.lower()) <= allowed and pass_length == 10:
                return True
            else:
                print (color.ROUGE + "[*]" + color.ENDC + " the wep password must have 10 charactere and use HEX only")
                return False

    #this function take the secutiry type and password in parameter and it check with a loop
    #if the password is following the rule.
    def AskPassword(Sec, pwd):
        """Prompt for a password for security type ``Sec`` until it validates.

        Returns (Sec, pwd) once :meth:`PassHandle` accepts the password.
        """
        error = False
        print(color.DARKYELLOW + "enter the new " + Sec + " password for the ap..." + color.ENDC)
        while not error:
            pwd = input(color.BLEU + "security > " + color.DARKROUGE + Sec + " > " + color.ENDC)
            error = Sub_Menu.PassHandle(Sec, pwd)
        print (color.VERT + "[+]" + color.ENDC + " changing the security to " + color.VERT + "'" + Sec + "'" + color.ENDC)
        print (color.VERT + "[+]" + color.ENDC + " changing the password to " + color.VERT + "'" + pwd + "'" + color.ENDC)
        time.sleep(1)
        return Sec, pwd
| blackice5514/QuickAp | menu/showSubMenu.py | Python | gpl-3.0 | 20,613 |
# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
from __future__ import with_statement
import os
import re
import threading
import datetime
import traceback
import sickbeard
from common import SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, Quality, SEASON_RESULT, MULTI_EP_RESULT
from sickbeard import logger, db, show_name_helpers, exceptions, helpers
from sickbeard import sab
from sickbeard import nzbget
from sickbeard import clients
from sickbeard import history
from sickbeard import notifiers
from sickbeard import nzbSplitter
from sickbeard import ui
from sickbeard import encodingKludge as ek
from sickbeard import failed_history
from sickbeard.exceptions import ex
from sickbeard.providers.generic import GenericProvider
from sickbeard.blackandwhitelist import BlackAndWhiteList
from sickbeard import common
def _downloadResult(result):
    """
    Downloads a result to the appropriate black hole folder.

    Returns a bool representing success.

    result: SearchResult instance to download.
    """

    resProvider = result.provider
    # identity comparison is the correct way to test for None
    if resProvider is None:
        logger.log(u"Invalid provider name - this is a coding error, report it please", logger.ERROR)
        return False

    # nzbs with an URL can just be downloaded from the provider
    if result.resultType == "nzb":
        newResult = resProvider.downloadResult(result)
    # if it's an nzb data result
    elif result.resultType == "nzbdata":

        # get the final file path to the nzb
        fileName = ek.ek(os.path.join, sickbeard.NZB_DIR, result.name + ".nzb")

        logger.log(u"Saving NZB to " + fileName)

        newResult = True

        # save the data to disk
        try:
            with ek.ek(open, fileName, 'w') as fileOut:
                fileOut.write(result.extraInfo[0])

            helpers.chmodAsParent(fileName)

        # "as" form works on Python 2.6+ and 3, unlike the legacy "except X, e"
        except EnvironmentError as e:
            logger.log(u"Error trying to save NZB to black hole: " + ex(e), logger.ERROR)
            newResult = False
    elif resProvider.providerType == "torrent":
        newResult = resProvider.downloadResult(result)
    else:
        logger.log(u"Invalid provider type - this is a coding error, report it please", logger.ERROR)
        newResult = False

    return newResult
def snatchEpisode(result, endStatus=SNATCHED):
    """
    Contains the internal logic necessary to actually "snatch" a result that
    has been found.

    Returns a bool representing success.

    result: SearchResult instance to be snatched.
    endStatus: the episode status that should be used for the episode object once it's snatched.
    """

    if result is None:
        return False

    result.priority = 0  # -1 = low, 0 = normal, 1 = high
    if sickbeard.ALLOW_HIGH_PRIORITY:
        # if it aired recently make it high priority
        for curEp in result.episodes:
            if datetime.date.today() - curEp.airdate <= datetime.timedelta(days=7):
                result.priority = 1
    # proper/repack releases are snatched with the SNATCHED_PROPER status
    if re.search('(^|[\. _-])(proper|repack)([\. _-]|$)', result.name, re.I) != None:
        endStatus = SNATCHED_PROPER

    # NZBs can be sent straight to SAB or saved to disk
    if result.resultType in ("nzb", "nzbdata"):
        if sickbeard.NZB_METHOD == "blackhole":
            dlResult = _downloadResult(result)
        elif sickbeard.NZB_METHOD == "sabnzbd":
            dlResult = sab.sendNZB(result)
        elif sickbeard.NZB_METHOD == "nzbget":
            is_proper = True if endStatus == SNATCHED_PROPER else False
            dlResult = nzbget.sendNZB(result, is_proper)
        else:
            logger.log(u"Unknown NZB action specified in config: " + sickbeard.NZB_METHOD, logger.ERROR)
            dlResult = False

    # TORRENTs can be sent to clients or saved to disk
    elif result.resultType == "torrent":
        # torrents are saved to disk when blackhole mode
        if sickbeard.TORRENT_METHOD == "blackhole":
            dlResult = _downloadResult(result)
        else:
            # make sure we have the torrent file content (magnet links have no
            # file content to fetch)
            if not result.content:
                if not result.url.startswith('magnet'):
                    result.content = result.provider.getURL(result.url)
                    if not result.content:
                        logger.log(
                            u"Torrent content failed to download from " + result.url, logger.ERROR
                        )
            # Snatches torrent with client
            client = clients.getClientIstance(sickbeard.TORRENT_METHOD)()
            dlResult = client.sendTORRENT(result)
    else:
        logger.log(u"Unknown result type, unable to download it", logger.ERROR)
        dlResult = False

    if not dlResult:
        return False

    if sickbeard.USE_FAILED_DOWNLOADS:
        failed_history.logSnatch(result)

    ui.notifications.message('Episode snatched', result.name)

    history.logSnatch(result)

    # don't notify when we re-download an episode
    sql_l = []
    for curEpObj in result.episodes:
        with curEpObj.lock:
            # first-best matches are archived with SNATCHED_BEST instead of
            # the caller-supplied end status
            if isFirstBestMatch(result):
                curEpObj.status = Quality.compositeStatus(SNATCHED_BEST, result.quality)
            else:
                curEpObj.status = Quality.compositeStatus(endStatus, result.quality)

            sql_l.append(curEpObj.get_sql())

            if curEpObj.status not in Quality.DOWNLOADED:
                notifiers.notify_snatch(curEpObj._format_pattern('%SN - %Sx%0E - %EN - %QN'))

    # batch all episode status updates into a single DB transaction
    if len(sql_l) > 0:
        myDB = db.DBConnection()
        myDB.mass_action(sql_l)

    return True
def filter_release_name(name, filter_words):
    """
    Filters out results based on filter_words

    name: name to check
    filter_words : Words to filter on, separated by comma

    Returns: False if the release name is OK, True if it contains one of the filter_words
    """

    if filter_words:
        # avoid shadowing the builtin `filter` with the loop variable
        filters = [re.compile('.*%s.*' % filter_word.strip(), re.I) for filter_word in filter_words.split(',')]
        for regfilter in filters:
            if regfilter.search(name):
                logger.log(u"" + name + " contains pattern: " + regfilter.pattern, logger.DEBUG)
                return True

    return False
def pickBestResult(results, show, quality_list=None):
    """Pick the single best SearchResult from *results* for *show*.

    Rejects results that fail the anime black/white list, are not in
    *quality_list* (when given), match the show's ignore/require word
    filters, or have previously failed.  Among the survivors the highest
    quality wins; ties are broken in favour of proper/repack releases,
    non-internal releases, and x264 over xvid.

    Returns the chosen result, or None if everything was rejected.
    """
    logger.log(u"Picking the best result out of " + str([x.name for x in results]), logger.DEBUG)

    # build the black And white list
    bwl = None
    if show:
        if show.is_anime:
            bwl = BlackAndWhiteList(show.indexerid)
    else:
        logger.log("Could not create black and white list no show was given", logger.DEBUG)

    # find the best result for the current episode
    bestResult = None
    for cur_result in results:
        logger.log("Quality of " + cur_result.name + " is " + Quality.qualityStrings[cur_result.quality])

        if bwl:
            if not bwl.is_valid(cur_result):
                logger.log(cur_result.name+" does not match the blacklist or the whitelist, rejecting it. Result: " + bwl.get_last_result_msg(), logger.MESSAGE)
                continue

        if quality_list and cur_result.quality not in quality_list:
            logger.log(cur_result.name + " is a quality we know we don't want, rejecting it", logger.DEBUG)
            continue

        if show.rls_ignore_words and filter_release_name(cur_result.name, show.rls_ignore_words):
            logger.log(u"Ignoring " + cur_result.name + " based on ignored words filter: " + show.rls_ignore_words,
                       logger.MESSAGE)
            continue

        if show.rls_require_words and not filter_release_name(cur_result.name, show.rls_require_words):
            logger.log(u"Ignoring " + cur_result.name + " based on required words filter: " + show.rls_require_words,
                       logger.MESSAGE)
            continue

        if sickbeard.USE_FAILED_DOWNLOADS and failed_history.hasFailed(cur_result.name, cur_result.size,
                                                                       cur_result.provider.name):
            logger.log(cur_result.name + u" has previously failed, rejecting it")
            continue

        # higher quality always replaces the current best (unknown quality never does)
        if not bestResult or bestResult.quality < cur_result.quality and cur_result.quality != Quality.UNKNOWN:
            bestResult = cur_result

        elif bestResult.quality == cur_result.quality:
            # equal quality: prefer proper/repack, then non-internal, then x264 over xvid
            if "proper" in cur_result.name.lower() or "repack" in cur_result.name.lower():
                bestResult = cur_result
            elif "internal" in bestResult.name.lower() and "internal" not in cur_result.name.lower():
                bestResult = cur_result
            elif "xvid" in bestResult.name.lower() and "x264" in cur_result.name.lower():
                logger.log(u"Preferring " + cur_result.name + " (x264 over xvid)")
                bestResult = cur_result

    if bestResult:
        logger.log(u"Picked " + bestResult.name + " as the best", logger.DEBUG)
    else:
        logger.log(u"No result picked.", logger.DEBUG)

    return bestResult
def isFinalResult(result):
    """
    Checks if the given result is good enough quality that we can stop searching for other ones.

    If the result is the highest quality in both the any/best quality lists then this function
    returns True, if not then it's False
    """

    logger.log(u"Checking if we should keep searching after we've found " + result.name, logger.DEBUG)

    show_obj = result.episodes[0].show

    bwl = None
    if show_obj.is_anime:
        bwl = BlackAndWhiteList(show_obj.indexerid)

    any_qualities, best_qualities = Quality.splitQuality(show_obj.quality)

    # if there is a redownload that's higher than this then we definitely need to keep looking
    if best_qualities and result.quality < max(best_qualities):
        return False

    # if it does not match the shows black and white list its no good
    elif bwl and not bwl.is_valid(result):
        return False

    # if there's no redownload that's higher (above) and this is the highest initial download then we're good
    elif any_qualities and result.quality in any_qualities:
        return True

    elif best_qualities and result.quality == max(best_qualities):

        # if this is the best redownload but we have a higher initial download then keep looking
        if any_qualities and result.quality < max(any_qualities):
            return False

        # if this is the best redownload and we don't have a higher initial download then we're done
        else:
            return True

    # if we got here than it's either not on the lists, they're empty, or it's lower than the highest required
    else:
        return False
def isFirstBestMatch(result):
    """
    Checks if the given result is a best quality match and if we want to archive the episode on first match.
    """

    logger.log(u"Checking if we should archive our first best quality match for for episode " + result.name,
               logger.DEBUG)

    show = result.episodes[0].show
    _, best_qualities = Quality.splitQuality(show.quality)

    # archive on first match only when the show asks for it and this result
    # hits one of the configured "best" qualities
    return bool(best_qualities and show.archive_firstmatch and result.quality in best_qualities)
def wantedEpisodes(show, fromDate):
    """Return the episodes of *show* airing after *fromDate* that we still want.

    An episode is wanted when its status is WANTED, or when it was
    snatched/downloaded at a quality below the highest configured quality.
    Each returned episode object gets a ``wantedQuality`` attribute listing
    the qualities that would improve on its current one.
    """
    anyQualities, bestQualities = common.Quality.splitQuality(show.quality)  # @UnusedVariable
    allQualities = list(set(anyQualities + bestQualities))

    logger.log(u"Seeing if we need anything from " + show.name)

    myDB = db.DBConnection()
    if show.air_by_date:
        sqlResults = myDB.select(
            "SELECT ep.status, ep.season, ep.episode FROM tv_episodes ep, tv_shows show WHERE season != 0 AND ep.showid = show.indexer_id AND show.paused = 0 AND ep.airdate > ? AND ep.showid = ? AND show.air_by_date = 1",
            [fromDate.toordinal(), show.indexerid])
    else:
        sqlResults = myDB.select(
            "SELECT status, season, episode FROM tv_episodes WHERE showid = ? AND season > 0 and airdate > ?",
            [show.indexerid, fromDate.toordinal()])

    # check through the list of statuses to see if we want any
    wanted = []
    for result in sqlResults:
        curCompositeStatus = int(result["status"])
        curStatus, curQuality = common.Quality.splitCompositeStatus(curCompositeStatus)

        # NOTE(review): the guard checks bestQualities but the maximum is taken
        # over allQualities -- verify this asymmetry is intentional
        if bestQualities:
            highestBestQuality = max(allQualities)
        else:
            highestBestQuality = 0

        # if we need a better one then say yes
        if (curStatus in (common.DOWNLOADED, common.SNATCHED, common.SNATCHED_PROPER,
                          common.SNATCHED_BEST) and curQuality < highestBestQuality) or curStatus == common.WANTED:

            epObj = show.getEpisode(int(result["season"]), int(result["episode"]))
            epObj.wantedQuality = [i for i in allQualities if (i > curQuality and i != common.Quality.UNKNOWN)]
            wanted.append(epObj)
    return wanted
def searchForNeededEpisodes():
    """Run the daily (RSS) search across all active daily-search providers.

    Updates each provider's cache in a separate thread, then queries every
    provider for all wanted episodes of unpaused shows, keeping the single
    best result per episode across providers.

    Returns an iterable of the chosen SearchResult objects.
    """
    foundResults = {}

    didSearch = False

    origThreadName = threading.currentThread().name
    threads = []

    show_list = sickbeard.showList
    fromDate = datetime.date.fromordinal(1)
    episodes = []

    for curShow in show_list:
        if curShow.paused:
            continue
        episodes.extend(wantedEpisodes(curShow, fromDate))

    providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive() and x.enable_daily]
    for curProvider in providers:
        # spawn separate threads for each provider so we don't need to wait for providers with slow network operation
        threads.append(threading.Thread(target=curProvider.cache.updateCache, name=origThreadName +
                                                                                   " :: [" + curProvider.name + "]"))
        # start the thread we just created
        threads[-1].start()

    # wait for all threads to finish
    for t in threads:
        t.join()

    for curProvider in providers:
        threading.currentThread().name = origThreadName + " :: [" + curProvider.name + "]"
        curFoundResults = curProvider.searchRSS(episodes)
        didSearch = True

        # pick a single result for each episode, respecting existing results
        for curEp in curFoundResults:
            if curEp.show.paused:
                logger.log(
                    u"Show " + curEp.show.name + " is paused, ignoring all RSS items for " + curEp.prettyName(),
                    logger.DEBUG)
                continue

            # find the best result for the current episode
            # NOTE(review): this quality-only scan is immediately overwritten
            # by the pickBestResult call below -- looks like dead code
            bestResult = None
            for curResult in curFoundResults[curEp]:
                if not bestResult or bestResult.quality < curResult.quality:
                    bestResult = curResult

            bestResult = pickBestResult(curFoundResults[curEp], curEp.show)

            # if all results were rejected move on to the next episode
            if not bestResult:
                logger.log(u"All found results for " + curEp.prettyName() + " were rejected.", logger.DEBUG)
                continue

            # if it's already in the list (from another provider) and the newly found quality is no better then skip it
            if curEp in foundResults and bestResult.quality <= foundResults[curEp].quality:
                continue

            # filter out possible bad torrents from providers such as ezrss
            if bestResult.resultType == "torrent" and sickbeard.TORRENT_METHOD != "blackhole":
                bestResult.content = None
                if not bestResult.url.startswith('magnet'):
                    bestResult.content = bestResult.provider.getURL(bestResult.url)
                    if not bestResult.content:
                        continue

            foundResults[curEp] = bestResult

    threading.currentThread().name = origThreadName

    if not didSearch:
        logger.log(
            u"No NZB/Torrent providers found or enabled in the SickGear config for daily searches. Please check your settings.",
            logger.ERROR)

    return foundResults.values()
def searchProviders(show, episodes, manualSearch=False):
foundResults = {}
finalResults = []
didSearch = False
# build name cache for show
sickbeard.name_cache.buildNameCache(show)
origThreadName = threading.currentThread().name
providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive() and x.enable_backlog]
for providerNum, curProvider in enumerate(providers):
if curProvider.anime_only and not show.is_anime:
logger.log(u"" + str(show.name) + " is not an anime, skiping", logger.DEBUG)
continue
threading.currentThread().name = origThreadName + " :: [" + curProvider.name + "]"
foundResults[curProvider.name] = {}
searchCount = 0
search_mode = curProvider.search_mode
while(True):
searchCount += 1
if search_mode == 'eponly':
logger.log(u"Performing episode search for " + show.name)
else:
logger.log(u"Performing season pack search for " + show.name)
try:
curProvider.cache.updateCache()
searchResults = curProvider.findSearchResults(show, episodes, search_mode, manualSearch)
except exceptions.AuthException, e:
logger.log(u"Authentication error: " + ex(e), logger.ERROR)
break
except Exception, e:
logger.log(u"Error while searching " + curProvider.name + ", skipping: " + ex(e), logger.ERROR)
logger.log(traceback.format_exc(), logger.DEBUG)
break
finally:
threading.currentThread().name = origThreadName
didSearch = True
if len(searchResults):
# make a list of all the results for this provider
for curEp in searchResults:
# skip non-tv crap
searchResults[curEp] = filter(
lambda x: show_name_helpers.filterBadReleases(x.name, parse=False) and x.show == show, searchResults[curEp])
if curEp in foundResults:
foundResults[curProvider.name][curEp] += searchResults[curEp]
else:
foundResults[curProvider.name][curEp] = searchResults[curEp]
break
elif not curProvider.search_fallback or searchCount == 2:
break
if search_mode == 'sponly':
logger.log(u"FALLBACK EPISODE SEARCH INITIATED ...")
search_mode = 'eponly'
else:
logger.log(u"FALLBACK SEASON PACK SEARCH INITIATED ...")
search_mode = 'sponly'
# skip to next provider if we have no results to process
if not len(foundResults[curProvider.name]):
continue
anyQualities, bestQualities = Quality.splitQuality(show.quality)
# pick the best season NZB
bestSeasonResult = None
if SEASON_RESULT in foundResults[curProvider.name]:
bestSeasonResult = pickBestResult(foundResults[curProvider.name][SEASON_RESULT], show,
anyQualities + bestQualities)
highest_quality_overall = 0
for cur_episode in foundResults[curProvider.name]:
for cur_result in foundResults[curProvider.name][cur_episode]:
if cur_result.quality != Quality.UNKNOWN and cur_result.quality > highest_quality_overall:
highest_quality_overall = cur_result.quality
logger.log(u"The highest quality of any match is " + Quality.qualityStrings[highest_quality_overall],
logger.DEBUG)
# see if every episode is wanted
if bestSeasonResult:
searchedSeasons = []
searchedSeasons = [str(x.season) for x in episodes]
# get the quality of the season nzb
seasonQual = bestSeasonResult.quality
logger.log(
u"The quality of the season " + bestSeasonResult.provider.providerType + " is " + Quality.qualityStrings[
seasonQual], logger.DEBUG)
myDB = db.DBConnection()
allEps = [int(x["episode"])
for x in myDB.select("SELECT episode FROM tv_episodes WHERE showid = ? AND ( season IN ( " + ','.join(searchedSeasons) + " ) )",
[show.indexerid])]
logger.log(u"Executed query: [SELECT episode FROM tv_episodes WHERE showid = %s AND season in %s]" % (show.indexerid, ','.join(searchedSeasons)))
logger.log(u"Episode list: " + str(allEps), logger.DEBUG)
allWanted = True
anyWanted = False
for curEpNum in allEps:
for season in set([x.season for x in episodes]):
if not show.wantEpisode(season, curEpNum, seasonQual):
allWanted = False
else:
anyWanted = True
# if we need every ep in the season and there's nothing better then just download this and be done with it (unless single episodes are preferred)
if allWanted and bestSeasonResult.quality == highest_quality_overall:
logger.log(
u"Every ep in this season is needed, downloading the whole " + bestSeasonResult.provider.providerType + " " + bestSeasonResult.name)
epObjs = []
for curEpNum in allEps:
epObjs.append(show.getEpisode(season, curEpNum))
bestSeasonResult.episodes = epObjs
return [bestSeasonResult]
elif not anyWanted:
logger.log(
u"No eps from this season are wanted at this quality, ignoring the result of " + bestSeasonResult.name,
logger.DEBUG)
else:
if bestSeasonResult.provider.providerType == GenericProvider.NZB:
logger.log(u"Breaking apart the NZB and adding the individual ones to our results", logger.DEBUG)
# if not, break it apart and add them as the lowest priority results
individualResults = nzbSplitter.splitResult(bestSeasonResult)
individualResults = filter(
lambda x: show_name_helpers.filterBadReleases(x.name, parse=False) and x.show == show, individualResults)
for curResult in individualResults:
if len(curResult.episodes) == 1:
epNum = curResult.episodes[0].episode
elif len(curResult.episodes) > 1:
epNum = MULTI_EP_RESULT
if epNum in foundResults[curProvider.name]:
foundResults[curProvider.name][epNum].append(curResult)
else:
foundResults[curProvider.name][epNum] = [curResult]
# If this is a torrent all we can do is leech the entire torrent, user will have to select which eps not do download in his torrent client
else:
# Season result from Torrent Provider must be a full-season torrent, creating multi-ep result for it.
logger.log(
u"Adding multi-ep result for full-season torrent. Set the episodes you don't want to 'don't download' in your torrent client if desired!")
epObjs = []
for curEpNum in allEps:
epObjs.append(show.getEpisode(season, curEpNum))
bestSeasonResult.episodes = epObjs
epNum = MULTI_EP_RESULT
if epNum in foundResults[curProvider.name]:
foundResults[curProvider.name][epNum].append(bestSeasonResult)
else:
foundResults[curProvider.name][epNum] = [bestSeasonResult]
# go through multi-ep results and see if we really want them or not, get rid of the rest
multiResults = {}
if MULTI_EP_RESULT in foundResults[curProvider.name]:
for multiResult in foundResults[curProvider.name][MULTI_EP_RESULT]:
logger.log(u"Seeing if we want to bother with multi-episode result " + multiResult.name, logger.DEBUG)
if sickbeard.USE_FAILED_DOWNLOADS and failed_history.hasFailed(multiResult.name, multiResult.size,
multiResult.provider.name):
logger.log(multiResult.name + u" has previously failed, rejecting this multi-ep result")
continue
# see how many of the eps that this result covers aren't covered by single results
neededEps = []
notNeededEps = []
for epObj in multiResult.episodes:
epNum = epObj.episode
# if we have results for the episode
if epNum in foundResults[curProvider.name] and len(foundResults[curProvider.name][epNum]) > 0:
neededEps.append(epNum)
else:
notNeededEps.append(epNum)
logger.log(
u"Single-ep check result is neededEps: " + str(neededEps) + ", notNeededEps: " + str(notNeededEps),
logger.DEBUG)
if not notNeededEps:
logger.log(u"All of these episodes were covered by single episode results, ignoring this multi-episode result", logger.DEBUG)
continue
# check if these eps are already covered by another multi-result
multiNeededEps = []
multiNotNeededEps = []
for epObj in multiResult.episodes:
epNum = epObj.episode
if epNum in multiResults:
multiNotNeededEps.append(epNum)
else:
multiNeededEps.append(epNum)
logger.log(
u"Multi-ep check result is multiNeededEps: " + str(multiNeededEps) + ", multiNotNeededEps: " + str(
multiNotNeededEps), logger.DEBUG)
if not multiNeededEps:
logger.log(
u"All of these episodes were covered by another multi-episode nzbs, ignoring this multi-ep result",
logger.DEBUG)
continue
# if we're keeping this multi-result then remember it
for epObj in multiResult.episodes:
multiResults[epObj.episode] = multiResult
# don't bother with the single result if we're going to get it with a multi result
for epObj in multiResult.episodes:
epNum = epObj.episode
if epNum in foundResults[curProvider.name]:
logger.log(
u"A needed multi-episode result overlaps with a single-episode result for ep #" + str(
epNum) + ", removing the single-episode results from the list", logger.DEBUG)
del foundResults[curProvider.name][epNum]
# of all the single ep results narrow it down to the best one for each episode
finalResults += set(multiResults.values())
for curEp in foundResults[curProvider.name]:
if curEp in (MULTI_EP_RESULT, SEASON_RESULT):
continue
if len(foundResults[curProvider.name][curEp]) == 0:
continue
bestResult = pickBestResult(foundResults[curProvider.name][curEp], show)
# if all results were rejected move on to the next episode
if not bestResult:
continue
# filter out possible bad torrents from providers such as ezrss
if bestResult.resultType == "torrent" and sickbeard.TORRENT_METHOD != "blackhole":
bestResult.content = None
if not bestResult.url.startswith('magnet'):
bestResult.content = bestResult.provider.getURL(bestResult.url)
if not bestResult.content:
continue
# add result if its not a duplicate and
found = False
for i, result in enumerate(finalResults):
for bestResultEp in bestResult.episodes:
if bestResultEp in result.episodes:
if result.quality < bestResult.quality:
finalResults.pop(i)
else:
found = True
if not found:
finalResults += [bestResult]
# check that we got all the episodes we wanted first before doing a match and snatch
wantedEpCount = 0
for wantedEp in episodes:
for result in finalResults:
if wantedEp in result.episodes and isFinalResult(result):
wantedEpCount += 1
# make sure we search every provider for results unless we found everything we wanted
if wantedEpCount == len(episodes):
break
if not didSearch:
logger.log(u"No NZB/Torrent providers found or enabled in the SickGear config for backlog searches. Please check your settings.",
logger.ERROR)
return finalResults
| ressu/SickGear | sickbeard/search.py | Python | gpl-3.0 | 30,487 |
#!/usr/bin/env python3
"""
Custom Bi Directional Binary Data Parser
ModParser
"""
# ====================== GPL License and Copyright Notice ======================
# This file is part of ModParser
# Copyright (C) 2017 Diana Land
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ModParser. If not, see <http://www.gnu.org/licenses/>.
#
# https://github.com/iaz3/ModParser
#
# =============================================================================
| iaz3/ModParser | modparser/__init__.py | Python | gpl-3.0 | 983 |
import unittest
import os
import sys
import copy
import nose
from numpy.testing import *
from numpy import array, alltrue, ndarray, asarray, can_cast,zeros, dtype
from numpy.core.multiarray import typeinfo
import util
# Module-level handle on the compiled extension; populated once by setup().
wrap = None
def setup():
    """Build the wrapper extension module the tests exercise.

    Skips the whole module when no C compiler is available; otherwise
    builds (at most once) the ``test_array_from_pyobj_ext`` extension and
    stores it in the module-level ``wrap``.
    """
    global wrap
    # Nothing can be built, let alone tested, without a C compiler.
    if not util.has_c_compiler():
        raise nose.SkipTest("No C compiler available")
    if wrap is None:
        config_code = """
        config.add_extension('test_array_from_pyobj_ext',
                             sources=['wrapmodule.c', 'fortranobject.c'],
                             define_macros=[])
        """
        base_dir = os.path.dirname(__file__)
        sources = [
            os.path.join(base_dir, 'src', 'array_from_pyobj', 'wrapmodule.c'),
            os.path.join(base_dir, '..', 'src', 'fortranobject.c'),
            os.path.join(base_dir, '..', 'src', 'fortranobject.h'),
        ]
        wrap = util.build_module_distutils(sources, config_code,
                                           'test_array_from_pyobj_ext')
def flags_info(arr):
    """Return the list of flag names set on *arr* (as seen by the wrapper)."""
    # array_attrs()[6] is the raw flags word; translate it to names.
    return flags2names(wrap.array_attrs(arr)[6])
def flags2names(flags):
    """Translate a numeric flags word into the list of known flag names.

    Unknown flag bits are ignored; flag names missing from the wrapper
    module default to 0 and therefore never match.
    """
    known = ['CONTIGUOUS', 'FORTRAN', 'OWNDATA', 'ENSURECOPY',
             'ENSUREARRAY', 'ALIGNED', 'NOTSWAPPED', 'WRITEABLE',
             'UPDATEIFCOPY', 'BEHAVED', 'BEHAVED_RO',
             'CARRAY', 'FARRAY']
    return [name for name in known if abs(flags) & getattr(wrap, name, 0)]
class Intent(object):
    """Accumulates f2py intent flags through attribute access.

    ``intent.in_.c`` builds a new Intent carrying ['in', 'c'] and the
    corresponding F2PY_INTENT_* flag bits from the wrapper module.
    """
    def __init__(self, intent_list=[]):
        # Copy so later attribute chaining never mutates a shared list.
        self.intent_list = intent_list[:]
        bits = 0
        for token in intent_list:
            if token == 'optional':
                bits |= wrap.F2PY_OPTIONAL
            else:
                bits |= getattr(wrap, 'F2PY_INTENT_' + token.upper())
        self.flags = bits
    def __getattr__(self, name):
        # 'in' is a keyword, so callers spell it 'in_'.
        name = name.lower()
        if name == 'in_':
            name = 'in'
        return self.__class__(self.intent_list + [name])
    def __str__(self):
        return 'intent(%s)' % (','.join(self.intent_list))
    def __repr__(self):
        return 'Intent(%r)' % (self.intent_list)
    def is_intent(self, *names):
        """True when every given intent name is present (order ignored)."""
        return all(name in self.intent_list for name in names)
    def is_intent_exact(self, *names):
        """True when the given names are exactly the stored intents."""
        return len(self.intent_list) == len(names) and self.is_intent(*names)
intent = Intent()
class Type(object):
    """Cached wrapper around a NumPy scalar type name.

    Instances are interned per upper-cased name (see ``__new__``) and
    expose the type's ``typeinfo`` metadata: type number, dtype, element
    size and dtype character, plus helpers enumerating cast-compatible
    and smaller/equal/larger types.
    """
    # All scalar type names exercised by these tests.
    _type_names = ['BOOL','BYTE','UBYTE','SHORT','USHORT','INT','UINT',
                   'LONG','ULONG','LONGLONG','ULONGLONG',
                   'FLOAT','DOUBLE','LONGDOUBLE','CFLOAT','CDOUBLE',
                   'CLONGDOUBLE']
    # Interning cache: upper-cased name -> shared Type instance.
    _type_cache = {}
    # For each type name, the names that can safely be cast to it.
    _cast_dict = {'BOOL':['BOOL']}
    _cast_dict['BYTE'] = _cast_dict['BOOL'] + ['BYTE']
    _cast_dict['UBYTE'] = _cast_dict['BOOL'] + ['UBYTE']
    # NOTE(review): the two entries above are immediately overwritten
    # here, so BOOL effectively does not cast to BYTE/UBYTE in this table.
    _cast_dict['BYTE'] = ['BYTE']
    _cast_dict['UBYTE'] = ['UBYTE']
    _cast_dict['SHORT'] = _cast_dict['BYTE'] + ['UBYTE','SHORT']
    _cast_dict['USHORT'] = _cast_dict['UBYTE'] + ['BYTE','USHORT']
    _cast_dict['INT'] = _cast_dict['SHORT'] + ['USHORT','INT']
    _cast_dict['UINT'] = _cast_dict['USHORT'] + ['SHORT','UINT']
    _cast_dict['LONG'] = _cast_dict['INT'] + ['LONG']
    _cast_dict['ULONG'] = _cast_dict['UINT'] + ['ULONG']
    _cast_dict['LONGLONG'] = _cast_dict['LONG'] + ['LONGLONG']
    _cast_dict['ULONGLONG'] = _cast_dict['ULONG'] + ['ULONGLONG']
    _cast_dict['FLOAT'] = _cast_dict['SHORT'] + ['USHORT','FLOAT']
    _cast_dict['DOUBLE'] = _cast_dict['INT'] + ['UINT','FLOAT','DOUBLE']
    _cast_dict['LONGDOUBLE'] = _cast_dict['LONG'] + ['ULONG','FLOAT','DOUBLE','LONGDOUBLE']
    _cast_dict['CFLOAT'] = _cast_dict['FLOAT'] + ['CFLOAT']
    _cast_dict['CDOUBLE'] = _cast_dict['DOUBLE'] + ['CFLOAT','CDOUBLE']
    _cast_dict['CLONGDOUBLE'] = _cast_dict['LONGDOUBLE'] + ['CFLOAT','CDOUBLE','CLONGDOUBLE']
    def __new__(cls,name):
        # A dtype instance may be passed instead of a name; resolve it to
        # the matching typeinfo name before consulting the cache.
        if isinstance(name,dtype):
            dtype0 = name
            name = None
            for n,i in typeinfo.items():
                if isinstance(i,tuple) and dtype0.type is i[-1]:
                    name = n
                    break
        obj = cls._type_cache.get(name.upper(),None)
        if obj is not None:
            return obj
        obj = object.__new__(cls)
        obj._init(name)
        cls._type_cache[name.upper()] = obj
        return obj
    def _init(self,name):
        # Populate metadata from numpy.core.multiarray.typeinfo.
        self.NAME = name.upper()
        self.type_num = getattr(wrap,'NPY_'+self.NAME)
        assert_equal(self.type_num,typeinfo[self.NAME][1])
        self.dtype = typeinfo[self.NAME][-1]
        # typeinfo stores the size in bits; elsize is in bytes.
        self.elsize = typeinfo[self.NAME][2] / 8
        self.dtypechar = typeinfo[self.NAME][0]
    def cast_types(self):
        # Types this type can safely cast to, per _cast_dict.
        return map(self.__class__,self._cast_dict[self.NAME])
    def all_types(self):
        return map(self.__class__,self._type_names)
    def smaller_types(self):
        # Types with strictly fewer bits than this one.
        bits = typeinfo[self.NAME][3]
        types = []
        for name in self._type_names:
            if typeinfo[name][3]<bits:
                types.append(Type(name))
        return types
    def equal_types(self):
        # Other types with exactly the same bit width.
        bits = typeinfo[self.NAME][3]
        types = []
        for name in self._type_names:
            if name==self.NAME: continue
            if typeinfo[name][3]==bits:
                types.append(Type(name))
        return types
    def larger_types(self):
        # Types with strictly more bits than this one.
        bits = typeinfo[self.NAME][3]
        types = []
        for name in self._type_names:
            if typeinfo[name][3]>bits:
                types.append(Type(name))
        return types
class Array(object):
    """Constructs an array via the wrapper and cross-checks it.

    Calls ``wrap.call`` with the given type, dims, intent flags and input
    object, builds the equivalent pure-NumPy array, and asserts that the
    two agree on shape, strides, descriptor and flags.  (Python 2 code:
    the backtick expressions are ``repr`` shorthands used as messages.)
    """
    def __init__(self,typ,dims,intent,obj):
        self.type = typ
        self.dims = dims
        self.intent = intent
        # Keep a deep copy so mutations by the wrapper can be detected.
        self.obj_copy = copy.deepcopy(obj)
        self.obj = obj
        # arr.dtypechar may be different from typ.dtypechar
        self.arr = wrap.call(typ.type_num,dims,intent.flags,obj)
        assert_(isinstance(self.arr, ndarray),`type(self.arr)`)
        self.arr_attr = wrap.array_attrs(self.arr)
        if len(dims)>1:
            if self.intent.is_intent('c'):
                assert_(intent.flags & wrap.F2PY_INTENT_C)
                assert_(not self.arr.flags['FORTRAN'],`self.arr.flags,getattr(obj,'flags',None)`)
                assert_(self.arr.flags['CONTIGUOUS'])
                assert_(not self.arr_attr[6] & wrap.FORTRAN)
            else:
                assert_(not intent.flags & wrap.F2PY_INTENT_C)
                assert_(self.arr.flags['FORTRAN'])
                assert_(not self.arr.flags['CONTIGUOUS'])
                assert_(self.arr_attr[6] & wrap.FORTRAN)
        if obj is None:
            self.pyarr = None
            self.pyarr_attr = None
            return
        if intent.is_intent('cache'):
            assert_(isinstance(obj,ndarray),`type(obj)`)
            self.pyarr = array(obj).reshape(*dims).copy()
        else:
            self.pyarr = array(array(obj,
                                     dtype = typ.dtypechar).reshape(*dims),
                               order=self.intent.is_intent('c') and 'C' or 'F')
            assert_(self.pyarr.dtype == typ, \
                    `self.pyarr.dtype,typ`)
        assert_(self.pyarr.flags['OWNDATA'], (obj, intent))
        self.pyarr_attr = wrap.array_attrs(self.pyarr)
        if len(dims)>1:
            if self.intent.is_intent('c'):
                assert_(not self.pyarr.flags['FORTRAN'])
                assert_(self.pyarr.flags['CONTIGUOUS'])
                assert_(not self.pyarr_attr[6] & wrap.FORTRAN)
            else:
                assert_(self.pyarr.flags['FORTRAN'])
                assert_(not self.pyarr.flags['CONTIGUOUS'])
                assert_(self.pyarr_attr[6] & wrap.FORTRAN)
        assert_(self.arr_attr[1]==self.pyarr_attr[1]) # nd
        assert_(self.arr_attr[2]==self.pyarr_attr[2]) # dimensions
        if self.arr_attr[1]<=1:
            assert_(self.arr_attr[3]==self.pyarr_attr[3],\
                    `self.arr_attr[3],self.pyarr_attr[3],self.arr.tostring(),self.pyarr.tostring()`) # strides
        assert_(self.arr_attr[5][-2:]==self.pyarr_attr[5][-2:],\
                `self.arr_attr[5],self.pyarr_attr[5]`) # descr
        assert_(self.arr_attr[6]==self.pyarr_attr[6],\
                `self.arr_attr[6],self.pyarr_attr[6],flags2names(0*self.arr_attr[6]-self.pyarr_attr[6]),flags2names(self.arr_attr[6]),intent`) # flags
        if intent.is_intent('cache'):
            # A cache array only needs to be large enough, not exact.
            assert_(self.arr_attr[5][3]>=self.type.elsize,\
                    `self.arr_attr[5][3],self.type.elsize`)
        else:
            assert_(self.arr_attr[5][3]==self.type.elsize,\
                    `self.arr_attr[5][3],self.type.elsize`)
        assert_(self.arr_equal(self.pyarr,self.arr))
        if isinstance(self.obj,ndarray):
            if typ.elsize==Type(obj.dtype).elsize:
                if not intent.is_intent('copy') and self.arr_attr[1]<=1:
                    assert_(self.has_shared_memory())
    def arr_equal(self,arr1,arr2):
        # Element-wise equality; shapes must match first.
        if arr1.shape != arr2.shape:
            return False
        s = arr1==arr2
        return alltrue(s.flatten())
    def __str__(self):
        return str(self.arr)
    def has_shared_memory(self):
        """Check that created array shares data with input array.
        """
        if self.obj is self.arr:
            return True
        if not isinstance(self.obj,ndarray):
            return False
        obj_attr = wrap.array_attrs(self.obj)
        # attrs[0] is the data pointer; identical pointers mean shared data.
        return obj_attr[0]==self.arr_attr[0]
##################################################
class test_intent(unittest.TestCase):
    """Sanity checks for the Intent flag-accumulator helper."""
    def test_in_out(self):
        combined = intent.in_.out
        assert_equal(str(combined), 'intent(in,out)')
        in_c = intent.in_.c
        assert_(in_c.is_intent('c'))
        assert_(not in_c.is_intent_exact('c'))
        assert_(in_c.is_intent_exact('c', 'in'))
        assert_(in_c.is_intent_exact('in', 'c'))
        assert_(not intent.in_.is_intent('c'))
class _test_shared_memory:
    """Mixin exercising how intent flags control data sharing.

    Checks whether the array the wrapper builds shares memory with the
    Python input for each combination of intent flags, ordering and cast
    type.  Concrete per-type test classes (generated below via ``exec``)
    supply ``self.type`` (a Type) and ``self.array`` (an Array factory).
    Python 2 code: backticks are ``repr``; ``except E,msg`` is the old
    exception syntax.
    """
    # 1-D and 2x3 sample inputs reused by all tests.
    num2seq = [1,2]
    num23seq = [[1,2,3],[4,5,6]]
    def test_in_from_2seq(self):
        # A plain sequence must be copied, never shared.
        a = self.array([2],intent.in_,self.num2seq)
        assert_(not a.has_shared_memory())
    def test_in_from_2casttype(self):
        # Sharing happens only when element sizes match exactly.
        for t in self.type.cast_types():
            obj = array(self.num2seq,dtype=t.dtype)
            a = self.array([len(self.num2seq)],intent.in_,obj)
            if t.elsize==self.type.elsize:
                assert_(a.has_shared_memory(),`self.type.dtype,t.dtype`)
            else:
                assert_(not a.has_shared_memory(),`t.dtype`)
    def test_inout_2seq(self):
        obj = array(self.num2seq,dtype=self.type.dtype)
        a = self.array([len(self.num2seq)],intent.inout,obj)
        assert_(a.has_shared_memory())
        try:
            a = self.array([2],intent.in_.inout,self.num2seq)
        except TypeError,msg:
            if not str(msg).startswith('failed to initialize intent(inout|inplace|cache) array'):
                raise
        else:
            raise SystemError('intent(inout) should have failed on sequence')
    def test_f_inout_23seq(self):
        obj = array(self.num23seq,dtype=self.type.dtype,order='F')
        shape = (len(self.num23seq),len(self.num23seq[0]))
        a = self.array(shape,intent.in_.inout,obj)
        assert_(a.has_shared_memory())
        # A C-ordered array cannot satisfy a Fortran inout request.
        obj = array(self.num23seq,dtype=self.type.dtype,order='C')
        shape = (len(self.num23seq),len(self.num23seq[0]))
        try:
            a = self.array(shape,intent.in_.inout,obj)
        except ValueError,msg:
            if not str(msg).startswith('failed to initialize intent(inout) array'):
                raise
        else:
            raise SystemError('intent(inout) should have failed on improper array')
    def test_c_inout_23seq(self):
        obj = array(self.num23seq,dtype=self.type.dtype)
        shape = (len(self.num23seq),len(self.num23seq[0]))
        a = self.array(shape,intent.in_.c.inout,obj)
        assert_(a.has_shared_memory())
    def test_in_copy_from_2casttype(self):
        # intent(copy) always forces a fresh buffer.
        for t in self.type.cast_types():
            obj = array(self.num2seq,dtype=t.dtype)
            a = self.array([len(self.num2seq)],intent.in_.copy,obj)
            assert_(not a.has_shared_memory(),`t.dtype`)
    def test_c_in_from_23seq(self):
        a = self.array([len(self.num23seq),len(self.num23seq[0])],
                       intent.in_,self.num23seq)
        assert_(not a.has_shared_memory())
    def test_in_from_23casttype(self):
        for t in self.type.cast_types():
            obj = array(self.num23seq,dtype=t.dtype)
            a = self.array([len(self.num23seq),len(self.num23seq[0])],
                           intent.in_,obj)
            assert_(not a.has_shared_memory(),`t.dtype`)
    def test_f_in_from_23casttype(self):
        for t in self.type.cast_types():
            obj = array(self.num23seq,dtype=t.dtype,order='F')
            a = self.array([len(self.num23seq),len(self.num23seq[0])],
                           intent.in_,obj)
            if t.elsize==self.type.elsize:
                assert_(a.has_shared_memory(),`t.dtype`)
            else:
                assert_(not a.has_shared_memory(),`t.dtype`)
    def test_c_in_from_23casttype(self):
        for t in self.type.cast_types():
            obj = array(self.num23seq,dtype=t.dtype)
            a = self.array([len(self.num23seq),len(self.num23seq[0])],
                           intent.in_.c,obj)
            if t.elsize==self.type.elsize:
                assert_(a.has_shared_memory(),`t.dtype`)
            else:
                assert_(not a.has_shared_memory(),`t.dtype`)
    def test_f_copy_in_from_23casttype(self):
        for t in self.type.cast_types():
            obj = array(self.num23seq,dtype=t.dtype,order='F')
            a = self.array([len(self.num23seq),len(self.num23seq[0])],
                           intent.in_.copy,obj)
            assert_(not a.has_shared_memory(),`t.dtype`)
    def test_c_copy_in_from_23casttype(self):
        for t in self.type.cast_types():
            obj = array(self.num23seq,dtype=t.dtype)
            a = self.array([len(self.num23seq),len(self.num23seq[0])],
                           intent.in_.c.copy,obj)
            assert_(not a.has_shared_memory(),`t.dtype`)
    def test_in_cache_from_2casttype(self):
        for t in self.type.all_types():
            if t.elsize != self.type.elsize:
                continue
            obj = array(self.num2seq,dtype=t.dtype)
            shape = (len(self.num2seq),)
            a = self.array(shape,intent.in_.c.cache,obj)
            assert_(a.has_shared_memory(),`t.dtype`)
            a = self.array(shape,intent.in_.cache,obj)
            assert_(a.has_shared_memory(),`t.dtype`)
            obj = array(self.num2seq,dtype=t.dtype,order='F')
            a = self.array(shape,intent.in_.c.cache,obj)
            assert_(a.has_shared_memory(),`t.dtype`)
            a = self.array(shape,intent.in_.cache,obj)
            assert_(a.has_shared_memory(),`t.dtype`)
            # A reversed (non-contiguous) view must be rejected.
            try:
                a = self.array(shape,intent.in_.cache,obj[::-1])
            except ValueError,msg:
                if not str(msg).startswith('failed to initialize intent(cache) array'):
                    raise
            else:
                raise SystemError('intent(cache) should have failed on multisegmented array')
    def test_in_cache_from_2casttype_failure(self):
        # Buffers smaller than the requested element size must be rejected.
        for t in self.type.all_types():
            if t.elsize >= self.type.elsize:
                continue
            obj = array(self.num2seq,dtype=t.dtype)
            shape = (len(self.num2seq),)
            try:
                a = self.array(shape,intent.in_.cache,obj)
            except ValueError,msg:
                if not str(msg).startswith('failed to initialize intent(cache) array'):
                    raise
            else:
                raise SystemError('intent(cache) should have failed on smaller array')
    def test_cache_hidden(self):
        shape = (2,)
        a = self.array(shape,intent.cache.hide,None)
        assert_(a.arr.shape==shape)
        shape = (2,3)
        a = self.array(shape,intent.cache.hide,None)
        assert_(a.arr.shape==shape)
        # Hidden arrays cannot have undefined (negative) dimensions.
        shape = (-1,3)
        try:
            a = self.array(shape,intent.cache.hide,None)
        except ValueError,msg:
            if not str(msg).startswith('failed to create intent(cache|hide)|optional array'):
                raise
        else:
            raise SystemError('intent(cache) should have failed on undefined dimensions')
    def test_hidden(self):
        shape = (2,)
        a = self.array(shape,intent.hide,None)
        assert_(a.arr.shape==shape)
        assert_(a.arr_equal(a.arr,zeros(shape,dtype=self.type.dtype)))
        shape = (2,3)
        a = self.array(shape,intent.hide,None)
        assert_(a.arr.shape==shape)
        assert_(a.arr_equal(a.arr,zeros(shape,dtype=self.type.dtype)))
        assert_(a.arr.flags['FORTRAN'] and not a.arr.flags['CONTIGUOUS'])
        shape = (2,3)
        a = self.array(shape,intent.c.hide,None)
        assert_(a.arr.shape==shape)
        assert_(a.arr_equal(a.arr,zeros(shape,dtype=self.type.dtype)))
        assert_(not a.arr.flags['FORTRAN'] and a.arr.flags['CONTIGUOUS'])
        shape = (-1,3)
        try:
            a = self.array(shape,intent.hide,None)
        except ValueError,msg:
            if not str(msg).startswith('failed to create intent(cache|hide)|optional array'):
                raise
        else:
            raise SystemError('intent(hide) should have failed on undefined dimensions')
    def test_optional_none(self):
        # Omitted optional arguments materialize as zero-filled arrays.
        shape = (2,)
        a = self.array(shape,intent.optional,None)
        assert_(a.arr.shape==shape)
        assert_(a.arr_equal(a.arr,zeros(shape,dtype=self.type.dtype)))
        shape = (2,3)
        a = self.array(shape,intent.optional,None)
        assert_(a.arr.shape==shape)
        assert_(a.arr_equal(a.arr,zeros(shape,dtype=self.type.dtype)))
        assert_(a.arr.flags['FORTRAN'] and not a.arr.flags['CONTIGUOUS'])
        shape = (2,3)
        a = self.array(shape,intent.c.optional,None)
        assert_(a.arr.shape==shape)
        assert_(a.arr_equal(a.arr,zeros(shape,dtype=self.type.dtype)))
        assert_(not a.arr.flags['FORTRAN'] and a.arr.flags['CONTIGUOUS'])
    def test_optional_from_2seq(self):
        obj = self.num2seq
        shape = (len(obj),)
        a = self.array(shape,intent.optional,obj)
        assert_(a.arr.shape==shape)
        assert_(not a.has_shared_memory())
    def test_optional_from_23seq(self):
        obj = self.num23seq
        shape = (len(obj),len(obj[0]))
        a = self.array(shape,intent.optional,obj)
        assert_(a.arr.shape==shape)
        assert_(not a.has_shared_memory())
        a = self.array(shape,intent.optional.c,obj)
        assert_(a.arr.shape==shape)
        assert_(not a.has_shared_memory())
    def test_inplace(self):
        obj = array(self.num23seq,dtype=self.type.dtype)
        assert_(not obj.flags['FORTRAN'] and obj.flags['CONTIGUOUS'])
        shape = obj.shape
        a = self.array(shape,intent.inplace,obj)
        assert_(obj[1][2]==a.arr[1][2],`obj,a.arr`)
        a.arr[1][2]=54
        assert_(obj[1][2]==a.arr[1][2]==array(54,dtype=self.type.dtype),`obj,a.arr`)
        assert_(a.arr is obj)
        assert_(obj.flags['FORTRAN']) # obj attributes are changed inplace!
        assert_(not obj.flags['CONTIGUOUS'])
    def test_inplace_from_casttype(self):
        for t in self.type.cast_types():
            if t is self.type:
                continue
            obj = array(self.num23seq,dtype=t.dtype)
            assert_(obj.dtype.type==t.dtype)
            assert_(obj.dtype.type is not self.type.dtype)
            assert_(not obj.flags['FORTRAN'] and obj.flags['CONTIGUOUS'])
            shape = obj.shape
            a = self.array(shape,intent.inplace,obj)
            assert_(obj[1][2]==a.arr[1][2],`obj,a.arr`)
            a.arr[1][2]=54
            assert_(obj[1][2]==a.arr[1][2]==array(54,dtype=self.type.dtype),`obj,a.arr`)
            assert_(a.arr is obj)
            assert_(obj.flags['FORTRAN']) # obj attributes are changed inplace!
            assert_(not obj.flags['CONTIGUOUS'])
            assert_(obj.dtype.type is self.type.dtype) # obj type is changed inplace!
# Python 2 ``exec`` statement: stamp out one concrete TestCase per scalar
# type, mixing in _test_shared_memory and binding the concrete Type name
# into setUp and the array factory via %r.
for t in Type._type_names:
    exec '''\
class test_%s_gen(unittest.TestCase,
                  _test_shared_memory
                  ):
    def setUp(self):
        self.type = Type(%r)
    array = lambda self,dims,intent,obj: Array(Type(%r),dims,intent,obj)
''' % (t,t,t)
if __name__ == "__main__":
    # Build the wrapper extension first, then let nose run this module.
    setup()
    import nose
    nose.runmodule()
| beiko-lab/gengis | bin/Lib/site-packages/numpy/f2py/tests/test_array_from_pyobj.py | Python | gpl-3.0 | 21,255 |
from greencouriers.tests import *
class TestCourierController(TestController):
    """Functional tests for the courier controller."""
    def test_index(self):
        # Just exercise the index action; body assertions can be added later.
        page = self.app.get(url(controller='courier', action='index'))
        # Test response...
| guyromm/greencouriers | greencouriers/tests/functional/test_courier.py | Python | gpl-3.0 | 209 |
import os
from typing import Dict, List, Union
OptionalJSON = Union[List, Dict, float, int, str, bool, None]
def ensure_dir_exists(directory: str) -> None:
    """Create *directory* (including missing parents) if it does not exist.

    ``os.makedirs(..., exist_ok=True)`` replaces the previous
    exists-then-mkdir pair: it also handles nested paths and avoids the
    race where the directory appears between the check and the creation.

    :param directory: path of the directory that must exist afterwards
    """
    os.makedirs(directory, exist_ok=True)
def get_dir(directory: str) -> str:
    """Return the path to *directory* under the project root, creating it.

    The project root is the parent of the directory containing this module.

    Current directory structure:
    PATinderBot
        src
        img
            like
            match
            nope
        json
        data

    :param directory: string of the directory to search for
    :return: string with the complete path to the searched for directory
    """
    project_dir = os.path.join(os.path.dirname(__file__), '..')
    target = os.path.join(project_dir, directory)
    ensure_dir_exists(target)
    return target
| physicalattraction/PATinderBot | src/common.py | Python | gpl-3.0 | 821 |
# Copyright (C) 2006-2007 Red Hat, Inc.
# Copyright (C) 2008 One Laptop Per Child
# Copyright (C) 2008-2013 Sugar Labs
# Copyright (C) 2013 Daniel Francis
# Copyright (C) 2013 Walter Bender
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
from gettext import gettext as _
from gi.repository import GObject
from gi.repository import Gio
from gi.repository import GLib
from gi.repository import Gtk
from gi.repository import Gdk
from gi.repository import GdkPixbuf
from sugar3.graphics import style
from sugar3.graphics.icon import Icon
from sugar3.graphics.icon import CanvasIcon
from sugar3.graphics.palettemenu import PaletteMenuItem
from sugar3.graphics.palettemenu import PaletteMenuItemSeparator
from sugar3.graphics.alert import Alert, ErrorAlert
from sugar3.graphics.xocolor import XoColor
from sugar3.activity import activityfactory
from sugar3 import dispatch
from sugar3.datastore import datastore
from jarabe.view.palettes import JournalPalette
from jarabe.view.palettes import CurrentActivityPalette
from jarabe.view.palettes import ActivityPalette
from jarabe.view.buddyicon import BuddyIcon
from jarabe.view.buddymenu import BuddyMenu
from jarabe.model.buddy import get_owner_instance
from jarabe.model import shell
from jarabe.model import bundleregistry
from jarabe.model import desktop
from jarabe.journal import misc
from jarabe.desktop import schoolserver
from jarabe.desktop.schoolserver import RegisterError
from jarabe.desktop import favoriteslayout
from jarabe.desktop.viewcontainer import ViewContainer
from jarabe.util.normalize import normalize_string
_logger = logging.getLogger('FavoritesView')
# Drag-and-drop target used for moving activity icons within the view.
_ICON_DND_TARGET = ('activity-icon', Gtk.TargetFlags.SAME_WIDGET, 0)
LAYOUT_MAP = {favoriteslayout.RingLayout.key: favoriteslayout.RingLayout,
              # favoriteslayout.BoxLayout.key: favoriteslayout.BoxLayout,
              # favoriteslayout.TriangleLayout.key:
              # favoriteslayout.TriangleLayout,
              # favoriteslayout.SunflowerLayout.key:
              # favoriteslayout.SunflowerLayout,
              favoriteslayout.RandomLayout.key: favoriteslayout.RandomLayout}
"""Map numeric layout identifiers to uninstantiated subclasses of
`FavoritesLayout` which implement the layouts. Additional information
about the layout can be accessed with fields of the class."""
# Lazily-created settings object shared by the favorites views; stays None
# until first requested (presumably via a get_settings() helper — confirm).
_favorites_settings = None
class FavoritesBox(Gtk.VBox):
    """Vertical container hosting a FavoritesView plus an optional alert."""
    __gtype_name__ = 'SugarFavoritesBox'
    def __init__(self, favorite_view):
        # favorite_view identifies which favorites view this box backs.
        Gtk.VBox.__init__(self)
        self.favorite_view = favorite_view
        self._view = FavoritesView(self)
        self.pack_start(self._view, True, True, 0)
        self._view.show()
        # Currently displayed alert widget, or None.
        self._alert = None
    def set_filter(self, query):
        # Forward the search query to the embedded view.
        self._view.set_filter(query)
    def set_resume_mode(self, resume_mode):
        # Forward the resume-vs-start toggle to the embedded view.
        self._view.set_resume_mode(resume_mode)
    def grab_focus(self):
        # overwrite grab focus in order to grab focus from the parent
        self._view.grab_focus()
    def add_alert(self, alert):
        """Show *alert* at the top of the box, replacing any existing one."""
        if self._alert is not None:
            self.remove_alert()
        self._alert = alert
        self.pack_start(alert, False, True, 0)
        # Move the alert above the view so it appears at the top.
        self.reorder_child(alert, 0)
    def remove_alert(self):
        """Remove the currently displayed alert."""
        self.remove(self._alert)
        self._alert = None
    def _get_selected(self, query):
        # Delegate selection lookup to the embedded view.
        return self._view._get_selected(query)
class FavoritesView(ViewContainer):
    """Grid of favorite-activity icons for one home view.

    Icon drag-and-drop (manual repositioning) is enabled only while the
    Random layout is active.
    """
    __gtype_name__ = 'SugarFavoritesView'
    def __init__(self, box):
        self._box = box
        self._layout = None
        owner_icon = OwnerIcon(style.XLARGE_ICON_SIZE)
        owner_icon.connect('register-activate', self.__register_activate_cb)
        current_activity = CurrentActivityIcon()
        ViewContainer.__init__(self, layout=self._layout,
                               owner_icon=owner_icon,
                               activity_icon=current_activity)
        self.set_can_focus(False)
        self.add_events(Gdk.EventMask.BUTTON_PRESS_MASK |
                        Gdk.EventMask.POINTER_MOTION_HINT_MASK)
        # Register as a drop target with no default targets/actions; the
        # actual target list is built per-drag in __motion_notify_event_cb.
        self.drag_dest_set(0, [], 0)
        # Drag and drop is set only for the Random layout. This is
        # the flag that enables or disables it.
        self._dragging_mode = False
        # Signal-handler ids, kept so they can be disconnected when the
        # layout changes away from Random.
        self._drag_motion_hid = None
        self._drag_drop_hid = None
        self._drag_data_received_hid = None
        self._dragging = False
        self._pressed_button = None
        self._press_start_x = 0
        self._press_start_y = 0
        # Hotspot (cursor offset within the drag icon), in pixels.
        self._hot_x = None
        self._hot_y = None
        self._last_clicked_icon = None
        self._alert = None
        # When True, activating an icon resumes its newest journal entry
        # instead of launching a fresh instance.
        self._resume_mode = True
        # Populate the icons from an idle callback so the view shows fast.
        GLib.idle_add(self.__connect_to_bundle_registry_cb)
        favorites_settings = get_settings(self._box.favorite_view)
        favorites_settings.changed.connect(self.__settings_changed_cb)
        self._set_layout(favorites_settings.layout)
    def __settings_changed_cb(self, **kwargs):
        """React to a change in the per-view favorites settings."""
        favorites_settings = get_settings(self._box.favorite_view)
        layout_set = self._set_layout(favorites_settings.layout)
        if layout_set:
            self.set_layout(self._layout)
        # NOTE(review): this re-adds every favorite without removing the
        # icons already present -- confirm set_layout()/the container
        # clears previous children, otherwise icons would duplicate.
        registry = bundleregistry.get_registry()
        for info in registry:
            if registry.is_bundle_favorite(info.get_bundle_id(),
                                           info.get_activity_version(),
                                           self._box.favorite_view):
                self._add_activity(info)
    def _set_layout(self, layout):
        """Instantiate the layout class for *layout* key.

        Returns True when the layout actually changed, False when the
        requested layout is already active."""
        if layout not in LAYOUT_MAP:
            logging.warn('Unknown favorites layout: %r', layout)
            layout = favoriteslayout.RingLayout.key
        assert layout in LAYOUT_MAP
        if type(self._layout) == LAYOUT_MAP[layout]:
            return False
        # Tear down drag-and-drop wiring from a previous Random layout.
        if self._layout is not None and self._dragging_mode:
            self.disconnect(self._drag_motion_hid)
            self.disconnect(self._drag_drop_hid)
            self.disconnect(self._drag_data_received_hid)
        if layout == favoriteslayout.RandomLayout.key:
            self._dragging_mode = True
            self._drag_motion_hid = self.connect(
                'drag-motion', self.__drag_motion_cb)
            self._drag_drop_hid = self.connect(
                'drag-drop', self.__drag_drop_cb)
            self._drag_data_received_hid = self.connect(
                'drag-data-received', self.__drag_data_received_cb)
        else:
            self._dragging_mode = False
        self._layout = LAYOUT_MAP[layout]()
        return True
    # Write-only property: reading the layout goes through self._layout.
    layout = property(None, _set_layout)
    def do_add(self, child):
        """GtkContainer add: track activity icons and hook input events."""
        if child != self._owner_icon and child != self._activity_icon:
            self._children.append(child)
            child.connect('button-press-event', self.__button_press_cb)
            child.connect('button-release-event', self.__button_release_cb)
            child.connect('motion-notify-event', self.__motion_notify_event_cb)
            child.connect('drag-begin', self.__drag_begin_cb)
        if child.get_realized():
            child.set_parent_window(self.get_parent_window())
        child.set_parent(self)
    def __button_release_cb(self, widget, event):
        # Swallow the release while a drag is in flight so it is not
        # interpreted as an icon activation.
        if self._dragging:
            return True
        else:
            return False
    def __button_press_cb(self, widget, event):
        # Remember the press position so a later motion event can decide
        # whether the drag threshold was crossed.
        if event.button == 1 and event.type == Gdk.EventType.BUTTON_PRESS:
            self._last_clicked_icon = widget
            self._pressed_button = event.button
            self._press_start_x = event.x
            self._press_start_y = event.y
        return False
    def __motion_notify_event_cb(self, widget, event):
        if not self._dragging_mode:
            return False
        # if the mouse button is not pressed, no drag should occur
        if not event.get_state() & Gdk.ModifierType.BUTTON1_MASK:
            self._pressed_button = None
            return False
        if event.is_hint:
            x, y, state_ = event.window.get_pointer()
        else:
            x = event.x
            y = event.y
        if widget.drag_check_threshold(int(self._press_start_x),
                                       int(self._press_start_y),
                                       int(x),
                                       int(y)):
            self._dragging = True
            target_entry = Gtk.TargetEntry.new(*_ICON_DND_TARGET)
            target_list = Gtk.TargetList.new([target_entry])
            widget.drag_begin(target_list,
                              Gdk.DragAction.MOVE,
                              1,
                              event)
        return False
    def __drag_begin_cb(self, widget, context):
        """Use the icon's own pixbuf as the drag cursor image."""
        if not self._dragging_mode:
            return False
        pixbuf = GdkPixbuf.Pixbuf.new_from_file(widget.props.file_name)
        self._hot_x = pixbuf.props.width / 2
        self._hot_y = pixbuf.props.height / 2
        Gtk.drag_set_icon_pixbuf(context, pixbuf, self._hot_x, self._hot_y)
    def __drag_motion_cb(self, widget, context, x, y, time):
        # Only accept motion for drags we started ourselves.
        if self._last_clicked_icon is not None:
            Gdk.drag_status(context, context.get_suggested_action(), time)
            return True
        else:
            return False
    def __drag_drop_cb(self, widget, context, x, y, time):
        """Finish an icon drag: move the icon and reset the drag state."""
        if self._last_clicked_icon is not None:
            target = Gdk.Atom.intern_static_string(_ICON_DND_TARGET[0])
            self.drag_get_data(context, target, time)
            # Position the icon so the grab hotspot lands under the cursor.
            self._layout.move_icon(self._last_clicked_icon,
                                   x - self._hot_x, y - self._hot_y,
                                   self.get_allocation())
            self._pressed_button = None
            self._press_start_x = None
            self._press_start_y = None
            self._hot_x = None
            self._hot_y = None
            self._last_clicked_icon = None
            self._dragging = False
            return True
        else:
            return False
    def __drag_data_received_cb(self, widget, context, x, y, selection_data,
                                info, time):
        Gdk.drop_finish(context, success=True, time_=time)
    def __connect_to_bundle_registry_cb(self):
        """Idle callback: add current favorites and subscribe to changes."""
        registry = bundleregistry.get_registry()
        for info in registry:
            if registry.is_bundle_favorite(info.get_bundle_id(),
                                           info.get_activity_version(),
                                           self._box.favorite_view):
                self._add_activity(info)
        registry.connect('bundle-added', self.__activity_added_cb)
        registry.connect('bundle-removed', self.__activity_removed_cb)
        registry.connect('bundle-changed', self.__activity_changed_cb)
    def _add_activity(self, activity_info):
        """Create and show an icon for *activity_info* (Journal excluded)."""
        if activity_info.get_bundle_id() == 'org.laptop.JournalActivity':
            return
        icon = ActivityIcon(activity_info)
        icon.props.pixel_size = style.STANDARD_ICON_SIZE
        # icon.set_resume_mode(self._resume_mode)
        self.add(icon)
        icon.show()
    def __activity_added_cb(self, activity_registry, activity_info):
        registry = bundleregistry.get_registry()
        if registry.is_bundle_favorite(activity_info.get_bundle_id(),
                                       activity_info.get_activity_version(),
                                       self._box.favorite_view):
            self._add_activity(activity_info)
    def __activity_removed_cb(self, activity_registry, activity_info):
        icon = self._find_activity_icon(activity_info.get_bundle_id(),
                                        activity_info.get_activity_version())
        if icon is not None:
            self.remove(icon)
    def _find_activity_icon(self, bundle_id, version):
        """Return the icon for (bundle_id, version), or None if absent."""
        for icon in self.get_children():
            if isinstance(icon, ActivityIcon) and \
                    icon.bundle_id == bundle_id and icon.version == version:
                return icon
        return None
    def __activity_changed_cb(self, activity_registry, activity_info):
        """Replace the icon of a changed bundle (if it is still a favorite)."""
        if activity_info.get_bundle_id() == 'org.laptop.JournalActivity':
            return
        icon = self._find_activity_icon(activity_info.get_bundle_id(),
                                        activity_info.get_activity_version())
        if icon is not None:
            self.remove(icon)
        registry = bundleregistry.get_registry()
        if registry.is_bundle_favorite(activity_info.get_bundle_id(),
                                       activity_info.get_activity_version(),
                                       self._box.favorite_view):
            self._add_activity(activity_info)
    def set_filter(self, query):
        """Dim icons whose activity name does not contain *query*."""
        query = query.strip()
        for icon in self.get_children():
            if icon not in [self._owner_icon, self._activity_icon]:
                activity_name = icon.get_activity_name().decode('utf-8')
                normalized_name = normalize_string(activity_name)
                if normalized_name.find(query) > -1:
                    icon.alpha = 1.0
                else:
                    icon.alpha = 0.33
    def _get_selected(self, query):
        """Return the activity icons whose name contains *query*."""
        query = query.strip()
        selected = []
        for icon in self.get_children():
            if icon not in [self._owner_icon, self._activity_icon]:
                activity_name = icon.get_activity_name().decode('utf-8')
                normalized_name = normalize_string(activity_name)
                if normalized_name.find(query) > -1:
                    selected.append(icon)
        return selected
    def __register_activate_cb(self, icon):
        """Start school-server registration, showing a progress alert."""
        alert = Alert()
        alert.props.title = _('Registration')
        alert.props.msg = _('Please wait, searching for your school server.')
        self._box.add_alert(alert)
        # NOTE(review): GObject.idle_add here vs GLib.idle_add elsewhere
        # in this class -- both work, but confirm GObject is imported.
        GObject.idle_add(self.__register)
    def __register(self):
        """Idle callback: perform the registration and report the outcome."""
        self._box.remove_alert()
        alert = ErrorAlert()
        try:
            schoolserver.register_laptop()
        # Python 2 exception syntax (this module targets Python 2).
        except RegisterError, e:
            alert.props.title = _('Registration Failed')
            alert.props.msg = '%s' % e
        else:
            alert.props.title = _('Registration Successful')
            alert.props.msg = _('You are now registered '
                                'with your school server.')
        alert.connect('response', self.__register_alert_response_cb)
        self._box.add_alert(alert)
        # Returning False removes the idle callback after one run.
        return False
    def __register_alert_response_cb(self, alert, response_id):
        self._box.remove_alert()
    def set_resume_mode(self, resume_mode):
        """Propagate resume-mode to all icons that support it."""
        self._resume_mode = resume_mode
        for icon in self.get_children():
            if hasattr(icon, 'set_resume_mode'):
                icon.set_resume_mode(self._resume_mode)
class ActivityIcon(CanvasIcon):
    """Favorites-view icon for one activity bundle.

    Tracks the activity's most recent journal entries so that, in resume
    mode, activating the icon resumes the newest entry instead of
    launching a fresh instance.
    """
    __gtype_name__ = 'SugarFavoriteActivityIcon'
    # Extra padding (px) added on each side of the icon.
    _BORDER_WIDTH = style.zoom(9)
    # How many recent journal entries to fetch and list in the palette.
    _MAX_RESUME_ENTRIES = 5
    def __init__(self, activity_info):
        CanvasIcon.__init__(self, cache=True,
                            file_name=activity_info.get_icon())
        self._activity_info = activity_info
        self._journal_entries = []
        self._resume_mode = True
        self.connect_after('activate', self.__button_activate_cb)
        # Keep the entry list fresh whenever the datastore changes.
        datastore.updated.connect(self.__datastore_listener_updated_cb)
        datastore.deleted.connect(self.__datastore_listener_deleted_cb)
        self._refresh()
        self._update()
    def _refresh(self):
        """Asynchronously re-query the datastore for this activity's
        newest journal entries."""
        bundle_id = self._activity_info.get_bundle_id()
        properties = ['uid', 'title', 'icon-color', 'activity', 'activity_id',
                      'mime_type', 'mountpoint']
        self._get_last_activity_async(bundle_id, properties)
    def __datastore_listener_updated_cb(self, **kwargs):
        """A datastore entry changed: refresh if it belongs to this activity."""
        bundle_id = self._activity_info.get_bundle_id()
        if kwargs['metadata'].get('activity', '') == bundle_id:
            self._refresh()
    def __datastore_listener_deleted_cb(self, **kwargs):
        """A datastore entry was deleted: refresh if it was one of ours."""
        for entry in self._journal_entries:
            if entry['uid'] == kwargs['object_id']:
                self._refresh()
                break
    def _get_last_activity_async(self, bundle_id, properties):
        # Newest entries first; results arrive via the reply handler.
        query = {'activity': bundle_id}
        datastore.find(query, sorting=['+timestamp'],
                       limit=self._MAX_RESUME_ENTRIES,
                       properties=properties,
                       reply_handler=self.__get_last_activity_reply_handler_cb,
                       error_handler=self.__get_last_activity_error_handler_cb)
    def __get_last_activity_reply_handler_cb(self, entries, total_count):
        # If there's a problem with the DS index, we may get entries not
        # related to this activity.
        checked_entries = []
        for entry in entries:
            if entry['activity'] == self.bundle_id:
                checked_entries.append(entry)
        self._journal_entries = checked_entries
        self._update()
    def __get_last_activity_error_handler_cb(self, error):
        logging.error('Error retrieving most recent activities: %r', error)
    def _update(self):
        """Recolor the icon: grey when there is nothing to resume,
        otherwise the newest journal entry's color."""
        self.palette = None
        if not self._resume_mode or not self._journal_entries:
            xo_color = XoColor('%s,%s' % (style.COLOR_BUTTON_GREY.get_svg(),
                                          style.COLOR_WHITE.get_svg()))
        else:
            xo_color = misc.get_icon_color(self._journal_entries[0])
        self.props.xo_color = xo_color
    def create_palette(self):
        """Build the palette listing the resumable journal entries."""
        palette = FavoritePalette(self._activity_info, self._journal_entries)
        palette.connect('activate', self.__palette_activate_cb)
        palette.connect('entry-activate', self.__palette_entry_activate_cb)
        self.connect_to_palette_pop_events(palette)
        return palette
    def __palette_activate_cb(self, palette):
        self._activate()
    def __palette_entry_activate_cb(self, palette, metadata):
        self._resume(metadata)
    def do_get_preferred_width(self):
        # Pad the natural icon size with the border on both sides.
        width = CanvasIcon.do_get_preferred_width(self)[0]
        width += ActivityIcon._BORDER_WIDTH * 2
        return (width, width)
    def do_get_preferred_height(self):
        height = CanvasIcon.do_get_preferred_height(self)[0]
        height += ActivityIcon._BORDER_WIDTH * 2
        return (height, height)
    def __button_activate_cb(self, icon):
        self._activate()
    def _resume(self, journal_entry):
        """Resume *journal_entry*, creating an activity id if it has none."""
        if not journal_entry['activity_id']:
            journal_entry['activity_id'] = activityfactory.create_activity_id()
        misc.resume(journal_entry, self._activity_info.get_bundle_id())
    def _activate(self):
        """Resume the newest entry (resume mode) or launch a new instance."""
        if self.palette is not None:
            self.palette.popdown(immediate=True)
        if self._resume_mode and self._journal_entries:
            self._resume(self._journal_entries[0])
        else:
            misc.launch(self._activity_info)
    def run_activity(self):
        self._activate()
    def get_bundle_id(self):
        return self._activity_info.get_bundle_id()
    bundle_id = property(get_bundle_id, None)
    def get_version(self):
        return self._activity_info.get_activity_version()
    version = property(get_version, None)
    def get_activity_name(self):
        return self._activity_info.get_name()
    def _get_installation_time(self):
        return self._activity_info.get_installation_time()
    installation_time = property(_get_installation_time, None)
    def _get_fixed_position(self):
        registry = bundleregistry.get_registry()
        return registry.get_bundle_position(self.bundle_id, self.version)
    fixed_position = property(_get_fixed_position, None)
    def set_resume_mode(self, resume_mode):
        """Toggle resume-vs-launch behavior and refresh the icon color."""
        self._resume_mode = resume_mode
        self._update()
class FavoritePalette(ActivityPalette):
    """Palette for a favorite activity icon: the activity header plus up
    to five recent journal entries that can be resumed directly."""
    __gtype_name__ = 'SugarFavoritePalette'
    __gsignals__ = {
        # Emitted with the entry metadata when a journal-entry row is
        # activated.
        'entry-activate': (GObject.SignalFlags.RUN_FIRST,
                           None, ([object])),
    }
    def __init__(self, activity_info, journal_entries):
        ActivityPalette.__init__(self, activity_info)
        # Grey icon when there is nothing to resume, otherwise the
        # newest entry's color.
        if not journal_entries:
            xo_color = XoColor('%s,%s' % (style.COLOR_BUTTON_GREY.get_svg(),
                                          style.COLOR_WHITE.get_svg()))
        else:
            xo_color = misc.get_icon_color(journal_entries[0])
        self.props.icon = Icon(file=activity_info.get_icon(),
                               xo_color=xo_color,
                               pixel_size=style.STANDARD_ICON_SIZE)
        if journal_entries:
            self.props.secondary_text = journal_entries[0]['title']
        # One menu item per resumable entry, followed by a separator.
        menu_items = []
        for entry in journal_entries:
            icon_file_name = misc.get_icon_name(entry)
            color = misc.get_icon_color(entry)
            menu_item = PaletteMenuItem(text_label=entry['title'],
                                        file_name=icon_file_name,
                                        xo_color=color)
            menu_item.connect('activate', self.__resume_entry_cb, entry)
            menu_items.append(menu_item)
            menu_item.show()
        if journal_entries:
            separator = PaletteMenuItemSeparator()
            menu_items.append(separator)
            separator.show()
        for i in range(0, len(menu_items)):
            self.menu_box.pack_start(menu_items[i], True, True, 0)
    def __resume_entry_cb(self, menu_item, entry):
        if entry is not None:
            self.emit('entry-activate', entry)
class CurrentActivityIcon(CanvasIcon):
    """Icon showing the currently active activity (or the Journal);
    activating it raises that activity's window."""
    def __init__(self):
        CanvasIcon.__init__(self, icon_name='activity-journal',
                            pixel_size=style.STANDARD_ICON_SIZE, cache=True)
        self._home_model = shell.get_model()
        self._home_activity = self._home_model.get_active_activity()
        if self._home_activity is not None:
            self._update()
        self._home_model.connect('active-activity-changed',
                                 self.__active_activity_changed_cb)
        self.connect_after('activate', self.__activate_cb)
    def __activate_cb(self, icon):
        """Raise the active activity's window."""
        window = self._home_model.get_active_activity().get_window()
        window.activate(Gtk.get_current_event_time())
    def _update(self):
        """Refresh icon image/color from the active activity and drop any
        stale palette."""
        if self._home_activity is not None:
            self.props.file_name = self._home_activity.get_icon_path()
            self.props.xo_color = self._home_activity.get_icon_color()
            if self._home_activity.is_journal():
                # _unbusy() returns True while our window is not realized
                # yet; in that case retry every 100 ms until it succeeds.
                if self._unbusy():
                    GLib.timeout_add(100, self._unbusy)
            self.props.pixel_size = style.STANDARD_ICON_SIZE
        if self.palette is not None:
            self.palette.destroy()
            self.palette = None
    def _unbusy(self):
        # Clear the busy cursor once the window exists; the return value
        # doubles as the GLib timeout "continue" flag.
        if self.get_window():
            # Imported here to avoid a circular import at module load time.
            import jarabe.desktop.homewindow
            jarabe.desktop.homewindow.get_instance().unbusy()
            return False
        return True
    def create_palette(self):
        """Journal gets a JournalPalette, other activities the generic one."""
        if self._home_activity is not None:
            if self._home_activity.is_journal():
                palette = JournalPalette(self._home_activity)
            else:
                palette = CurrentActivityPalette(self._home_activity)
            self.connect_to_palette_pop_events(palette)
        else:
            palette = None
        return palette
    def __active_activity_changed_cb(self, home_model, home_activity):
        self._home_activity = home_activity
        self._update()
class OwnerIcon(BuddyIcon):
    """The owner's buddy icon at the center of the favorites view; its
    palette offers a school-server 'Register' entry."""
    __gtype_name__ = 'SugarFavoritesOwnerIcon'
    __gsignals__ = {
        # Emitted when the user picks Register from the palette.
        'register-activate': (GObject.SignalFlags.RUN_FIRST, None,
                              ([])),
    }
    def __init__(self, size):
        BuddyIcon.__init__(self, buddy=get_owner_instance(), pixel_size=size)
        # This is a workaround to skip the callback for
        # enter-notify-event in the parent class the first time.
        def __enter_notify_event_cb(icon, event):
            self.unset_state_flags(Gtk.StateFlags.PRELIGHT)
            # One-shot: disconnect after the first enter event.
            self.disconnect(self._enter_notify_hid)
        self._enter_notify_hid = self.connect('enter-notify-event',
                                              __enter_notify_event_cb)
    def create_palette(self):
        """Build the buddy menu, appending a Register item when enabled."""
        palette = BuddyMenu(get_owner_instance())
        settings = Gio.Settings('org.sugarlabs')
        if settings.get_boolean('show-register'):
            # Wording depends on whether a backup server is already set.
            backup_url = settings.get_string('backup-url')
            if not backup_url:
                text = _('Register')
            else:
                text = _('Register again')
            register_menu = PaletteMenuItem(text, 'media-record')
            register_menu.connect('activate', self.__register_activate_cb)
            palette.menu_box.pack_end(register_menu, True, True, 0)
            register_menu.show()
        self.connect_to_palette_pop_events(palette)
        return palette
    def __register_activate_cb(self, menuitem):
        self.emit('register-activate')
class FavoritesSetting(object):
    """Persistent per-home-view favorites settings backed by GSettings.

    Only the layout key is tracked; observers subscribe to the
    ``changed`` dispatch signal.
    """
    _DESKTOP_DIR = 'org.sugarlabs.desktop'
    _HOMEVIEWS_KEY = 'homeviews'
    def __init__(self, favorite_view):
        # Index into the 'homeviews' GSettings array of dicts.
        self._favorite_view = int(favorite_view)
        settings = Gio.Settings(self._DESKTOP_DIR)
        homeviews = settings.get_value(self._HOMEVIEWS_KEY).unpack()
        self._layout = homeviews[self._favorite_view]['layout']
        logging.debug('FavoritesSetting layout %r', self._layout)
        # NOTE(review): _mode is never read or written again in this
        # class -- possibly vestigial.
        self._mode = None
        self.changed = dispatch.Signal()
    def get_layout(self):
        return self._layout
    def set_layout(self, layout):
        """Persist a new layout key and notify observers (no-op when the
        value is unchanged)."""
        logging.debug('set_layout %r %r', layout, self._layout)
        if layout != self._layout:
            self._layout = layout
            settings = Gio.Settings(self._DESKTOP_DIR)
            homeviews = settings.get_value(self._HOMEVIEWS_KEY).unpack()
            homeviews[self._favorite_view]['layout'] = layout
            variant = GLib.Variant('aa{ss}', homeviews)
            settings.set_value(self._HOMEVIEWS_KEY, variant)
            self.changed.send(self)
    layout = property(get_layout, set_layout)
def get_settings(favorite_view=0):
    """Return the (lazily created) FavoritesSetting for *favorite_view*.

    The per-view settings objects are cached in the module-level
    ``_favorites_settings`` list, which is created on first use and
    grown on demand when the number of configured desktop views rises.
    """
    global _favorites_settings
    view_count = desktop.get_number_of_views()
    if _favorites_settings is None:
        _favorites_settings = [FavoritesSetting(index)
                               for index in range(view_count)]
    else:
        while len(_favorites_settings) < view_count:
            _favorites_settings.append(
                FavoritesSetting(len(_favorites_settings)))
    return _favorites_settings[favorite_view]
| icarito/sugar | src/jarabe/desktop/favoritesview.py | Python | gpl-3.0 | 27,457 |
__problem_title__ = "Number Rotations"
__problem_url___ = "https://projecteuler.net/problem=168"
__problem_description__ = "Consider the number 142857. We can right-rotate this number by moving " \
"the last digit (7) to the front of it, giving us 714285. It can be " \
"verified that 714285=5×142857. This demonstrates an unusual property " \
"of 142857: it is a divisor of its right-rotation. Find the last 5 " \
"digits of the sum of all integers , 10 < < 10 , that have this " \
"property."
import timeit
class Solution():
    """Placeholder solver for Project Euler problem 168."""
    @staticmethod
    def solution1():
        """Not implemented yet; returns None."""
        return None
    @staticmethod
    def time_solutions():
        """Run each solution once under timeit and print the elapsed time."""
        setup = 'from __main__ import Solution'
        elapsed = timeit.timeit('Solution.solution1()', setup=setup, number=1)
        print('Solution 1:', elapsed)
if __name__ == '__main__':
    s = Solution()
    print(s.solution1())
    s.time_solutions()
| jrichte43/ProjectEuler | Problem-0168/solutions.py | Python | gpl-3.0 | 996 |
__author__ = "davide"
import struct
import socket
import argparse
import sys
from datetime import datetime
import time
from collections import defaultdict
from signal import signal, SIGINT, SIG_IGN
ICMP_ECHO_REQUEST = 8, 0
ICMP_ECHO_RESPONSE = 0, 0
__all__ = ["ICMPPacket", "Pinger",
"ICMP_ECHO_REQUEST", "ICMP_ECHO_RESPONSE"]
# Python module for pinging hosts
class ICMPPacket:
"""Class that represents an ICMP struct_packet"""
__slots__ = "_data", "_checksum", "_type"
def __init__(self, packetType=ICMP_ECHO_RESPONSE, data=""):
"""Initialize the struct_packet
@param packetType: tuple
"""
self.packetType = packetType
self.data = data
self._checksum = -1
@property
def packetType(self):
"""16 bits that represent the struct_packet type, code"""
return self._type
@packetType.setter
def packetType(self, packet_type):
if len(packet_type) != 2:
raise ValueError("type must be a 2-element tuple")
if any(not 0 <= val < (1 << 8) for val in packet_type):
raise ValueError("Packet type not valid")
self._type = packet_type
@property
def data(self):
"""Packet content"""
return self._data
@data.setter
def data(self, data=b""):
self._data = data or b""
def compute_checksum(self):
# checksum set to zero
header = bytes([self._type[0], self._type[1], 0, 0])
struct_packet = header + self._data
length = len(struct_packet)
if length % 2:
odd = struct_packet[-1] << 8
struct_packet = struct_packet[:-1]
else:
odd = 0
format_len = len(struct_packet) // 2
blocks = struct.unpack("!{}H".format(format_len), struct_packet)
checksum = sum(blocks)
checksum += odd
checksum = (checksum >> 16) + (checksum & 0xFFFF)
checksum += checksum >> 16
self._checksum = ~checksum & 0xFFFF
@property
def checksum(self):
"""Packet checksum"""
return self._checksum
@property
def computedChecksum(self):
"""Computed checksum"""
return self._checksum >= 0
def __str__(self):
return ("ICMPPacket[type={}, data={}, checksum={}]"
.format(self._type, self._data[4:], self._checksum))
def encodePacket(self):
"""Returns the struct_packet encoded in a string"""
if not self.computedChecksum:
self.compute_checksum()
return struct.pack("!BBH{}s".format(len(self._data)),
self._type[0], self._type[1],
self._checksum, self._data)
@staticmethod
def buildPacket(raw):
"""Builds an ICMPPacket from the string raw
(received from a pong), returns (IP Header (raw), ICMP Packet)"""
ihl = (raw[0] & 0x0F) << 2
ip_header, raw_packet = raw[:ihl], raw[ihl:]
format_len = len(raw_packet) - 4
unpacked = struct.unpack("!BBH{}s".format(format_len), raw_packet)
packet = ICMPPacket(unpacked[:2], unpacked[3])
packet._checksum = unpacked[2]
return ip_header, packet
class Pinger:
    """Send ICMP echo requests and receive replies over a raw socket.

    Note: creating a raw ICMP socket requires elevated privileges on
    most systems.
    """
    DEFAULT_TIMEOUT = 5
    def __init__(self, timeout=DEFAULT_TIMEOUT):
        """Initialize the Pinger with the receive *timeout* in seconds."""
        self.socket = None
        self.timeout = timeout
        # Per-destination sequence counter embedded in the payload so
        # replies can be told apart.
        self.id_dict = defaultdict(int)
    def ping(self, dest_address, data=None):
        """Send an ICMP echo request carrying *data* to *dest_address*."""
        # BUG FIX: this used to read ``if not self.socket``, which made
        # close() a no-op and leaked the previous socket on every call.
        if self.socket:
            self.close()
        dest_address = str(dest_address)
        self.socket = socket.socket(socket.AF_INET, socket.SOCK_RAW,
                                    socket.getprotobyname("icmp"))
        self.socket.connect((dest_address, 0))
        self.socket.settimeout(self.timeout)
        packet = ICMPPacket(packetType=ICMP_ECHO_REQUEST)
        # Prefix the payload with a 32-bit sequence number; wrap so
        # struct.pack("!I", ...) never overflows.
        idpacket = struct.pack("!I", self.id_dict[dest_address] % (1 << 32))
        packet.data = idpacket + (data or b"")
        self.id_dict[dest_address] += 1
        packet_struct = packet.encodePacket()
        self.socket.send(packet_struct)
    def pong(self):
        """Return (ip_header, ICMPPacket) parsed from the next reply.

        Raises socket.error when ping() has not opened a socket yet and
        socket.timeout when no reply arrives within the timeout.
        """
        if not self.socket:
            raise socket.error("Socket closed")
        return ICMPPacket.buildPacket(self.socket.recv((1 << 16) - 1))
    def close(self):
        """Close the underlying socket, if any."""
        if self.socket:
            self.socket.close()
            self.socket = None
    def __del__(self):
        """Best-effort socket cleanup on garbage collection."""
        self.close()
def main():
    """Command-line driver: resolve the target, ping it repeatedly and
    print per-ping round-trip times plus aggregate statistics."""
    def parseArgs():
        # -t defaults to sys.maxsize, i.e. "ping until interrupted".
        handler = argparse.ArgumentParser(description="Pinger")
        handler.add_argument('-r', '--remote_host', help="Destination",
                             default="localhost", dest="dest")
        handler.add_argument('-d', '--data', help="Dati", default="",
                             dest="data")
        handler.add_argument('-t', '--tries', help="Numero di ping",
                             default=sys.maxsize, dest="tries", type=int)
        return handler.parse_args()
    args = parseArgs()
    try:
        ip = socket.gethostbyname(args.dest)
    except socket.gaierror:
        sys.exit("{} not found".format(args.dest))
    print("Pinging", args.dest, "(" + ip + ")")
    pinger = Pinger()
    # Running aggregates: max/min/mean RTT (ms), packets sent/received.
    tmax, tmin, tmean, total, received = -1, sys.maxsize, 0, 0, 0
    for i in range(args.tries):
        total += 1
        try:
            pinger.ping(args.dest, args.data.encode())
            t = datetime.now()
            pinger.pong()
            # Round-trip time in milliseconds.
            # NOTE(review): .microseconds only holds the sub-second part
            # of the delta, so replies slower than 1 s under-report here;
            # total_seconds() would be exact -- confirm intent.
            t = (datetime.now() - t).microseconds / 1000.
            print("Got ping from {} in {:1.2f} ms".format(args.dest, t))
            # Ignore Ctrl-C while the running statistics are updated so
            # they are never left half-updated; restored just below.
            handler = signal(SIGINT, SIG_IGN)
            tmax, tmin = max(tmax, t), min(tmin, t)
            received += 1
            # Incremental mean update.
            tmean = ((received - 1) * tmean + t) / received
            signal(SIGINT, handler)
            # Pace pings one second apart, except after the last one.
            if i != args.tries - 1:
                time.sleep(1)
        except socket.timeout:
            print("Host is not reachable")
        except KeyboardInterrupt:
            break
    print("***** RESULTS *****")
    if received != 0:
        stats = "Max time: {:1.2f} ms, Min time: {:1.2f} ms, Avg time: {:1.2f} ms"
        print(stats.format(tmax, tmin, tmean))
    stats = "Sent packets: {}\tReceived: {}\tLost: {}"
    print(stats.format(total, received, total - received))
    print("Packet Lost: {:1.0f}%".format((total - received) / total * 100))
if __name__ == '__main__':
    main()
| DavideCanton/Python3 | ping/pyng.py | Python | gpl-3.0 | 6,649 |
#!/usr/bin/env python
# vim:fileencoding=utf-8
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2013, Kovid Goyal <kovid at kovidgoyal.net>'
from calibre.gui2.complete2 import LineEdit
from calibre.gui2.widgets import history
class HistoryLineEdit2(LineEdit):
    """A completing LineEdit whose entered values persist across sessions.

    Call initialize(name) once after construction; the history is stored
    under the key 'lineedit_history_<name>'.
    """
    # Optional cap on the number of stored entries (None = unlimited).
    max_history_items = None
    def __init__(self, parent=None, completer_widget=None, sort_func=lambda x:None):
        LineEdit.__init__(self, parent=parent, completer_widget=completer_widget, sort_func=sort_func)
    @property
    def store_name(self):
        # Persistence key derived from the name passed to initialize().
        return 'lineedit_history_'+self._name
    def initialize(self, name):
        """Load the persisted history for *name* and start tracking edits."""
        self._name = name
        self.history = history.get(self.store_name, [])
        self.set_separator(None)
        self.update_items_cache(self.history)
        self.setText('')
        self.editingFinished.connect(self.save_history)
    def save_history(self):
        """Move the current text to the front of the history and persist it."""
        ct = unicode(self.text())
        # Texts of one or two characters are skipped -- presumably to
        # avoid cluttering the history with trivial entries.
        if len(ct) > 2:
            try:
                self.history.remove(ct)
            except ValueError:
                pass
            self.history.insert(0, ct)
            # Trim to the configured cap, if any.
            if self.max_history_items is not None:
                del self.history[self.max_history_items:]
            history.set(self.store_name, self.history)
            self.update_items_cache(self.history)
    def clear_history(self):
        """Forget all stored entries, both in memory and on disk."""
        self.history = []
        history.set(self.store_name, self.history)
        self.update_items_cache(self.history)
| insomnia-lab/calibre | src/calibre/gui2/widgets2.py | Python | gpl-3.0 | 1,547 |
from quanthistling.tests import *
class TestBookController(TestController):
    """Functional tests for the ``book`` controller."""
    def test_index(self):
        """The index action should respond without raising."""
        response = self.app.get(url(controller='book', action='index'))
        # Test response...
# https://www.codewars.com/kata/sudoku-solver/train/python
def sudoku(puzzle):
    """Solve a 9x9 Sudoku board in place and return it.

    *puzzle* is a list of nine lists of nine ints, with 0 marking an
    empty cell.

    Fix over the original: the previous implementation only filled cells
    that had a single candidate, ignored the return value of its own
    recursive call, and recursed forever whenever that propagation
    stalled (the zero count never decreased).  This version performs
    proper depth-first backtracking, so every well-posed puzzle is
    solved; the debug printing is gone.

    Raises ValueError if the puzzle admits no solution.
    """
    def _fits(row, col, value):
        # True when *value* can legally be placed at (row, col).
        if any(puzzle[row][j] == value for j in range(9)):
            return False
        if any(puzzle[i][col] == value for i in range(9)):
            return False
        base_row, base_col = 3 * (row // 3), 3 * (col // 3)
        for i in range(base_row, base_row + 3):
            for j in range(base_col, base_col + 3):
                if puzzle[i][j] == value:
                    return False
        return True

    def _solve():
        # Find the next blank; try each legal digit and recurse.
        for row in range(9):
            for col in range(9):
                if puzzle[row][col] == 0:
                    for value in range(1, 10):
                        if _fits(row, col, value):
                            puzzle[row][col] = value
                            if _solve():
                                return True
                            puzzle[row][col] = 0  # backtrack
                    return False
        return True  # no blanks left: solved

    if not _solve():
        raise ValueError("puzzle has no solution")
    return puzzle
def replace_zero(puzzle, rows_missing_numbers, columns_missing_numbers,
                 grid_missing_numbers):
    """Fill in every blank cell of *puzzle* that has exactly one candidate.

    The three ``*_missing_numbers`` arguments hold, per row / column /
    3x3 grid, the digits still absent from that unit (as produced by
    ``check_rows`` inside ``sudoku``).  ``grid_missing_numbers`` is
    indexed column-band-major -- grid = (column // 3) * 3 + (row // 3) --
    which is the order ``sudoku`` builds it in (this matches the old
    string-keyed ``grid_mapping`` table exactly).

    A cell is filled only when the intersection of its row, column and
    grid candidate sets is a single digit.  The board is modified in
    place and returned.

    Fixes over the original: ``xrange`` (Python 2 only) replaced with
    ``range``, the band-lookup table replaced by equivalent arithmetic,
    and the debug printing removed.
    """
    for row in range(9):
        for column in range(9):
            if puzzle[row][column] != 0:
                continue
            grid = (column // 3) * 3 + (row // 3)
            candidates = (set(rows_missing_numbers[row]) &
                          set(columns_missing_numbers[column]) &
                          set(grid_missing_numbers[grid]))
            if len(candidates) == 1:
                puzzle[row][column] = candidates.pop()
    return puzzle
def check_rows(board):
    """Return, for each row of *board*, the digits 1-9 absent from it.

    Zeros (blank cells) are ignored, and any duplicate or out-of-range
    occurrence is simply skipped.  The result is a list of ascending
    lists, one per input row.

    Fixes over the original: works on Python 3 (the Python 2 ``range``
    object was being copied and mutated as a list), and the unused
    ``result`` variable and the stray ``'???'`` debug print are removed.
    """
    missing_numbers = []
    for row in board:
        remaining = list(range(1, 10))
        for number in row:
            if number in remaining:
                remaining.remove(number)
        missing_numbers.append(remaining)
    return missing_numbers
def print_board(board):
    """Print *board* one row per line, after a marker header.

    Fix: uses the print() function so the module parses on Python 3 --
    the original ``print row`` statement is a SyntaxError there (output
    on Python 2 is unchanged, since a single parenthesized argument
    prints identically).
    """
    print('-- print_board --')
    for row in board:
        print(row)
def print_dict(my_dict):
    """Print ``key: value`` lines in ascending key order."""
    for key, value in sorted(my_dict.items()):
        print('{}: {}'.format(key, value))
from multiprocessing import Process, JoinableQueue, Manager, Lock, Value, Event
import wiringpi as wp
import RPi.GPIO as rpio
from slaveprocess import SlaveProcess
import time
rpio.setmode(rpio.BCM)
class PMWProcess(Process):
    """Software-PWM worker process.

    Polls a shared frequency/duty pair (multiprocessing.Value) guarded by
    *lock_freq* for as long as *event_enable_pwm* is set.

    Expected kwargs:
        event_enable_pwm -- multiprocessing.Event; loop runs while set
        event_terminate  -- multiprocessing.Event (stored, not yet used here)
        pwm_freq, pwm_duty -- shared Values read under lock_freq
        lock_freq        -- multiprocessing.Lock protecting freq/duty
        pin              -- GPIO pin number to drive
    """
    def __init__(self, **kwargs):
        # Bug fix: the original called super(PWMProcess, ...) -- a misspelling
        # of this class's name (PMWProcess) that raised NameError -- and also
        # forwarded **kwargs to Process.__init__, which rejects unknown
        # keyword arguments.  Call the base constructor with no arguments.
        super(PMWProcess, self).__init__()
        self.event_enable_pwm = kwargs['event_enable_pwm']
        self.event_terminate = kwargs['event_terminate']
        self.pwm_freq = kwargs['pwm_freq']
        self.pwm_duty = kwargs['pwm_duty']
        self.lock_freq = kwargs['lock_freq']
        self.pin = kwargs['pin']

    def run(self):
        """Process entry point: sample shared freq/duty while enabled.

        NOTE(review): the loop only samples the shared values and computes
        the period; the actual pin toggling appears unfinished in the
        original -- kept as-is.
        """
        # Bug fix: the original declared `def run():` without self.
        while self.event_enable_pwm.is_set():
            start_clock = time.time()
            with self.lock_freq:
                pwm_freq = self.pwm_freq.value
                pwm_duty = self.pwm_duty.value
            period = 1. / pwm_freq

# Backward-compat alias: the original source referred to "PWMProcess" in its
# super() call, so external code may expect either spelling.
PWMProcess = PMWProcess
class DriveCtrl():
    """Differential-drive motor controller.

    Maps numpad-style steering keys ('1'-'9') to drive vectors and applies
    them to the motor driver pins through wiringpi software PWM (an
    alternative RPi.GPIO implementation is provided by the rpio_* methods).

    A drive vector is a 6-tuple
        (duty_left, duty_right, left_fwd, right_fwd, left_rear, right_rear)
    where the two duty factors in [0, 1] are scaled by the current speed and
    the four remaining entries are direction-pin states.

    Expected kwargs:
        config -- libconfig-style object with .lookup() and gpio.* settings
        queues -- dict of queues; 'log' and 'tx_msg' are used for messages
    """
    def __init__(self, **kwargs):
        self.cfg = kwargs['config']
        self.queues = kwargs['queues']
        ## motor parameters :
        self.speeds = (10,20,50,100)
        if self.cfg.lookup('drive.speeds') is not None:
            # Bug fix: the original used max(100, x), which forced every
            # configured speed UP to at least 100 and made all speed settings
            # identical.  Clamp each speed DOWN to the 0-100 softPWM
            # duty-cycle range instead.
            self.speeds = tuple([min(100,x) for x in self.cfg.lookup('drive.speeds')])
        self.max_speed = max(self.speeds)
        self.nb_speeds = len(self.speeds)
        self.current_speed = self.speeds[0]
        self.queues['log'].put('drive:nb speeds : %d'%(self.nb_speeds))
        ## pins (populated by read_gpio_map_from_config below) :
        self.power_pins={'L':0,'R':0}
        self.direction_pins = {'L':0,'R':0}
        self.monitor_pins={'LF':0,'LB':0,'RB':0,'RF':0}
        self.pin_power_left = 0
        self.pin_power_right = 0
        self.pin_direction_left = 0
        self.pin_direction_right = 0
        ## PWM options :
        if self.cfg.lookup('gpio.pwm_freq'):
            self.pwm_freq = float(self.cfg.gpio.pwm_freq)
        else:
            self.pwm_freq = 50.0
        ###################### DEFAULT DRIVE VECTORS #######################
        # (duty_left, duty_right, left_fwd, right_fwd, left_rear, right_rear)
        # Cardinal directions drive both tracks at full duty; the diagonals
        # slow one track (0.8 / 0.2) to curve; '5' is full stop.
        self.vec_north = (1.0,1.0,1,1,0,0)
        self.vec_north_east = (0.8,0.2,1,1,0,0)
        self.vec_east = (1.0,1.0,1,0,0,1)
        self.vec_south_east = (0.8,0.2,0,0,1,1)
        self.vec_south = (1.0,1.0,0,0,1,1)
        self.vec_south_west = (0.2,0.8,0,0,1,1)
        self.vec_west = (1.0,1.0,0,1,1,0)
        self.vec_north_west = (0.2,0.8,1,1,0,0)
        self.vec_full_stop = (0,0,0,0,0,0)
        self.load_drive_vectors()
        self.current_vector = self.vec_full_stop
        ## read the mapping of GPIO pins
        self.read_gpio_map_from_config()
        self.gpio_init()
        ## numpad key -> drive vector ('8' = forward, '2' = reverse, ...)
        self.dict_steer = {'8':self.vec_north, \
                '9':self.vec_north_east, \
                '6':self.vec_east,\
                '3':self.vec_south_east,\
                '2':self.vec_south,\
                '1':self.vec_south_west,\
                '4':self.vec_west,\
                '7':self.vec_north_west,\
                '5':self.vec_full_stop}

    def load_drive_vectors(self):
        """Override the default drive vectors with any found in the config."""
        for vecname in ['north','north_east','east','south_east','south','south_west','west','north_west']:
            vecpath = 'drive.vectors.'+vecname
            if self.cfg.lookup(vecpath) is not None:
                vecarray = self.cfg.lookup(vecpath)
                if len(vecarray) != 6:
                    # malformed vector: log it but keep the built-in default
                    self.queues['log'].put('drive:error: drive vector %s in config file'%(vecname))
                setattr(self,'vec_'+vecname, tuple([x for x in vecarray]))

    def read_gpio_map_from_config(self):
        """Load the BCM pin numbers for PWM and direction pins from config."""
        self.pin_power_left = self.cfg.gpio.pin_pwm_left
        self.pin_power_right = self.cfg.gpio.pin_pwm_right
        self.pin_direction_left_forward = self.cfg.gpio.pin_direction_left_forward
        self.pin_direction_right_forward = self.cfg.gpio.pin_direction_right_forward
        self.pin_direction_left_rear = self.cfg.gpio.pin_direction_left_rear
        self.pin_direction_right_rear = self.cfg.gpio.pin_direction_right_rear

    def gpio_init(self):
        """Configure all pins as outputs and start wiringpi software PWM."""
        wp.wiringPiSetupSys()
        # Set output for those pins :
        wp.pinMode(self.pin_power_left, wp.OUTPUT)
        wp.pinMode(self.pin_power_right, wp.OUTPUT)
        wp.pinMode(self.pin_direction_left_forward, wp.OUTPUT)
        wp.pinMode(self.pin_direction_right_forward, wp.OUTPUT)
        wp.pinMode(self.pin_direction_left_rear, wp.OUTPUT)
        wp.pinMode(self.pin_direction_right_rear, wp.OUTPUT)
        ## create the SoftPwm on power pins (duty range 0..max_speed) :
        wp.softPwmCreate(self.pin_power_left, 0, self.max_speed)
        wp.softPwmCreate(self.pin_power_right, 0, self.max_speed)
        ## reset everyone :
        self.gpio_zero()

    def rpio_init(self):
        """RPi.GPIO variant of gpio_init (alternative backend)."""
        ## open pins for output :
        rpio.setup(self.pin_power_left, rpio.OUT)
        rpio.setup(self.pin_power_right, rpio.OUT)
        rpio.setup(self.pin_direction_left_forward, rpio.OUT)
        rpio.setup(self.pin_direction_right_forward, rpio.OUT)
        rpio.setup(self.pin_direction_left_rear, rpio.OUT)
        rpio.setup(self.pin_direction_right_rear, rpio.OUT)
        ## open pins for input :
        # disabled for now
        ## setup software pwm
        self.pwm_left = rpio.PWM(self.pin_power_left, self.pwm_freq)
        self.pwm_right = rpio.PWM(self.pin_power_right, self.pwm_freq)
        self.pwm_left.start(0)
        self.pwm_right.start(0)

    def gpio_zero(self):
        """Set PWM duty to 0 and clear every direction pin (wiringpi)."""
        wp.softPwmWrite(self.pin_power_left, 0)
        wp.softPwmWrite(self.pin_power_right, 0)
        wp.digitalWrite(self.pin_direction_left_forward, 0)
        wp.digitalWrite(self.pin_direction_right_forward, 0)
        wp.digitalWrite(self.pin_direction_left_rear, 0)
        wp.digitalWrite(self.pin_direction_right_rear, 0)

    def rpio_zero(self):
        """Set PWM duty to 0 and clear every direction pin (RPi.GPIO)."""
        self.pwm_left.ChangeDutyCycle(0)
        self.pwm_right.ChangeDutyCycle(0)
        rpio.output(self.pin_direction_left_forward, 0)
        rpio.output(self.pin_direction_right_forward, 0)
        rpio.output(self.pin_direction_left_rear, 0)
        rpio.output(self.pin_direction_right_rear, 0)

    def gpio_steer(self, drive_vector):
        """Apply *drive_vector* to the pins, scaled by the current speed."""
        wp.softPwmWrite(self.pin_power_left, int(self.current_speed*drive_vector[0]))
        wp.softPwmWrite(self.pin_power_right, int(self.current_speed*drive_vector[1]))
        wp.digitalWrite(self.pin_direction_left_forward, drive_vector[2])
        wp.digitalWrite(self.pin_direction_right_forward, drive_vector[3])
        wp.digitalWrite(self.pin_direction_left_rear, drive_vector[4])
        wp.digitalWrite(self.pin_direction_right_rear, drive_vector[5])
        # Bug fix: remember the vector like rpio_steer does, so that a 'DS'
        # speed-change command re-applies the latest steering direction
        # instead of a stale one.
        self.current_vector = drive_vector
        actual_vec = (int(self.current_speed*drive_vector[0]), int(self.current_speed*drive_vector[1]),drive_vector[2], drive_vector[3], drive_vector[4], drive_vector[5])
        msg='drive:steering, drive vector: %s, ppl %d ppr %d pdlf %d pdrf %d pdlr %d pdrr %d'%(str(actual_vec),self.pin_power_left, self.pin_power_right, self.pin_direction_left_forward, self.pin_direction_right_forward, self.pin_direction_left_rear, self.pin_direction_right_rear)
        self.queues['tx_msg'].put(msg)
        self.queues['log'].put(msg)

    def rpio_steer(self,drive_vector):
        """RPi.GPIO variant of gpio_steer."""
        self.pwm_left.ChangeDutyCycle(self.current_speed*drive_vector[0])
        self.pwm_right.ChangeDutyCycle(self.current_speed*drive_vector[1])
        rpio.output(self.pin_direction_left_forward, drive_vector[2])
        rpio.output(self.pin_direction_right_forward, drive_vector[3])
        rpio.output(self.pin_direction_left_rear, drive_vector[4])
        rpio.output(self.pin_direction_right_rear, drive_vector[5])
        actual_vec = (int(self.current_speed*drive_vector[0]), int(self.current_speed*drive_vector[1]),drive_vector[2], drive_vector[3], drive_vector[4], drive_vector[5])
        msg='drive:steering, drive vector: %s, ppl %d ppr %d pdlf %d pdrf %d pdlr %d pdrr %d\n'%(str(actual_vec),self.pin_power_left, self.pin_power_right, self.pin_direction_left_forward, self.pin_direction_right_forward, self.pin_direction_left_rear, self.pin_direction_right_rear)
        self.current_vector = drive_vector
        self.queues['tx_msg'].put(msg)
        self.queues['log'].put(msg)

    def rpio_cleanup(self):
        """Stop both PWM channels and release the GPIO (RPi.GPIO backend)."""
        self.pwm_left.stop()
        self.pwm_right.stop()
        rpio.cleanup()

    def execute_drive_cmd(self,raw_cmd):
        """Dispatch a raw drive command string.

        Supported commands:
            'DG[1-9]'  -- steer using the numpad mapping in dict_steer
            'DS<int>'  -- select speed index (clamped to the highest index)
            'DM'       -- monitoring request (not implemented yet)
        """
        self.queues['log'].put("drive:executing cmd :%s"%raw_cmd)
        if len(raw_cmd)>2:
            if raw_cmd[1] == 'G':
                ## command 'DG[1-9]' : steering command
                if raw_cmd[2] in self.dict_steer:
                    self.gpio_steer(self.dict_steer[raw_cmd[2]])
                else:
                    self.queues['tx_msg'].put('drive:unknown steering command key \"%s\" (available : [1-9]).\n'%(raw_cmd[2]))
            elif raw_cmd[1] == 'S':
                ## command 'DS[0-9]' : change speed
                # Robustness fix: a non-numeric payload used to raise an
                # uncaught ValueError; report it like other malformed input.
                try:
                    speed_setting = int(raw_cmd[2:])
                except ValueError:
                    self.queues['tx_msg'].put('drive:discarding malformed speed setting command \"%s\"\n'%raw_cmd)
                    return
                if speed_setting >= 0:
                    self.current_speed = self.speeds[min(self.nb_speeds-1,speed_setting)]
                    self.gpio_steer(self.current_vector)
                    self.queues['log'].put('drive:current speed set to %s'%(str(self.current_speed)))
                else:
                    self.queues['tx_msg'].put('drive:could not change speed setting to %d, must be positive'%(speed_setting))
            elif raw_cmd[1] == 'M':
                ## command 'DM' : requesting monitoring data
                pass
            else:
                self.queues['tx_msg'].put('drive:discarding malformed speed setting command \"%s\"\n'%raw_cmd)

    def checks(self, remote=False):
        """Log the configured drive vectors and speeds; echo to tx_msg when
        *remote* is True."""
        ## check drive vectors :
        for vecname in ['north','north_east','east','south_east','south','south_west','west','north_west']:
            msg = 'drive:checking drive vector %s:%s'%(vecname,getattr(self,'vec_'+vecname).__repr__())
            self.queues['log'].put(msg)
            if remote:
                self.queues['tx_msg'].put(msg)
        ## check speed settings
        msg='drive:checking available speeds: %s'%(str(self.speeds))
        self.queues['log'].put(msg)
        if remote:
            self.queues['tx_msg'].put(msg)

    def shutdown(self):
        """Zero all outputs and log the stop; GPIO is left configured."""
        self.gpio_zero()
        #self.gpio_cleanup()
        self.queues['log'].put('drive:stop.')
if __name__ == "__main__":
    ## Stand-alone hardware test: ramp the two PWM duty cycles in opposite
    ## directions until interrupted with Ctrl-C.
    pwm_freq = 100
    pin_power_left = 16
    pin_power_right = 20
    pin_direction_left_forward = 6
    pin_direction_right_forward = 13
    pin_direction_left_rear = 19
    pin_direction_right_rear = 26
    rpio.setmode(rpio.BCM)
    ## open pins for output :
    rpio.setup(pin_power_left, rpio.OUT)
    rpio.setup(pin_power_right, rpio.OUT)
    # Bug fix: the original referenced undefined names pin_direction_left /
    # pin_direction_right (NameError); set up the four direction pins that
    # are actually defined above.
    rpio.setup(pin_direction_left_forward, rpio.OUT)
    rpio.setup(pin_direction_right_forward, rpio.OUT)
    rpio.setup(pin_direction_left_rear, rpio.OUT)
    rpio.setup(pin_direction_right_rear, rpio.OUT)
    ## open pins for input :
    # disabled for now
    ## setup software pwm, both channels starting at 50% duty
    pwm_left = rpio.PWM(pin_power_left, pwm_freq)
    pwm_right = rpio.PWM(pin_power_right, pwm_freq)
    pwm_left.start(50)
    pwm_right.start(50)
    current_cycle_up = 50
    current_cycle_down = 50
    goon = True
    periode = 0.01  # seconds between duty-cycle updates
    step = 1
    while goon:
        try:
            pwm_left.ChangeDutyCycle(current_cycle_up)
            pwm_right.ChangeDutyCycle(current_cycle_down)
            # Bug fix: Python-2 print statement -> print() function.
            print(current_cycle_up, current_cycle_down)
            # wrap both duty cycles around within 0..99
            current_cycle_up = abs((current_cycle_up + step) % 100)
            current_cycle_down = abs((current_cycle_down - step) % 100)
            time.sleep(periode)
        except KeyboardInterrupt:
            goon = False
    rpio.cleanup()
| bcare/roverpi | roverserver/enginectrl.py | Python | gpl-3.0 | 14,624 |
from PySide import QtCore, QtGui
from pysql_browser import MainWindow
import sys
def main():
    """Create the Qt application, show the main SQL-browser window and run
    the event loop; exits the interpreter with Qt's return code."""
    app = QtGui.QApplication(sys.argv)
    window = MainWindow()
    window.show()
    # app.exec_() blocks until the last window closes; its return code is
    # propagated as the process exit status.
    sys.exit(app.exec_())
if __name__ == '__main__':
    # Launch the GUI only when executed as a script, not on import.
    main()
| plumdog/pysql_browser | main.py | Python | gpl-3.0 | 243 |