Dataset schema (one row per sample; each sample's source code is stored split into prefix, middle, and suffix fields):

| column | type | range |
|---|---|---|
| repo_name | string | length 5–100 |
| path | string | length 4–231 |
| language | string | 1 class (Python) |
| license | string | 15 classes |
| size | int64 | 6–947k |
| score | float64 | 0–0.34 |
| prefix | string | length 0–8.16k |
| middle | string | length 3–512 |
| suffix | string | length 0–8.17k |

Each sample below is introduced by a metadata row of the form repo_name | path | language | license | size | score, followed by its code.
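To make the row layout concrete, here is a minimal sketch of reassembling one sample's original file from its three code fields. It assumes rows are loaded as plain dicts keyed by the column names above; the loader and the reassemble helper are illustrative only and not part of this dump.

def reassemble(row):
    """Join one sample's three code fields back into the original file text."""
    return row["prefix"] + row["middle"] + row["suffix"]

row = {"prefix": "import o", "middle": "s\nimport s", "suffix": "ys\n"}
assert reassemble(row) == "import os\nimport sys\n"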
technosaurus/samba4-GPL2 | webapps/qooxdoo-0.6.5-sdk/frontend/framework/tool/icon/modules/kde-to-freedesktop.py | Python | gpl-2.0 | 3,279 | 0.017688
#!/usr/bin/env python
################################################################################
#
# qooxdoo - the new era of web development
#
# http://qooxdoo.org
#
# Copyright:
# 2007 1&1 Internet AG, Germany, http://www.1and1.org
#
# License:
# LGPL: http://www.gnu.org/licenses/lgpl.html
# EPL: http://www.eclipse.org/org/documents/epl-v10.php
# See the LICENSE file in the project's top-level directory for details.
#
# Authors:
# * Sebastian Werner (wpbasti)
#
################################################################################
import os
import sys
import shutil
import optparse
ERROR_STR = "Error removing %(path)s, %(error)s"  # assumed definition: the constant is used below but not defined in this excerpt
def rmgeneric(path, __func__):
try:
__func__(path)
except OSError, (errno, strerror):
print ERROR_STR % {'path' : path, 'error': strerror }
def removeall(path):
if not os.path.isdir(path):
return
files=os.listdir(path)
for x in files:
fullpath=os.path.join(path, x)
if os.path.isfile(fullpath):
f=os.remove
rmgeneric(fullpath, f)
elif os.path.isdir(fullpath):
removeall(fullpath)
f=os.rmdir
rmgeneric(fullpath, f)
def copy_file(kde, fd, options):
img_sizes = [16, 22, 32, 48, 64, 72, 96, 128]
found = []
notfound = []
if options.verbose:
print " - Processing: %s -> %s" % (kde, fd)
for size in img_sizes:
kde_file = "%s/%sx%s/%s.png" % (options.input, size, size, kde)
fd_file = "%s/%sx%s/%s.png" % (options.output, size, size, fd)
if os.path.exists(kde_file):
fd_dir = os.path.dirname(fd_file)
if not os.path.exists(fd_dir):
os.makedirs(fd_dir)
shutil.copyfile(kde_file, fd_file)
found.append(size)
else:
notfound.append(size)
if options.verbose:
dbg = " "
for size in img_sizes:
if size in found:
ret = "Y"
else:
ret = "N"
dbg += " [%s] %s" % (ret, size)
print dbg
def main():
parser = optparse.OptionParser("usage: %prog [options]")
parser.add_option("-q", "--quiet", action="store_false", dest="verbose", default=False, help="Quiet output mode.")
parser.add_option("-v", "--verbose", action="store_true", dest="verbose", help="Verbose output mode.")
parser.add_option("--input", "-i", action="store", dest="input", metavar="DIRECTORY", help="Input directory")
parser.add_option("--output", "-o", action="store", dest="output", metavar="DIRECTORY", help="Output directory")
(options, args) = parser.parse_args(sys.argv[1:])
if options.input == None or options.output == None:
basename = os.path.basename(sys.argv[0])
print "You must define both, the input and output folders!"
print "usage: %s [options]" % basename
print "Try '%s -h' or '%s --help' to show the help message." % (basename, basename)
sys.exit(1)
print " - Cleaning up..."
removeall(options.output)
dat = open("%s/../data/kde_freedesktop.dat" % os.path.dirname(sys.argv[0]))
print " - Copying files..."
for line in dat.readlines():
line = line.strip();
if line == "" or line[0] == "#":
continue
if not line[0] in ["+", "*"]:
continue
line = line[1:]
(fd, kde) = map(lambda x: x.strip(), line.split("="))
copy_file(kde, fd, options)
if __name__ == "__main__":
sys.exit(main())
zothor/software | MiniprojectProgrammerenWeek3/buttons.py | Python | gpl-2.0 | 17,539 | 0.032898
__author__ = 'Bram + Frank + Ricardo + Kevin + Jouke-bouwe'
import csv
from tkinter import *
from MiniprojectProgrammerenWeek3 import FilmsOphalen, Aankoop
#Points to the csv files that are used. Must be adjusted if this is run on another pc
csvKlantBestand = 'C:/Users/Rica/PycharmProjects/software/MiniprojectProgrammerenWeek3/klantBestand.csv'
csvFrankBestand = 'C:/Users/Rica/PycharmProjects/software/MiniprojectProgrammerenWeek3/frankBestand.csv'
csvKevinBestand = 'C:/Users/Rica/PycharmProjects/software/MiniprojectProgrammerenWeek3/kevinBestand.csv'
csvBramBestand = 'C:/Users/Rica/PycharmProjects/software/MiniprojectProgrammerenWeek3/bramBestand.csv'
csvJoukeBestand = 'C:/Users/Rica/PycharmProjects/software/MiniprojectProgrammerenWeek3/joukeBestand.csv'
csvRicardoBestand = 'C:/Users/Rica/PycharmProjects/software/MiniprojectProgrammerenWeek3/ricardoBestand.csv'
'''Variables that were used in the loop that would create the buttons
#variables for fetching films
naarbeneden = 330
opgehaaldefilm=0
alleFilmButtons = []
'''
#variables for storing who is logged in
huidigEmail = ""
huidigNaam = ""
huidigAchternaam = ""
ingelogdOfNiet = False
#Variables used to display film information
labelnaam = ""
labelfilm = ""
labelduur = ""
#creates the main window
root = Tk()
root["bg"] = "grey"
root.wm_title("Movie-Net")
root.resizable(width= False, height= True)
root.geometry("1200x720+0+0")
#fieldnames for storing key values in the csv file
fieldnames = ["email", "wachtwoord", "naam", "achternaam"]
#creates the header of the file where customer information is stored
def maakDictionaryHeaders():
klantBestand = open(csvKlantBestand, 'a', newline='')
writer = csv.writer(klantBestand,dialect='excel')
writer.writerow(fieldnames)
maakDictionaryHeaders()
#stores the textvariable as a string
Email = StringVar()
Name = StringVar()
Surname = StringVar()
Password = StringVar()
Password2 = StringVar()
login1 = StringVar()
login2 = StringVar()
#the background image
canvas = Canvas( width = 300, height = 200, bg = 'Gray')
canvas.pack( expand = YES, fill = BOTH)
gif1 = PhotoImage(file = 'achtergrond.gif')
canvas.create_image(0, 0, image = gif1, anchor = NW)
#the behavior of the buttons on the login screen
def inlogscherm():
top = Toplevel()
top.title("login")
top["bg"] = "grey"
#Reads the user's inputs
login = Button(top, text = "login", command = (lambda: leesUit(top)))
login.grid(row = 3, columnspan = 2)
label_1 = Label(top, text = "Email", bg = "grey", fg = "white")
label_2 = Label(top, text = "Password", bg = "grey", fg = "white")
entry_1 = Entry(top, textvariable = login1)
entry_2 = Entry(top, textvariable = login2)
label_1.grid(row = 0, sticky = W)
label_2.grid(row = 1, sticky = W)
entry_1.grid(row = 0, column = 1)
entry_2.grid(row = 1, column = 1)
#screen for entering registration details
def registreren():
top = Toplevel()
top.title("Register")
top.resizable(width= False, height= False)
top.geometry("500x250+50+70")
top["bg"] = "grey"
helptekst = Canvas(top, width = 200, height = 50)
helptekst.create_rectangle(200,50,0,0, fill = "red")
helptekst.create_text(100, 25, text = "Fill in the blanks", fill = "white", font = ("broadway", 12))
helptekst.place(relx= 0.3,rely = 0.05)
bottomframe = Frame(top)
bottomframe.pack(side = BOTTOM)
bottomframe["bg"] = "grey"
label_1 = Label(bottomframe, text = "Email", bg = "grey", fg = "white")
label_2 = Label(bottomframe, text = "Name", bg = "grey", fg = "white")
label_3 = Label(bottomframe, text = "Surname", bg = "grey", fg = "white")
label_4 = Label(bottomframe, text = "Password", bg = "grey", fg = "white")
label_5 = Label(bottomframe, text = "Repeat password", bg = "grey", fg = "white")
entry_1 = Entry(bottomframe, textvariable = Email)
entry_2 = Entry(bottomframe, textvariable = Name)
entry_3 = Entry(bottomframe, textvariable = Surname)
entry_4 = Entry(bottomframe, textvariable = Password)
entry_5 = Entry(bottomframe, textvariable = Password2)
label_1.grid(row = 1, sticky = W)
label_2.grid(row = 2, sticky = W)
label_3.grid(row = 3, sticky = W)
label_4.grid(row = 4, sticky = W)
label_5.grid(row = 5, sticky = W)
entry_1.grid(row = 1, column = 1)
entry_2.grid(row = 2, column = 1)
entry_3.grid(row = 3, column = 1)
entry_4.grid(row = 4, column = 1)
entry_5.grid(row = 5, column = 1)
submit = Button(bottomframe, text = "submit", command = (lambda: opslaan(top)))
submit.grid(row = 6, columnspan = 2)
#saves the user information in the csv file
def opslaan(top):
global naam
top.destroy()
email = Email.get()
naam = Name.get()
achternaam = Surname.get()
wachtwoord = Password.get()
wachtwoord2 = Password2.get()
#Checks whether the password was entered correctly twice
if wachtwoord == wachtwoord2:
try:
klantBestand = open(csvKlantBestand, 'a', newline='')
writer = csv.DictWriter(klantBestand,delimiter=',', fieldnames=fieldnames)
writer.writerow({"email": email, "wachtwoord": wachtwoord, "naam":naam, "achternaam": achternaam})
finally:
klantBestand.close()
else:
print("Wachtwoord komt niet overeen")
#New main screen after logging in
def welkomtekst():
user = Canvas(root,width = 400, height = 100)
user.create_rectangle(400, 200, 0 , 0, fill = "red")
user.create_text(190, 50,text = ("Welcome " + huidigNaam ), fill = "white", font = ("broadway", 20))
user.place(x = 10, y = 10)
#Reads the stored data from the csv file
def leesUit(top):
global inlogEmail
inlogEmail = login1.get()
inlogWachtwoord = login2.get()
print(inlogEmail)
print(inlogWachtwoord)
try:
leesKlantUit = open(csvKlantBestand, 'r')
reader = csv.DictReader(leesKlantUit, delimiter=',')
for row in reader:
if row["email"] == inlogEmail and row["wachtwoord"] == inlogWachtwoord:
print("Inloggen is een succes!")
print ("Ingeloggen is een succes")
global huidigEmail, huidigNaam, huidigAchternaam
huidigEmail = row["email"]
huidigNaam = row["naam"]
huidigAchternaam = row["achternaam"]
#clears the screen
global buton_1
global buton_2
global today
global w
buton_1.destroy()
buton_2.destroy()
top.destroy()
w.destroy()
global ingelogdOfNiet
ingelogdOfNiet = True
welkomtekst()
else:
print("Inloggen is niet gelukt")
finally:
leesKlantUit.close()
#create the login and register buttons on the main screen
buton_1 = Button(root, text = " login ", command = inlogscherm, bg = "red", fg = "white", width = 10 ,height = 1, font = ("broadway", 12))
buton_2 = Button(root, text = "register", command = registreren, bg = "red", fg = "white", width = 10, height = 1, font = ("broadway", 12))
buton_1.place(x = 800, y = 30)
buton_2.place(x = 800, y = 80)
#Set the layout of the main screen
w = Canvas(root,width = 400, height = 100)
w.create_rectangle(400, 200, 0 , 0, fill = "red")
w.create_text(190, 50,text = "Welcome to Movie-Net", fill = "white", font = ("broadway", 20))
w.place(x = 10, y = 10)
#the bar with today's offerings
def today():
today = Canvas(root,width = 1100, height = 50)
today.create_rectangle(1100, 50, 0, 0, fill = "red")
today.create_text(100, 25,text = "Today on Movie-Net", fill = "white", font = ("broadway", 12))
today.place(x = 10, y = 150)
def filmsOphalen():
''' This was the code to create the buttons with a loop, but we did not manage to get it working
global opgehaaldefilm, naarbeneden,alleFilmButtons
attributes for fetching the films
nummersToevoegen =
erdc-cm/air-water-vv | 2d/waveTransformation/Dingemans_wave_shoaling/DingemansWaveShoaling.py | Python | mit | 13,687 | 0.016001
"""
Dingemans Wave Shoaling
"""
from proteus import Domain, Context
from proteus.mprans import SpatialTools as st
from proteus import WaveTools as wt
from math import *
import numpy as np
opts=Context.Options([
# predefined test cases
("water_level", 0.86, "Height of free surface above seabed"),
# tank
("tank_dim", (58., 3.), "Dimensions of the tank"),
("generation", True, "Generate waves at the left boundary (True/False)"),
("absorption", True, "Absorb waves at the right boundary (True/False)")
|
,
("tank_sponge", (5., 5.), "Length of relaxation zones zones (left, right)"),
("free_slip", True, "Should tank walls have free slip conditions "
"(otherwise, no slip conditions will be applied)."),
# waves
("waves", True, "Generate waves (True/False)"),
("wave_period", 2.02, "Period of the waves"),
("
|
wave_height", 0.02, "Height of the waves"),
("wave_depth", 0.86, "Wave depth"),
("wave_dir", (1.,0.,0.), "Direction of the waves (from left boundary)"),
("wave_wavelength", 5.037, "Direction of the waves (from left boundary)"), #calculated by FFT
("wave_type", 'Fenton', "type of wave"),
("Bcoeff", np.array([0.01402408, 0.00008097, 0.00000013, 0.00000000, 0.00000000,
0.00000000, 0.00000000, 0.00000000]), "Bcoeffs"),
("Ycoeff", np.array([0.01246994, 0.00018698, 0.00000300, 0.00000006, 0.00000000,
0.00000000, 0.00000000, 0.00000000]), "Ycoeffs"),
("fast", True, "switch for fast cosh calculations in WaveTools"),
# mesh refinement
("refinement", False, "Gradual refinement"),
("he", 0.04, "Set characteristic element size"),
("he_max", 10, "Set maximum characteristic element size"),
("he_max_water", 10, "Set maximum characteristic in water phase"),
("refinement_freesurface", 0.1,"Set area of constant refinement around free surface (+/- value)"),
("refinement_caisson", 0.,"Set area of constant refinement (Box) around caisson (+/- value)"),
("refinement_grading", np.sqrt(1.1*4./np.sqrt(3.))/np.sqrt(1.*4./np.sqrt(3)), "Grading of refinement/coarsening (default: 10% volume)"),
# numerical options
("gen_mesh", True, "True: generate new mesh every time. False: do not generate mesh if file exists"),
("use_gmsh", True, "True: use Gmsh. False: use Triangle/Tetgen"),
("movingDomain", False, "True/False"),
("T", 30.0, "Simulation time"),
("dt_init", 0.001, "Initial time step"),
("dt_fixed", None, "Fixed (maximum) time step"),
("timeIntegration", "backwardEuler", "Time integration scheme (backwardEuler/VBDF)"),
("cfl", 0.5 , "Target cfl"),
("nsave", 5, "Number of time steps to save per second"),
("useRANS", 0, "RANS model"),
])
# ----- CONTEXT ------ #
# waves
omega = 1.
if opts.waves is True:
period = opts.wave_period
omega = 2*np.pi/opts.wave_period
height = opts.wave_height
mwl = opts.water_level
depth = opts.wave_depth
direction = opts.wave_dir
waves = wt.MonochromaticWaves(period=period, waveHeight=height, mwl=mwl, depth=depth,
g=np.array([0., -9.81, 0.]), waveDir=direction,
wavelength=opts.wave_wavelength,
waveType=opts.wave_type,
Ycoeff=np.array(opts.Ycoeff),
Bcoeff=np.array(opts.Bcoeff),
Nf=len(opts.Bcoeff),
fast=opts.fast)
wavelength = waves.wavelength
# tank options
waterLevel = opts.water_level
tank_dim = opts.tank_dim
tank_sponge = opts.tank_sponge
# ----- DOMAIN ----- #
domain = Domain.PlanarStraightLineGraphDomain()
# refinement
he = opts.he
smoothing = he*3.
# ----- TANK ------ #
sloped_shore = [[[9.22, 0.],
[9.64, 0.06],
[15.01, 0.06],
[27.04, 0.66],
[31.04, 0.66],
[37.07, 0.06],
[45.39, 0.06],
[45.81, 0.]],]
tank = st.TankWithObstacles2D(domain=domain,
dim=tank_dim,
obstacles=sloped_shore)
# ----- GENERATION / ABSORPTION LAYERS ----- #
tank.setSponge(x_n=tank_sponge[0], x_p=tank_sponge[1])
dragAlpha = 10.*omega/1e-6
if opts.generation:
tank.setGenerationZones(x_n=True, waves=waves, dragAlpha=dragAlpha, smoothing = smoothing)
if opts.absorption:
tank.setAbsorptionZones(x_p=True, dragAlpha = dragAlpha)
# ----- BOUNDARY CONDITIONS ----- #
# Waves
tank.BC['x-'].setUnsteadyTwoPhaseVelocityInlet(waves, smoothing=smoothing, vert_axis=1)
# open top
tank.BC['y+'].setAtmosphere()
if opts.free_slip:
tank.BC['y-'].setFreeSlip()
tank.BC['x+'].setFreeSlip()
if not opts.generation:
tank.BC['x-'].setFreeSlip()
else: # no slip
tank.BC['y-'].setNoSlip()
tank.BC['x+'].setNoSlip()
# sponge
tank.BC['sponge'].setNonMaterial()
for bc in tank.BC_list:
bc.setFixedNodes()
# ----- GAUGES ----- #
gauge_x = [6.26, 10.26, 12.66, 23.26, 27.26, 29.26, 31.26, 33.66, 36.86, 40.26, 44.26]
gauge_y = []
column_gauge_locations = []
for i in range(len(gauge_x)):
if 9.22 < gauge_x[i] < 9.64:
gauge_y.append( (gauge_x[i]-9.22)*0.06/(9.64-9.22) )
elif 9.64 <= gauge_x[i] <= 15.01:
gauge_y.append(0.06)
elif 15.01 < gauge_x[i] < 27.04:
gauge_y.append( 0.06+(gauge_x[i]-15.01)*(0.66-0.06)/(27.04-15.01) )
elif 27.04 <= gauge_x[i] <= 31.04:
gauge_y.append(0.66)
elif 31.04 < gauge_x[i] < 37.07:
gauge_y.append( 0.66+(gauge_x[i]-31.04)*(0.06-0.66)/(37.07-31.04) )
elif 37.07 <= gauge_x[i] <= 45.39:
gauge_y.append(0.06)
elif 45.39 < gauge_x[i] < 45.81:
gauge_y.append( 0.06+(gauge_x[i]-45.39)*(0.-0.06)/(45.81-45.39) )
else:
gauge_y.append(0.)
column_gauge_locations.append(((gauge_x[i], gauge_y[i], 0.), (gauge_x[i], tank_dim[1], 0.)))
tank.attachLineIntegralGauges('vof', gauges=((('vof',),column_gauge_locations),), fileName='column_gauges.csv')
tank.facets = np.array([[[i for i in range(12)]]]+[[[11, 12, 13, 10]]]+[[[8, 14, 15, 9]]])
# ----- ASSEMBLE DOMAIN ----- #
domain.MeshOptions.use_gmsh = opts.use_gmsh
domain.MeshOptions.genMesh = opts.gen_mesh
domain.MeshOptions.he = he
domain.use_gmsh = opts.use_gmsh
st.assembleDomain(domain)
# ----- REFINEMENT OPTIONS ----- #
import py2gmsh
from MeshRefinement import geometry_to_gmsh
mesh = geometry_to_gmsh(domain)
field_list = []
box = 0.1001
box1 = py2gmsh.Fields.Box(mesh=mesh)
box1.VIn = 0.03
box1.VOut = he
box1.XMin = -tank_sponge[0]
box1.XMax = tank_dim[0]+tank_sponge[1]
box1.YMin = waterLevel-box
box1.YMax = waterLevel+box
field_list += [box1]
p0 = py2gmsh.Entity.Point([-tank_sponge[0], waterLevel+box, 0.], mesh=mesh)
p1 = py2gmsh.Entity.Point([tank_dim[0]+tank_sponge[1], waterLevel+box, 0.], mesh=mesh)
p2 = py2gmsh.Entity.Point([-tank_sponge[0], waterLevel-box, 0.], mesh=mesh)
p3 = py2gmsh.Entity.Point([tank_dim[0]+tank_sponge[1], waterLevel-box, 0.], mesh=mesh)
l1 = py2gmsh.Entity.Line([p0, p1], mesh=mesh)
l2 = py2gmsh.Entity.Line([p2, p3], mesh=mesh)
grading = 1.05
bl2 = py2gmsh.Fields.BoundaryLayer(mesh=mesh)
bl2.hwall_n = 0.03
bl2.ratio = grading
bl2.EdgesList = [l1, l2]
field_list += [bl2]
fmin = py2gmsh.Fields.Min(mesh=mesh)
fmin.FieldsList = field_list
mesh.setBackgroundField(fmin)
mesh.Options.Mesh.CharacteristicLengthMax = he
domain.MeshOptions.genMesh = opts.gen_mesh
domain.MeshOptions.use_gmsh = opts.use_gmsh
domain.use_gmsh = opts.use_gmsh
geofile = 'mesh'
mesh.writeGeo(geofile+'.geo')
domain.geofile = geofile
##########################################
# Numerical Options and other parameters #
##########################################
rho_0=998.2
nu_0 =1.004e-6
rho_1=1.205
nu_1 =1.500e-5
sigma_01=0.0
g = [0., -9.81]
from math import *
from proteus import MeshTools, AuxiliaryVariables
import numpy
import proteus.MeshTools
from proteus import Domain
from proteus.Profiling import logEvent
from proteus.default_n import *
from proteus.ctransportCoeffi
omererdem/honeything | src/cwmp/google3.py | Python | gpl-3.0 | 964 | 0.001037
#!/usr/bin/python
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Fix sys.path so it can find our libraries.
This file is named google3.py because gpylint specifically ignores it when
complaining about the order of import statements - google3 should always
come before other non-python-standard imports.
"""
__author__ = 'apenwarr@google.com (Avery Pennarun)'
import tr.google3 #pylint: disable-msg=C6204,W0611
nop33/indico | indico/modules/events/features/controllers.py | Python | gpl-3.0 | 4,902 | 0.003468
# This file is part of Indico.
# Copyright (C) 2002 - 2017 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from flask import flash, request, session
from werkzeug.exceptions import Forbidden
from wtforms.fields import BooleanField
from indico.modules.events.features import logger
from indico.modules.events.features.util import (format_feature_names, get_disallowed_features, get_enabled_features,
get_feature_definition, get_feature_definitions, set_feature_enabled)
from indico.modules.events.features.views import WPFeatures
from indico.modules.events.logs import EventLogKind, EventLogRealm
from indico.modules.events.management.controllers import RHManageEventBase
from indico.util.i18n import _, ngettext
from indico.web.forms.base import FormDefaults, IndicoForm
from indico.web.forms.widgets import SwitchWidget
from indico.web.menu import render_sidemenu
from indico.web.util import jsonify_data
class RHFeaturesBase(RHManageEventBase):
pass
class RHFeatures(RHFeaturesBase):
"""Shows the list of available event features"""
def _make_form(self):
form_class = type(b'FeaturesForm', (IndicoForm,), {})
disallowed = get_disallowed_features(self.event)
for name, feature in sorted(get_feature_definitions().iteritems(), key=lambda x: x[1].friendly_name):
if name in disallowed:
continue
field = BooleanField(feature.friendly_name, widget=SwitchWidget(on_label=_('On'), off_label=_('Off')),
description=feature.description)
setattr(form_class, name, field)
defaults = {name: True for name in get_enabled_features(self.event)}
return form_class(csrf_enabled=False, obj=FormDefaults(defaults))
def _process(self):
form = self._make_form()
widget_attrs = {field.short_name: {'disabled': True} for field in form} if self.event.is_locked else {}
return WPFeatures.render_template('features.html', self.event, form=form, widget_attrs=widget_attrs)
class RHSwitchFeature(RHFeaturesBase):
"""Enables/disables a feature"""
def render_event_menu(self):
return render_sidemenu('event-management-sidemenu', active_item=WPFeatures.sidemenu_option,
event=self.event)
def _process_PUT(self):
prev = get_enabled_features(self.event)
feature = get_feature_definition(request.view_args['feature'])
if feature.name in get_disallowed_features(self.event):
raise Forbidden('Feature not available')
changed = set()
if set_feature_enabled(self.event, feature.name, True):
current = get_enabled_features(self.event)
changed = current - prev
flash(ngettext('Feature enabled: {features}', 'Features enabled: {features}', len(changed))
.format(features=format_feature_names(changed)), 'success')
logger.info("Feature '%s' for event %s enabled by %s", feature.name, self.event, session.user)
self.event.log(EventLogRealm.management, EventLogKind.positive, 'Features',
'Enabled {}'.format(feature.friendly_name), session.user)
return jsonify_data(enabled=True, event_menu=self.render_event_menu(), changed=list(changed))
def _process_DELETE(self):
prev = get_enabled_features(self.event)
feature = get_feature_definition(request.view_args['feature'])
changed = set()
if set_feature_enabled(self.event, feature.name, False):
current = get_enabled_features(self.event)
changed = prev - current
flash(ngettext('Feature disabled: {features}', 'Features disabled: {features}', len(changed))
.format(features=format_feature_names(changed)), 'warning')
logger.info("Feature '%s' for event %s disabled by %s", feature.name, self.event, session.user)
self.event.log(EventLogRealm.management, EventLogKind.negative, 'Features',
'Disabled {}'.format(feature.friendly_name), session.user)
return jsonify_data(enabled=False, event_menu=self.render_event_menu(), changed=list(changed))
trivoldus28/pulsarch-verilog | tools/local/bas-release/bas,3.9-SunOS-i386/lib/python/lib/python2.4/subprocess.py | Python | gpl-2.0 | 39,931 | 0.001928
# subprocess - Subprocesses with accessible I/O streams
#
# For more information about this module, see PEP 324.
#
# This module should remain compatible with Python 2.2, see PEP 291.
#
# Copyright (c) 2003-2005 by Peter Astrand <astrand@lysator.liu.se>
#
# Licensed to PSF under a Contributor Agreement.
# See http://www.python.org/2.4/license for licensing details.
r"""subprocess - Subprocesses with accessible I/O streams
This module allows you to spawn processes, connect to their
input/output/error pipes, and obtain their return codes. This module
intends to replace several other, older modules and functions, like:
os.system
os.spawn*
os.popen*
popen2.*
commands.*
Information about how the subprocess module can be used to replace these
modules and functions can be found below.
Using the subprocess module
===========================
This module defines one class called Popen:
class Popen(args, bufsize=0, executable=None,
stdin=None, stdout=None, stderr=None,
preexec_fn=None, close_fds=False, shell=False,
cwd=None, env=None, universal_newlines=False,
startupinfo=None, creationflags=0):
Arguments are:
args should be a string, or a sequence of program arguments. The
program to execute is normally the first item in the args sequence or
string, but can be explicitly set by using the executable argument.
On UNIX, with shell=False (default): In this case, the Popen class
uses os.execvp() to execute the child program. args should normally
be a sequence. A string will be treated as a sequence with the string
as the only item (the program to execute).
On UNIX, with shell=True: If args is a string, it specifies the
command string to execute through the shell. If args is a sequence,
the first item specifies the command string, and any additional items
will be treated as additional shell arguments.
On Windows: the Popen class uses CreateProcess() to execute the child
program, which operates on strings. If args is a sequence, it will be
converted to a string using the list2cmdline method. Please note that
not all MS Windows applications interpret the command line the same
way: The list2cmdline is designed for applications using the same
rules as the MS C runtime.
bufsize, if given, has the same meaning as the corresponding argument
to the built-in open() function: 0 means unbuffered, 1 means line
buffered, any other positive value means use a buffer of
(approximately) that size. A negative bufsize means to use the system
default, which usually means fully buffered. The default value for
bufsize is 0 (unbuffered).
stdin, stdout and stderr specify the executed programs' standard
input, standard output and standard error file handles, respectively.
Valid values are PIPE, an existing file descriptor (a positive
integer), an existing file object, and None. PIPE indicates that a
new pipe to the child should be created. With None, no redirection
will occur; the child's file handles will be inherited from the
parent. Additionally, stderr can be STDOUT, which indicates that the
stderr data from the applications should be captured into the same
file handle as for stdout.
If preexec_fn is set to a callable object, this object will be called
in the child process just before the child is executed.
If close_fds is true, all file descriptors except 0, 1 and 2 will be
closed before the child process is executed.
if shell is true, the specified command will be executed through the
shell.
If cwd is not None, the current directory will be changed to cwd
before the child is executed.
If env is not None, it defines the environment variables for the new
process.
If universal_newlines is true, the file objects stdout and stderr are
opened as a text files, but lines may be terminated by any of '\n',
the Unix end-of-line convention, '\r', the Macintosh convention or
'\r\n', the Windows convention. All of these external representations
are seen as '\n' by the Python program. Note: This feature is only
available if Python is built with universal newline support (the
default). Also, the newlines attribute of the file objects stdout,
stdin and stderr are not updated by the communicate() method.
The startupinfo and creationflags, if given, will be passed to the
underlying CreateProcess() function. They can specify things such as
appearance of the main window and priority for the new process.
(Windows only)
This module also defines two shortcut functions:
call(*args, **kwargs):
Run command with arguments. Wait for command to complete, then
return the returncode attribute.
The arguments are the same as for the Popen constructor. Example:
retcode = call(["ls", "-l"])
Exceptions
----------
Exceptions raised in the child process, before the new program has
started to execute, will be re-raised in the parent. Additionally,
the exception object will have one extra attribute called
'child_traceback', which is a string containing traceback information
from the childs point of view.
The most common exception raised is OSError. This occurs, for
example, when trying to execute a non-existent file. Applications
should prepare for OSErrors.
A ValueError will be raised if Popen is called with invalid arguments.
Security
--------
Unlike some other popen functions, this implementation will never call
/bin/sh implicitly. This means that all characters, including shell
metacharacters, can safely be passed to child processes.
Popen objects
=============
Instances of the Popen class have the following methods:
poll()
Check if child process has terminated. Returns returncode
attribute.
wait()
Wait for child process to terminate. Returns returncode attribute.
communicate(input=None)
Interact with process: Send data to stdin. Read data from stdout
and stderr, until end-of-file is reached. Wait for process to
terminate. The optional stdin argument should be a string to be
sent to the child process, or None, if no data should be sent to
the child.
communicate() returns a tuple (stdout, stderr).
Note: The data read is buffered in memory, so do not use this
method if the data size is large or unlimited.
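A short illustrative use of communicate() with a piped child, in the
Python 2 style of this module (a sketch, not taken from the original
docstring):

p = Popen(["tr", "a-z", "A-Z"], stdin=PIPE, stdout=PIPE)
out, err = p.communicate("hello")
# out == "HELLO"; err is None because stderr was not piped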
The following attributes are also available:
stdin
If the stdin argument is PIPE, this attribute is a file object
that provides input to the child process. Otherwise, it is None.
stdout
If the stdout argument is PIPE, this attribute is a file object
that provides output from the child process. Otherwise, it is
None.
stderr
If the stderr argument is PIPE, this attribute is file object that
provides error output from the child process. Otherwise, it is
None.
pid
The process ID of the child process.
returncode
The child return code. A None value indicates that the process
hasn't terminated yet. A negative value -N indicates that the
child was terminated by signal N (UNIX only).
Replacing older functions with the subprocess module
====================================================
In this section, "a ==> b" means that b can be used as a replacement
for a.
Note: All functions in this section fail (more or less) silently if
the executed program cannot be found; this module raises an OSError
exception.
In the following examples, we assume that the subprocess module is
imported with "from subprocess import *".
Replacing /bin/sh shell backquote
---------------------------------
output=`mycmd myarg`
==>
output = Popen(["mycmd", "myarg"], stdout=PIPE).communicate()[0]
Replacing shell pipe line
-------------------------
output=`dmesg | grep hda`
==>
p1 = Popen(["dmesg"], stdout=PIPE)
p2 = Popen(["grep", "hda"], stdin=p1.stdout, stdout=PIPE)
output = p2.communicate()[0]
Replacing os.system()
---------------------
sts = os.system("mycmd" + " myarg")
==>
p = Popen("mycmd" + " myarg", shell=True)
sts = os.waitpid(p.pid, 0)
Note:
* Calling the program through the shell is usually not required.
* It's easier to look at the returncode attribute than the
exitstatus.
A more real-world example would look like this:
try:
retcode = call("mycmd" + " myarg", shell=True
bjodah/sym | sym/tests/test_Dummy.py | Python | bsd-2-clause | 339 | 0
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division, print_function)
import pytest
from .. import Backend
from . import AVAILABLE_BACKENDS
@pytest.mark.parametrize('key', AVAILABLE_BACKENDS)
def test_Dummy(key):
be = Backend(key)
d0 = be.Dummy()
d1 = be.Dummy()
assert d0 == d0
assert d0 != d1
Anmol-Singh-Jaggi/gDrive-auto-sync | gDrive-auto-sync/api_boilerplate.py | Python | mit | 1,850 | 0
"""
This module is responsible for doing all the authentication.
Adapted from the Google API Documentation.
"""
from __future__ import print_function
import os
import httplib2
import apiclient
import oauth2client
try:
import argparse
flags = argparse.ArgumentParser(
parents=[oauth2client.tools.argparser]).parse_args()
except ImportError:
flags = None
SCOPES = 'https://www.googleapis.com/auth/drive'
CLIENT_SECRET_FILE = 'client_secret.json'
# Enter your project name here!!
APPLICATION_NAME = 'API Project'
def get_credentials():
"""Gets valid user credentials from storage.
If nothing has been stored, or if the stored credentials are invalid,
the OAuth2 flow is completed to obtain the new credentials.
Returns:
Credentials, the obtained credential.
"""
home_dir = os.path.expanduser('~')
credential_dir = os.path.join(home_dir, '.gdrive-credentials-cache')
if not os.path.exists(credential_dir):
os.makedirs(credential_dir)
credential_path = os.path.join(credential_dir,
'gdrive-credentials.json')
store = oauth2client.file.Storage(credential_path)
credentials = store.get()
if not credentials or credentials.invalid:
flow = oauth2client.client.flow_from_clientsecrets(
CLIENT_SECRET_FILE, SCOPES)
flow.user_agent = APPLICATION_NAME
if flags:
credentials = oauth2client.tools.run_flow(flow, store, flags)
else: # Needed only for compatibility with Python 2.6
credentials = oauth2client.tools.run(flow, store)
print('Storing credentials to ' + credential_path)
return credentials
credentials = get_credentials()
http = credentials.authorize(httplib2.Http())
file_service = apiclient.discovery.build('drive', 'v3', http=http).files()
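A hypothetical follow-on call using the file_service object built above (standard Drive v3 list usage; this snippet is not part of the original module):

results = file_service.list(pageSize=10, fields='files(id, name)').execute()
for f in results.get('files', []):
    print(f['name'], f['id'])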
Azure/azure-sdk-for-python | sdk/testbase/azure-mgmt-testbase/azure/mgmt/testbase/aio/operations/_favorite_processes_operations.py | Python | mit | 17,645 | 0.005101
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class FavoriteProcessesOperations:
"""FavoriteProcessesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~test_base.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list(
self,
resource_group_name: str,
test_base_account_name: str,
package_name: str,
**kwargs: Any
) -> AsyncIterable["_models.FavoriteProcessListResult"]:
"""Lists the favorite processes for a specific package.
:param resource_group_name: The name of the resource group that contains the resource.
:type resource_group_name: str
:param test_base_account_name: The resource name of the Test Base Account.
:type test_base_account_name: str
:param package_name: The resource name of the Test Base Package.
:type package_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either FavoriteProcessListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~test_base.models.FavoriteProcessListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.FavoriteProcessListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-12-16-preview"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'testBaseAccountName': self._serialize.url("test_base_account_name", test_base_account_name, 'str'),
'packageName': self._serialize.url("package_name", package_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('FavoriteProcessListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.TestBase/testBaseAccounts/{testBaseAccountName}/packages/{packageName}/favoriteProcesses'} # type: ignore
async def create(
self,
resource_group_name: str,
test_base_account_name: str,
package_name: str,
favorite_process_resource_name: str,
parameters: "_models.FavoriteProcessResource",
**kwargs: Any
) -> "_models.FavoriteProcessResource":
"""Create or replace a favorite process for a Test Base Package.
:param resource_group_name: The name of the resource group that contains the resource.
:type resource_group_name: str
:param test_base_account_name: The resource name of the Test Base Account.
:type test_base_account_name: str
:param package_name: The resource name of the Test Base Package.
:type package_name: str
:param favorite_process_resource_name: The resource name of a favorite process in a package. If
the process name contains characters that are not allowed in Azure Resource Name, we use
'actualProcessName' in request body to submit the name.
:type favorite_process_resource_name: str
:param parameters: Parameters supplied to create a favorite process in a package.
:type parameters: ~test_base.models.FavoriteProcessResource
:keyword callable cls: A custom type or function that will be passed the direct response
:return: FavoriteProcessResource, or the result of cls(response)
:rtype: ~test_base.models.FavoriteProcessResource
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.FavoriteProcessResource"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-12-16-preview"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.create.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
csparkresearch/eyes-online | app/static/scripts/Maths/numpy2.py | Python | gpl-3.0 | 203 | 0.029557
#Example numpy2.py
from numpy import *
a = arange(1.0, 2.0, 0.1) # start, stop & step
print a
b = linspace(1,2,11)
print b
c = ones(5)
print c
d = zeros(5)
print d
e = random.rand(5)
print e
fuzzy/mvm | mvm/term.py | Python | bsd-2-clause | 2,114 | 0.009461
# Author: Mike 'Fuzzy' Partin
# Copyright: (c) 2016-2018
# Email: fuzzy@fumanchu.org
# License: See LICENSE.md for details
# Stdlib imports
import math
import datetime
def red(txt):
return '\033[1;31m%s\033[0m' % txt
def green(txt):
return '\033[1;32m%s\033[0m' % txt
def yellow(txt):
return '\033[1;33m%s\033[0m' % txt
def blue(txt):
return '\033[1;34m%s\033[0m' % txt
def purple(txt):
return '\033[1;35m%s\033[0m' % txt
def cyan(txt):
return '\033[1;36m%s\033[0m' % txt
def white(txt):
return '\033[1;37m%s\033[0m' % txt
# Console messages
# TODO: the framework for this, should take cues from the config file
# and if no console output is desired, do the right thing, as
# well as logging to file. But that will come later.
def debug(txt):
print('%s: %s' % (cyan('DEBUG'), txt))
def info(txt):
print('%s: %s' % (green('INFO'), txt))
def warn(txt):
print('%s: %s' % (yellow('WARN'), txt))
def error(txt):
print('%s: %s' % (red('ERROR'), txt))
def fatal(txt):
error(txt)
def OutputWord(word):
return '%s%s' % (cyan(word[0].upper()), white(word[1:].lower()))
def humanTime(amount):
secs = float(datetime.timedelta(seconds=amount).total_seconds())
units = [("d", 86400), ("h", 3600), ("m", 60), ("s", 1)]
parts = []
for unit, mul in units:
if secs / mul >= 1 or mul == 1:
if mul > 1:
n = int(math.floor(secs / mul))
secs -= n * mul
else:
n = secs if secs != int(secs) else int(secs)
parts.append("%s%s" % (n, unit)) #, "" if n == 1 else "s"))
return "".join(parts)
# This function turns a size (given in bytes) into
# a human readable string
def humanSize(size):
if size <= 1024:
return '%dB' % size
else:
smap = {1: 'KB', 2: 'MB', 3: 'GB', 4: 'TB', 5: 'PB'}
mod = 1
while mod <= len(smap.keys()):
if size >= (1024 ** mod) and size < (1024 ** (mod+1)):
return '%.02f%s' % ((float(size) / float(1024.00 ** float(mod))), smap[mod])
mod += 1
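A quick illustration of the two helpers above, with values worked out from the code itself (these example calls are not part of the original module):

assert humanTime(3725) == '1h2m5s'   # 1 hour, 2 minutes, 5 seconds
assert humanSize(1536) == '1.50KB'   # 1.5 * 1024 bytes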
TheAlgorithms/Python | strings/word_patterns.py | Python | mit | 1,293 | 0.000773
def get_word_pattern(word: str) -> str:
"""
>>> get_word_pattern("pattern")
'0.1.2.2.3.4.5'
>>> get_word_pattern("word pattern")
'0.1.2.3.4.5.6.7.7.8.2.9'
>>> get_word_pattern("get word pattern")
'0.1.2.3.4.5.6.7.3.8.9.2.2.1.6.10'
"""
word = word.upper()
next_num = 0
letter_nums = {}
word_pattern = []
for letter in word:
if letter not in letter_nums:
letter_nums[letter] = str(next_num)
next_num += 1
word_pattern.append(letter_nums[letter])
return ".".join(word_pattern)
if __name__ == "__main__":
import pprint
import time
start_time = time.time()
with open("dictionary.txt") as in_file:
wordList = in_file.read().splitlines()
all_patterns: dict = {}
for word in wordList:
pattern = get_word_pattern(word)
if pattern in all_patterns:
all_patterns[pattern].append(word)
else:
all_patterns[pattern] = [word]
with open("word_patterns.txt", "w") as out_file:
out_file.write(pprint.pformat(all_patterns))
totalTime = round(time.time() - start_time, 2)
print(f"Done! {len(all_patterns):,} word patterns found in {totalTime} seconds.")
# Done! 9,581 word patterns found in 0.58 seconds.
keras-team/keras | keras/distribute/dataset_creator_model_fit_test.py | Python | apache-2.0 | 10,240 | 0.004297
# Lint as: python3
# Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for `DatasetCreator` with `Model.fit` across usages and strategies."""
import tensorflow.compat.v2 as tf
import numpy as np
from tensorflow.python.framework import test_util as tf_test_utils # pylint: disable=g-direct-tensorflow-import
from keras.testing_infra import test_utils
from keras.distribute import dataset_creator_model_fit_test_base as test_base
from keras.distribute import strategy_combinations
from keras.utils import dataset_creator
# TODO(rchao): Investigate why there cannot be single worker and multi worker
# PS strategies running in the same shard.
@test_utils.run_v2_only
@tf.__internal__.distribute.combinations.generate(
tf.__internal__.test.combinations.combine(
strategy=strategy_combinations.all_strategies +
strategy_combinations.multi_worker_mirrored_strategies +
strategy_combinations.parameter_server_strategies_multi_worker,
mode="eager"))
class DatasetCreatorModelFitTest(test_base.DatasetCreatorModelFitTestBase):
def setUp(self):
super().setUp()
if tf_test_utils.is_xla_enabled():
self.skipTest("model.optimizer.iterations values is not as expected "
"with XLA: b/184384487")
def testModelFit(self, strategy):
model = self._model_fit(strategy)
self.assertEqual(model.optimizer.iterations, 100)
def testModelFitwithStepsPerEpochNegativeOne(self, strategy):
def dataset_fn(input_context):
del input_context
x = tf.random.uniform((10, 10))
y = tf.random.uniform((10,))
return tf.data.Dataset.from_tensor_slices(
(x, y)).shuffle(10).batch(2)
if strategy._should_use_with_coordinator:
with self.assertRaises((tf.errors.OutOfRangeError,
tf.errors.CancelledError)):
self._model_fit(
strategy,
steps_per_epoch=-1,
x=dataset_creator.DatasetCreator(dataset_fn),
validation_data=dataset_creator.DatasetCreator(dataset_fn),
)
else:
self._model_fit(
strategy,
steps_per_epoch=-1,
x=dataset_creator.DatasetCreator(dataset_fn),
validation_data=dataset_creator.DatasetCreator(dataset_fn),
)
def testModelFitWithNumpyData(self, strategy):
x = np.random.rand(100, 10)
y = np.random.rand(100, 1)
model = self._model_fit(
strategy,
x=x,
y=y,
batch_size=1,
validation_data=(x, y),
)
self.assertEqual(model.optimizer.iterations, 100)
def testModelFitWithTensorData(self, strategy):
x = tf.random.uniform((100, 10))
y = tf.random.uniform((100,))
model = self._model_fit(
strategy,
x=x,
y=y,
batch_size=1,
validation_data=(x, y),
)
self.assertEqual(model.optimizer.iterations, 100)
def testModelFitWithLookupLayer(self, strategy):
model = self._model_fit(strategy, use_lookup_layer=True)
self.assertEqual(model.optimizer.iterations, 100)
def testModelFitWithNormalizationLayer(self, strategy):
model = self._model_fit(strategy, with_normalization_layer=True)
self.assertEqual(model.optimizer.iterations, 100)
def testModelFitWithStepsPerExecution(self, strategy):
model = self._model_fit(strategy, steps_per_execution=10)
self.assertEqual(model.optimizer.iterations, 100)
def testModelFitWithNoStepsPerEpoch(self, strategy):
with self.assertRaisesRegex(
ValueError,
"When using a `tf.keras.utils.experimental.DatasetCreator`, "
"`steps_per_epoch`, `validation_steps` or `steps` argument must be "
"provided in `Model.fit`, `Model.evaluate`, or `Model.predict`."):
self._model_fit(strategy, steps_per_epoch=None)
def testModelEvaluate(self, strategy):
self._model_evaluate(strategy)
self.assertGreaterEqual(self._accuracy_metric.result(), 0.0)
def testModelEvaluateWithNumpyData(self, strategy):
x = np.random.rand(100, 10)
y = np.random.rand(100, 1)
self._model_evaluate(
strategy,
x=x,
y=y,
batch_size=1,
)
self.assertGreaterEqual(self._accuracy_metric.result(), 0.0)
def testModelEvaluateWithTensorData(self, strategy):
x = tf.random.uniform((100, 10))
y = tf.random.uniform((100,))
self._model_evaluate(
strategy,
x=x,
y=y,
batch_size=1,
)
self.assertGreaterEqual(self._accuracy_metric.result(), 0.0)
def testModelEvaluateWithNormalizationLayer(self, strategy):
self._model_evaluate(strategy, with_normalization_layer=True)
self.assertGreaterEqual(self._accuracy_metric.result(), 0.0)
def testModelEvaluateWithStepsPerExecution(self, strategy):
self._model_evaluate(strategy, steps_per_execution=10)
self.assertGreaterEqual(self._accuracy_metric.result(), 0.0)
def testModelEvaluateWithNoStepsPerEpoch(self, strategy):
with self.assertRaisesRegex(
ValueError,
"When using a `tf.keras.utils.experimental.DatasetCreator`, "
"`steps_per_epoch`, `validation_steps` or `steps` argument must be "
"provided in `Model.fit`, `Model.evaluate`, or `Model.predict`."):
self._model_evaluate(strategy, steps=None)
def testModelPredict(self, strategy):
_, predictions = self._model_predict(strategy, steps=3)
# Check the first (0th index), fourth (3rd index) and the last predictions
# because the first, fourth and the last input are the same in
# `model.predict` so their predictions should match.
self.assertTrue(all(predictions[0] == predictions[i] for i in [0, 3, 5]))
self.assertFalse(
all(predictions[0] == predictions[i] for i in [0, 1, 2, 4]))
def testModelPredictWithNumpyData(self, strategy):
x = np.array([[1.], [2.], [3.], [1.], [5.], [1.]])
_, predictions = self._model_predict(strategy, test_data=x)
self.assertTrue(all(predictions[0] == predictions[i] for i in [0, 3, 5]))
self.assertFalse(
all(predictions[0] == predictions[i] for i in [0, 1, 2, 4]))
def testModelPredictWithTensorData(self, strategy):
x = tf.constant([[1.], [2.], [3.], [1.], [5.], [1.]])
_, predictions = self._model_predict(strategy, test_data=x)
self.assertTrue(all(predictions[0] == predictions[i] for i in [0, 3, 5]))
self.assertFalse(
all(predictions[0] == predictions[i] for i in [0, 1, 2, 4]))
def testModelPredictWithNormalizationLayer(self, strategy):
_, predictions = self._model_predict(
strategy, with_normalization_layer=True, steps=3)
# Check the first (0th index), fourth (3rd index) and the last predictions
# because the first, fourth and the last input is the same in
# `model.predict` so their predictions should match.
self.assertTrue(all(predictions[0] == predictions[i] for i in [0, 3, 5]))
self.assertFalse(
all(predictions[0] == predictions[i] for i in [0, 1, 2, 4]))
def testModelPredictWithStepsPerExecution(self, strategy):
_, predictions = self._model_predict(
strategy, steps_per_execution=3, steps=3)
# Check the first (0th index), fourth (3rd index) and the last predictions
# because the first, fourth and the last input is the same in
# `model.predict` so their predictions should match.
self.assertTrue(all(predictions[0] == predictions[i] for i in [0, 3, 5]))
self.assertFalse(
all(predictions[0] == predictions[i] for i in [0, 1, 2, 4]))
def testModelFitAndPredict(self, strategy)
AtScaleInc/Impala | tests/shell/test_shell_interactive.py | Python | apache-2.0 | 3,707 | 0.006474
#!/usr/bin/env python
# encoding=utf-8
# Copyright 2014 Cloudera Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import pytest
import shlex
import signal
from impala_shell_results import get_shell_cmd_result, cancellation_helper
from subprocess import Popen, PIPE
from time import sleep
SHELL_CMD = "%s/bin/impala-shell.sh" % os.environ['IMPALA_HOME']
class TestImpalaShellInteractive(object):
"""Test the impala shell interactively"""
# TODO: Add cancellation tests
@pytest.mark.execute_serially
def test_escaped_quotes(self):
"""Test escaping quotes"""
# test escaped quotes outside of quotes
result = run_impala_shell_interactive("select \\'bc';")
assert "could not match input" in result.stderr
result = run_impala_shell_interactive("select \\\"bc\";")
assert "could not match input" in result.stderr
# test escaped quotes within quotes
result = run_impala_shell_interactive("select 'ab\\'c';")
assert "Fetched 1 row(s)" in result.stderr
result = run_impala_shell_interactive("select \"ab\\\"c\";")
assert "Fetched 1 row(s)" in result.stderr
@pytest.mark.execute_serially
def test_cancellation(self):
command = "select sleep(10000);"
p = Popen(shlex.split(SHELL_CMD), shell=True,
stdout=PIPE, stdin=PIPE, stderr=PIPE)
p.stdin.write(command + "\n")
p.stdin.flush()
sleep(1)
# iterate through all processes with psutil
shell_pid = cancellation_helper()
sleep(2)
os.kill(shell_pid, signal.SIGINT)
result = get_shell_cmd_result(p)
assert "Cancelling Query" in result.stderr
@pytest.mark.execute_serially
def test_unicode_input(self):
"Test queries containing non-ascii input"
# test a unicode query spanning multiple lines
unicode_text = u'\ufffd'
args = "select '%s'\n;" % unicode_text.encode('utf-8')
result = run_impala_shell_interactive(args)
assert "Fetched 1 row(s)" in result.stderr
@pytest.mark.execute_serially
def test_welcome_string(self):
"""Test that the shell's welcome message is only printed once
when the shell is started. Ensure it is not reprinted on errors.
Regression test for IMPALA-1153
"""
result = run_impala_shell_interactive('asdf;')
assert result.stdout.count("Welcome to the Impala shell") == 1
result = run_impala_shell_interactive('select * from non_existent_table;')
assert result.stdout.count("Welcome to the Impala shell") == 1
@pytest.mark.execute_serially
def test_bash_cmd_timing(self):
"""Test existence of time output in bash commands run from shell"""
args = "! ls;"
result = run_impala_shell_interactive(args)
assert "Executed in" in result.stderr
def run_impala_shell_interactive(command, shell_args=''):
"""Runs a command in the Impala shell interactively."""
cmd = "%s %s" % (SHELL_CMD, shell_args)
# workaround to make Popen environment 'utf-8' compatible
# since piping defaults to ascii
my_env = os.environ
my_env['PYTHONIOENCODING'] = 'utf-8'
p = Popen(shlex.split(cmd), shell=True, stdout=PIPE,
stdin=PIPE, stderr=PIPE, env=my_env)
p.stdin.write(command + "\n")
p.stdin.flush()
return get_shell_cmd_result(p)
mishka28/NYU-Python | advance_python_class_3/Homework1/temptest.py | Python | mit | 307 | 0.026059
#!/usr/bin/env python3
# sequence = []
defence = 0.4
for x in range(1):
print(x)
for x in range(10):
if x == 0:
defence = defence
else:
defence = defence + (1 - defence) * (1 / 2)
print(defence)
# print(defence)
# print(sequence)
# print(sum(sequence))
# x = input()
# print(x)
thorwhalen/ut | slurp/simple_request.py | Python | mit | 787 | 0.007624
__author__ = 'thorwhalen'
import requests
from serialize.khan_logger import KhanLogger
import logging
class SimpleRequest(object):
def __init__(self, log_file_name=None, log_level=logging.INFO):
full_log_path_and_name = KhanLogger.default_log_path_with_unique_name(log_file_name)
self.logger = KhanLogger(file_path_and_name=full_log_path_and_name, level=log_level)
def slurp(self, url):
r = requests.get(url, timeout=30.0)
if not r.ok:
self.logger.log(level=logging.WARN, simple_request="HTTP Error: {} for url {}".format(r.status_code, url))
else:
self.logger.log(level=logging.INFO, simple_request="Slurped url {}".format(url))
return r.text
if __name__ == '__main__':
sr = SimpleRequest()
mlperf/training_results_v0.7 | Inspur/benchmarks/dlrm/implementations/implementation_closed/dlrm/nn/__init__.py | Python | apache-2.0 | 70 | 0
from .modules.buckle_embedding import *
from .modules.gather import *
|
venicegeo/eventkit-cloud
|
eventkit_cloud/auth/migrations/0003_auto_20210519_2024.py
|
Python
|
bsd-3-clause
| 399
| 0
|
# Generated by Django 3.1.2 on 2021-05-19 20:24
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
        ('eventkit_cloud_auth', '0002_auto_20191127_1425'),
]
operations = [
        migrations.AlterField(
model_name='oauth',
name='user_info',
field=models.JSONField(default=dict),
),
]
|
kilon/sverchok
|
old_nodes/__init__.py
|
Python
|
gpl-3.0
| 4,863
| 0.008842
|
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
old_bl_idnames = {
'CentersPolsNode' : "centers",
# 'BakeryNode' : "bakery",
'CircleNode' : "circle",
'ListItemNode' : "list_item",
'GenRangeNode' : "range",
'GenSeriesNode' : "series",
# 'Test1Node' : "test",
# 'Test2Node' : "test",
# 'ToolsNode' : "tools",
'SvReRouteNode': "reroute",
'VoronoiNode': "voronoi",
'ViewerNode': "viewer",
'EvalKnievalNode': "eval_knieval",
'FormulaNode': 'formula',
}
# we should add some functions to load things there
import importlib
import inspect
import traceback
import bpy
from sverchok.node_tree import SverchCustomTreeNode
imported_mods = {}
def is_old(node_info):
'''
Check if node or node.bl_idname is among
the old nodes
'''
if isinstance(node_info, str):
# assumes bl_idname
return node_info in old_bl_idnames
elif isinstance(node_info, bpy.types.Node):
return node_info.bl_idname in old_bl_idnames
else:
return False
def scan_for_old(ng):
nodes = [n for n in ng.nodes if n.bl_idname in old_bl_idnames]
for node in nodes:
mark_old(node)
def mark_old(node):
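    # wrap the node in a red "Deprecated node!" frame, unless it is already wrapped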
if node.parent and node.parent.label == "Deprecated node!":
return
ng = node.id_data
frame = ng.nodes.new("NodeFrame")
if node.parent:
frame.parent = node.parent
node.parent = frame
frame.label = "Deprecated node!"
frame.use_custom_color = True
frame.color = (.8, 0, 0)
frame.shrink = True
def reload_old(ng=False):
if ng:
bl_idnames = {n.bl_idname for n in ng.nodes if n.bl_idname in old_bl_idnames}
for bl_id in bl_idnames:
mod = register_old(bl_id)
if mod:
importlib.reload(mod)
else:
print("Couldn't reload {}".format(bl_id))
else:
for ng in bpy.data.node_groups:
reload_old(ng)
#if ng.bl_idname in { 'SverchCustomTreeType', 'SverchGroupTreeType'}:
# reload_old(ng)
def load_old(ng):
"""
This approach didn't work, bl_idname of undefined node isn't as I expected
bl_idnames = {n.bl_idname for n in ng.nodes}
old_bl_ids = bl_idnames.intersection(old_bl_idnames)
if old_bl_ids:
"""
not_reged_nodes = list(n for n in ng.nodes if not n.is_registered_node_type())
if not_reged_nodes:
for bl_id in old_bl_idnames:
register_old(bl_id)
nodes = [n for n in ng.nodes if n.bl_idname == bl_id]
if nodes:
for node in nodes:
mark_old(node)
not_reged_nodes = list(n for n in ng.nodes if not n.is_registered_node_type())
node_count = len(not_reged_nodes)
print("Loaded {}. {} nodes are left unregisted.".format(bl_id, node_count))
if node_count == 0:
return
else: # didn't help remove
unregister_old(bl_id)
def register_old(bl_id):
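    # import the module implementing this old bl_idname and register its Node class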
if bl_id in old_bl_idnames:
mod = importlib.import_module(".{}".format(old_bl_idnames[bl_id]), __name__)
res = inspect.getmembers(mod)
for name, cls in res:
if inspect.isclass(cls):
if issubclass(cls, bpy.types.Node) and cls.bl_idname == bl_id:
                    if bl_id not in imported_mods:
try:
mod.register()
except:
traceback.print_exc()
imported_mods[bl_id] = mod
return mod
print("Cannot find {} among old nodes".format(bl_id))
return None
def unregister_old(bl_id):
global imported_mods
mod = imported_mods.get(bl_id)
if mod:
#print("Unloaded old node type {}".format(bl_id))
mod.unregister()
del imported_mods[bl_id]
def unregister():
global imported_mods
print(imported_mods)
for mod in imported_mods.values():
mod.unregister()
imported_mods = {}
|
sggottlieb/cmfieldguide
|
cmfieldguide/cmsdetector/signatures/expressionengine.py
|
Python
|
unlicense
| 1,264
| 0.011867
|
"""
This signature containts test to see if the site is running on ExpressionEngine.
"""
__author__ = "Seth Gottlieb"
__copyright__ = "CM Fieldguide"
__credits__ = ["Seth Gottlieb",]
__license__ = "Unlicense"
__version__ = "0.1"
__maintainer__ = "Seth Gottlieb"
__email__ = "sgottli
|
eb@alumni.duke.edu"
__status__ = "Experimental"
from cmfieldguide.cmsdetector.signatures import BaseSignature
class Signature(BaseSignature):
    NAME = 'ExpressionEngine'
WEBSITE = 'http://expressionengine.com/'
KNOWN_POSITIVE = 'http://expressionengine.com/'
TECHNOLOGY = 'PHP'
def test_has_ee_login(self, site):
"""
By default, Expression Engine ships with a login page at /admin.php
"""
if site.page_cache[site.url_stem + '/admin.php'].contains_pattern('http://expressionengine.com'):
return 1
else:
return 0
def test_has_css_loader_script(self, site):
"""
ExpressionEngine loads CSS files with a query string off the root of the site like
?css=something.css
"""
if site.home_page.has_matching_tag('link', {'rel':'stylesheet','href': '/\?css=\w+[\.|/]'}):
return 1
else:
return 0
|
dtrodrigues/nifi-minifi-cpp
|
python/getFile.py
|
Python
|
apache-2.0
| 3,036
| 0.000988
|
#!/usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from minifi import * # noqa F403
from argparse import ArgumentParser
from ctypes import cdll # noqa F401
import ctypes # noqa F401
import sys
from _cffi_backend import callback # noqa F401
class GetFilePrinterProcessor(PyProcessor): # noqa F405
def __init__(self, minifi, flow):
PyProcessor.__init__(self, minifi, flow) # noqa F405
self._callback = None
def _onTriggerCallback(self):
def onTrigger(session, context):
flow_file = self.get(session, context)
if flow_file:
if flow_file.add_attribute("python_test", "value"):
print("Add attribute succeeded")
if not flow_file.add_attribute("python_test", "value2"):
print("Cannot add the same attribute twice!")
print("original file name: " + flow_file.get_attribute("filename"))
target_relationship = "success"
if not self.transfer(session, flow_file, target_relationship):
print("transfer to relationship " + target_relationship + " failed")
return CALLBACK(onTrigger) # noqa F405
parser = ArgumentParser()
parser.add_argument("-s", "--dll", dest="dll_file",
help="DLL filename", metavar="FILE")
parser.add_argument("-n", "--nifi", dest="nifi_instance",
help="NiFi Instance")
parser.add_argument("-i", "--input", dest="input_port",
help="NiFi Input Port")
parser.add_argument("-d", "--dir", dest="dir",
help="GetFile Dir to monitor", metavar="FILE")
args = parser.parse_args()
""" dll_file is the path to the shared object """
minifi = MiNiFi(dll_file=args.dll_file, url=args.nifi_instance.encode('utf-8'), port=args.input_port.encode('utf-8')) # noqa F405
minifi.set_property("nifi.remote.input.http.enabled", "true")
processor = minifi.add_processor(GetFile()) # noqa F405
processor.set_property("Input Directory", args.dir)
processor.set_property("Keep Source File", "true")
current_module = sys.modules[__name__]
processor = minifi.create_python_processor(current_module, "GetFilePrinterProcessor")
ff = minifi.get_next_flowfile()
if ff:
minifi.transmit_flowfile(ff)
|
DonBeo/statsmodels
|
statsmodels/examples/ex_sandwich3.py
|
Python
|
bsd-3-clause
| 1,967
| 0.008134
|
# -*- coding: utf-8 -*-
"""Cluster Robust Standard Errors with Two Clusters
Created on Sat Dec 17 08:39:16 2011
Author: Josef Perktold
"""
from statsmodels.compat.python import urlretrieve
import numpy as np
from numpy.testing import assert_almost_equal
import statsmodels.api as sm
import statsmodels.stats.sandwich_covariance as sw
#requires Petersen's test_data
#http://www.kellogg.northwestern.edu/faculty/petersen/htm/papers/se/test_data.txt
try:
pet = np.genfromtxt("test_data.txt")
print('using local file')
except IOError:
    urlretrieve('http://www.kellogg.northwestern.edu/faculty/petersen/htm/papers/se/test_data.txt',
'test_data.txt')
print('downloading file')
pet = np.genfromtxt("test_data.txt")
endog = pet[:,-1]
group = pet[:,0].astype(int)
time = pet[:,1].astype(int)
exog = sm.add_constant(pet[:,2])
res = sm.OLS(endog, exog).fit()
cov01, covg, covt = sw.cov_cluster_2groups(res, group, group2=time)
#Reference number from Petersen
#http://www.kellogg.northwestern.edu/faculty/petersen/htm/papers/se/test_data.htm
bse_petw = [0.0284, 0.0284]
bse_pet0 = [0.0670, 0.0506]
bse_pet1 = [0.0234, 0.0334] #year
bse_pet01 = [0.0651, 0.0536] #firm and year
bse_0 = sw.se_cov(covg)
bse_1 = sw.se_cov(covt)
bse_01 = sw.se_cov(cov01)
print('OLS ', res.bse)
print('het HC0 ', res.HC0_se, bse_petw - res.HC0_se)
print('het firm ', bse_0, bse_0 - bse_pet0)
print('het year ', bse_1, bse_1 - bse_pet1)
print('het firm & year', bse_01, bse_01 - bse_pet01)
print('relative difference standard error het firm & year to OLS')
print(' ', bse_01 / res.bse)
#From the last line we see that the cluster and year robust standard errors
#are approximately twice those of OLS
assert_almost_equal(bse_petw, res.HC0_se, decimal=4)
assert_almost_equal(bse_0, bse_pet0, decimal=4)
assert_almost_equal(bse_1, bse_pet1, decimal=4)
assert_almost_equal(bse_01, bse_pet01, decimal=4)
|
Chaffelson/whoville
|
whoville/cloudbreak/models/recipe_request.py
|
Python
|
apache-2.0
| 7,284
| 0.002059
|
# coding: utf-8
"""
Cloudbreak API
Cloudbreak is a powerful left surf that breaks over a coral reef, a mile off southwest the island of Tavarua, Fiji. Cloudbreak is a cloud agnostic Hadoop as a Service API. Abstracts the provisioning and ease management and monitoring of on-demand clusters. SequenceIQ's Cloudbreak is a RESTful application development platform with the goal of helping developers to build solutions for deploying Hadoop YARN clusters in different environments. Once it is deployed in your favourite servlet container it exposes a REST API allowing to span up Hadoop clusters of arbitary sizes and cloud providers. Provisioning Hadoop has never been easier. Cloudbreak is built on the foundation of cloud providers API (Amazon AWS, Microsoft Azure, Google Cloud Platform, Openstack), Apache Ambari, Docker lightweight containers, Swarm and Consul. For further product documentation follow the link: <a href=\"http://hortonworks.com/apache/cloudbreak/\">http://hortonworks.com/apache/cloudbreak/</a>
OpenAPI spec version: 2.9.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class RecipeRequest(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'name': 'str',
'description': 'str',
'recipe_type': 'str',
'content': 'str'
}
attribute_map = {
'name': 'name',
'description': 'description',
'recipe_type': 'recipeType',
'content': 'content'
}
def __init__(self, name=None, description=None, recipe_type=None, content=None):
"""
RecipeRequest - a model defined in Swagger
"""
self._name = None
self._description = None
self._recipe_type = None
self._content = None
if name is not None:
self.name = name
if description is not None:
self.description = description
self.recipe_type = recipe_type
if content is not None:
self.content = content
@property
def name(self):
"""
Gets the name of this RecipeRequest.
name of the resource
:return: The name of this RecipeRequest.
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""
Sets the name of this RecipeRequest.
name of the resource
:param name: The name of this RecipeRequest.
:type: str
"""
if name is not None and len(name) > 100:
raise ValueError("Invalid value for `name`, length must be less than or equal to `100`")
if name is not None and len(name) < 5:
raise ValueError("Invalid value for `name`, length must be greater than or equal to `5`")
if name is not None and not re.search('(^[a-z][-a-z0-9]*[a-z0-9]$)', name):
raise ValueError("Invalid value for `name`, must be a follow pattern or equal to `/(^[a-z][-a-z0-9]*[a-z0-9]$)/`")
self._name = name
@property
def description(self):
"""
Gets the description of this RecipeRequest.
        description of the resource
:return: The description of this RecipeRequest.
:rtype: str
"""
return self._description
@description.setter
def description(self, description):
"""
Sets the description of this RecipeRequest.
description of the resource
:param description: The description of this RecipeRequest.
:type: str
"""
if description is not None and len(description) > 1000:
            raise ValueError("Invalid value for `description`, length must be less than or equal to `1000`")
if description is not None and len(description) < 0:
raise ValueError("Invalid value for `description`, length must be greater than or equal to `0`")
self._description = description
@property
def recipe_type(self):
"""
Gets the recipe_type of this RecipeRequest.
type of recipe
:return: The recipe_type of this RecipeRequest.
:rtype: str
"""
return self._recipe_type
@recipe_type.setter
def recipe_type(self, recipe_type):
"""
Sets the recipe_type of this RecipeRequest.
type of recipe
:param recipe_type: The recipe_type of this RecipeRequest.
:type: str
"""
if recipe_type is None:
raise ValueError("Invalid value for `recipe_type`, must not be `None`")
allowed_values = ["PRE_AMBARI_START", "PRE_TERMINATION", "POST_AMBARI_START", "POST_CLUSTER_INSTALL"]
if recipe_type not in allowed_values:
raise ValueError(
"Invalid value for `recipe_type` ({0}), must be one of {1}"
.format(recipe_type, allowed_values)
)
self._recipe_type = recipe_type
@property
def content(self):
"""
Gets the content of this RecipeRequest.
content of recipe
:return: The content of this RecipeRequest.
:rtype: str
"""
return self._content
@content.setter
def content(self, content):
"""
Sets the content of this RecipeRequest.
content of recipe
:param content: The content of this RecipeRequest.
:type: str
"""
self._content = content
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, RecipeRequest):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
|
michielkauwatjoe/Meta
|
meta/objects/cubicbezier.py
|
Python
|
mit
| 392
| 0.002558
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# https://github.com/michielkauwatjoe/Meta
class CubicBezier:
def __init__(self, bezierId=None, points=None, parent=None, isClosed=False):
u"""
Stores points of the cubic Bézier curve.
"""
self.bezierId = bezierId
self.points = points
self.parent = parent
self.isClosed = isClosed
| |
pixbuffer/django-cms
|
cms/utils/permissions.py
|
Python
|
bsd-3-clause
| 11,836
| 0.00414
|
# -*- coding: utf-8 -*-
from cms.exceptions import NoPermissionsException
from cms.models import Page, PagePermission, GlobalPagePermission
from cms.plugin_pool import plugin_pool
from cms.utils import get_cms_setting
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Group
from django.contrib.sites.models import Site
from django.db.models import Q
from django.utils.translation import ugettext_lazy as _
try:
from threading import local
except ImportError:
from django.utils._threading_local import local
# thread local support
_thread_locals = local()
def set_current_user(user):
"""
Assigns current user from request to thread_locals, used by
CurrentUserMiddleware.
"""
_thread_locals.user = user
def get_current_user():
"""
Returns current user, or None
"""
return getattr(_thread_locals, 'user', None)
def has_page_add_permission(request):
"""
Return true if the current user has permission to add a new page. This is
    just used for general add buttons - only a superuser, or a user with can_add
    in GlobalPagePermission, can add a page.
Special case occur when page is going to be added from add page button in
change list - then we have target and position there, so check if user can
add page under target page will occur.
"""
opts = Page._meta
if request.user.is_superuser:
return True
# if add under page
target = request.GET.get('target', None)
position = request.GET.get('position', None)
if target is not None:
try:
page = Page.objects.get(pk=target)
except Page.DoesNotExist:
return False
if (request.user.has_perm(opts.app_label + '.' + opts.get_add_permission()) and
has_global_page_permission(request, page.site_id, can_add=True)):
return True
if position in ("first-child", "last-child"):
return page.has_add_permission(request)
elif position in ("left", "right"):
if page.parent_id:
return has_generic_permission(page.parent_id, request.user, "add", page.site)
else:
from cms.utils.plugins import current_site
site = current_site(request)
if (request.user.has_perm(opts.app_label + '.' + opts.get_add_permission()) and
has_global_page_permission(request, site, can_add=True)):
return True
return False
def has_any_page_change_permissions(request):
from cms.utils.plugins import current_site
if not request.user.is_authenticated():
return False
return request.user.is_superuser or PagePermission.objects.filter(
page__site=current_site(request)
).filter((
Q(user=request.user) |
Q(group__in=request.user.groups.all())
)).exists()
def has_page_change_permission(request):
"""
Return true if the current user has permission to change this page.
To be granted this permission, you need the cms.change_page permission.
In addition, if CMS_PERMISSION is enabled you also need to either have
global can_change permission or just on this page.
"""
from cms.utils.plugins import current_site
opts = Page._meta
return request.user.is_superuser or (
request.user.has_perm(opts.app_label + '.' + opts.get_change_permission())
and (
not get_cms_setting('PERMISSION') or
has_global_page_permission(request, current_site(request),
can_change=True) or
has_any_page_change_permissions(request)))
def has_global_page_permission(request, site=None, **filters):
"""
A helper function to check for global page permissions for the current user
and site. Caches the result on a request basis, so multiple calls to this
function inside of one request/response cycle only generate one query.
:param request: the Request object
:param site: the Site object or ID
:param filters: queryset filters, e.g. ``can_add = True``
:return: ``True`` or ``False``
"""
if request.user.is_superuser:
return True
if not hasattr(request, '_cms_global_perms'):
request._cms_global_perms = {}
key = tuple((k, v) for k, v in filters.iteritems())
if site:
key = (('site', site.pk if hasattr(site, 'pk') else int(site)),) + key
if key not in request._cms_global_perms:
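        # cache miss: run the permission query once and memoize the boolean result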
        qs = GlobalPagePermission.objects.with_user(request.user).filter(**filters)
if site:
            qs = qs.filter(Q(sites__in=[site]) | Q(sites__isnull=True))
request._cms_global_perms[key] = qs.exists()
return request._cms_global_perms[key]
def get_any_page_view_permissions(request, page):
"""
Used by the admin template tag is_restricted
"""
return PagePermission.objects.for_page(page=page).filter(can_view=True)
def get_user_permission_level(user):
"""
    Returns the highest user level from the page/permission hierarchy on which
    the user has can_change_permission. Also looks into user groups. A higher
    level equals a lower number; users at the top of the hierarchy have level 0.
    The level is the same as the page.level attribute.
Example:
A,W level 0
/ \
user B,GroupE level 1
/ \
C,X D,Y,W level 2
    Users A, W have user level 0. GroupE and all its users have user level 1.
    If user D is a member of GroupE, his user level will be 1, otherwise it is
    2.
"""
if (user.is_superuser or
GlobalPagePermission.objects.with_can_change_permissions(user).exists()):
# those
return 0
try:
permission = PagePermission.objects.with_can_change_permissions(user).order_by('page__level')[0]
except IndexError:
# user isn't assigned to any node
raise NoPermissionsException
return permission.page.level
def get_subordinate_users(user):
"""
    Returns a users queryset containing all users subordinate to the given user,
    including users created by the given user and not assigned to any page.
    Unassigned users must be returned, because they shouldn't get lost, and
    the user should still have the possibility to see them.
    Only users created by the given user which are on the same or a lower level
    are returned.
    If the user has global permissions or is a superuser, then he can see all
    the users.
This function is currently used in PagePermissionInlineAdminForm for limit
users in permission combobox.
Example:
A,W level 0
/ \
user B,GroupE level 1
Z / \
C,X D,Y,W level 2
Rules: W was created by user, Z was created by user, but is not assigned
to any page.
Will return [user, C, X, D, Y, Z]. W was created by user, but is also
    assigned to a higher level.
"""
# TODO: try to merge with PagePermissionManager.subordinate_to_user()
if user.is_superuser or \
GlobalPagePermission.objects.with_can_change_permissions(user):
return get_user_model().objects.all()
site = Site.objects.get_current()
page_id_allow_list = Page.permissions.get_change_permissions_id_list(user, site)
try:
user_level = get_user_permission_level(user)
except NoPermissionsException:
# no permission so only staff and no page permissions
qs = get_user_model().objects.distinct().filter(
Q(is_staff=True) &
Q(pageuser__created_by=user) &
Q(pagepermission__page=None)
)
qs = qs.exclude(pk=user.id).exclude(groups__user__pk=user.id)
return qs
# normal query
qs = get_user_model().objects.distinct().filter(
Q(is_sta
|
apark263/tensorflow
|
tensorflow/contrib/tensorrt/test/concatenation_test.py
|
Python
|
apache-2.0
| 3,388
| 0.010035
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Model script to test TF-TensorRT integration."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.tensorrt.test import tf_trt_integration_test_base as trt
|
_test
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_array_ops
from tensorflow.python.ops import gen_math_ops
from tensorflow.python.platform import test
class ConcatenationTest(trt_test.TfTrtIntegrationTestBase):
def GetParams(self):
"""Testing Concatenation in TF-TRT conversion."""
dtype = dtypes.float32
input_name = "input"
input_dims = [2, 3, 3, 1]
output_name = "output"
g = ops.Graph()
with g.as_default():
x = array_ops.placeholder(dtype=dtype, shape=input_dims, name=input_name)
# scale
a = constant_op.constant(np.random.randn(3, 1, 1), dtype=dtype)
r1 = x / a
a = constant_op.constant(np.random.randn(3, 1, 1), dtype=dtype)
r2 = a / x
a = constant_op.constant(np.random.randn(1, 3, 1), dtype=dtype)
r3 = a + x
a = constant_op.constant(np.random.randn(1, 3, 1), dtype=dtype)
r4 = x * a
a = constant_op.constant(np.random.randn(3, 1, 1), dtype=dtype)
r5 = x - a
a = constant_op.constant(np.random.randn(3, 1, 1), dtype=dtype)
r6 = a - x
a = constant_op.constant(np.random.randn(3, 1), dtype=dtype)
r7 = x - a
a = constant_op.constant(np.random.randn(3, 1), dtype=dtype)
r8 = a - x
a = constant_op.constant(np.random.randn(3, 1, 1), dtype=dtype)
r9 = gen_math_ops.maximum(x, a)
a = constant_op.constant(np.random.randn(3, 1), dtype=dtype)
r10 = gen_math_ops.minimum(a, x)
a = constant_op.constant(np.random.randn(3), dtype=dtype)
r11 = x * a
a = constant_op.constant(np.random.randn(1), dtype=dtype)
r12 = a * x
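      # concatenate all twelve elementwise results along the channel axis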
concat1 = array_ops.concat([r1, r2, r3, r4, r5, r6], axis=-1)
concat2 = array_ops.concat([r7, r8, r9, r10, r11, r12], axis=3)
x = array_ops.concat([concat1, concat2], axis=-1)
gen_array_ops.reshape(x, [2, -1], name=output_name)
return trt_test.TfTrtIntegrationTestParams(
gdef=g.as_graph_def(),
input_names=[input_name],
input_dims=[[input_dims]],
output_names=[output_name],
expected_output_dims=[[[2, 126]]])
def ExpectedEnginesToBuild(self, run_params):
"""Return the expected engines to build."""
return ["TRTEngineOp_0"]
if __name__ == "__main__":
test.main()
|
jamy015/slack-latex-bot
|
application.py
|
Python
|
gpl-3.0
| 2,527
| 0.001979
|
import os
import requests
from flask import Flask, request, abort, redirect, jsonify
from requests.exceptions import RequestException
import quicklatex
application = Flask(__name__)
slack_client_id = os.environ['SLACK_CLIENT_ID']
slack_client_secret = os.environ['SLACK_CLIENT_SECRET']
slack_verification_token = os.environ['SLACK_VERIFICATION_TOKEN']
@application.route('/ping')
def ping():
"""Return a short message so it's easy to check whether the bot is up and running"""
return 'pong'
@application.route('/slack/oauth', methods=['GET'])
def oauth():
"""Auth a Slack team"""
try:
r = requests.post('https://slack.com/api/oauth.access', data={
'client_id': slack_client_id,
'client_secret': slack_client_secret,
'code': request.args['code'],
})
if r.status_code != 200:
abort(400) # Bad Request
except RequestException:
abort(500) # Internal Server Error
return redirect('https://github.com/jamy015/slack-latex-bot/blob/master/SUCCESS_PAGE.md', 303)
@application.route('/slack/latex', methods=['POST'])
def slash_latex():
"""Handle an incoming LaTeX slash command from Slack"""
if request.form.get('ssl_check') == '1': # Slack SSL ping
return '' # Empty 200 OK
if request.form['token'] != slack_verification_token:
abort(401) # Unauthorized
if request.form['text'] == 'help':
return ('Give me a LaTeX formula and I\'ll show it to the channel!\r\n\r\n'
'Not quite ready for prime time? Just use me in the conversation with yourself!')
try:
image = quicklatex.quicklatex(request.form['text'], show_errors=True)
err_attachment = None
except ValueError as err:
image = quicklatex.quicklatex(request.form['text'], show_errors=False)
err_attachment = {
'fallback': str(err),
'color': 'warning',
'text': str(err),
}
except quicklatex.HTTPError as err:
return jsonify({
'response_type': 'in_channel',
'text': 'QuickLaTeX seems to be having some trouble (HTTP {}). Please try again later.'.format(err)
})
return jsonify({
'response_type': 'in_channel',
'attachments': [
err_attachment,
{
'fallback': 'LaTeX',
'text': 'LaTeX',
'image_url': image,
},
],
})
if __name__ == "__main__":
application.run()
|
refeed/coala-bears
|
bears/c_languages/GNUIndentBear.py
|
Python
|
agpl-3.0
| 7,940
| 0
|
import platform
import shlex
from coalib.bearlib import deprecate_settings
from coalib.bearlib.abstractions.Linter import linter
from coalib.bearlib.spacing.SpacingHelper import SpacingHelper
from dependency_management.requirements.DistributionRequirement import (
DistributionRequirement)
@linter(executable='indent' if platform.system() != 'Darwin' else 'gindent',
use_stdin=True,
output_format='corrected',
result_message='Indentation can be improved.')
class GNUIndentBear:
"""
This bear checks and corrects spacing and indentation via the well known
Indent utility.
C++ support is considered experimental.
"""
LANGUAGES = {'C', 'C++'}
REQUIREMENTS = {DistributionRequirement('indent')}
AUTHORS = {'The coala developers'}
AUTHORS_EMAILS = {'coala-devel@googlegroups.com'}
LICENSE = 'AGPL-3.0'
CAN_FIX = {'Formatting'}
@staticmethod
@deprecate_settings(indent_size='tab_width')
def create_arguments(filename, file, config_file,
max_line_length: int = 79,
use_spaces: bool = True,
blank_lines_after_declarations: bool = False,
blank_lines_after_procedures: bool = False,
blank_lines_after_commas: bool = False,
braces_on_if_line: bool = False,
braces_on_func_def_line: bool = False,
cuddle_else: bool = False,
while_and_brace_on_same_line: bool = False,
case_indentation: int = 0,
space_before_semicolon_after_empty_loop: bool = True,
delete_optional_blank_lines: bool = True,
declaration_indent: int = 0,
brace_indent: int = 2,
gnu_style: bool = False,
k_and_r_style: bool = False,
linux_style: bool = False,
indent_size: int = SpacingHelper.DEFAULT_TAB_WIDTH,
indent_cli_options: str = '',
):
"""
:param max_line_length:
Maximum number of characters for a line.
When set to 0, infinite line length is allowed.
:param use_spaces:
True if spaces are to be used, else tabs.
:param blank_lines_after_declarations:
Forces blank lines after the declarations.
Example: If ``blank_lines_after_declarations = True`` then::
int a;
return ...;
changes to::
int a;
return ...;
:param blank_lines_after_procedures:
Force blank lines after procedure bodies.
:param blank_lines_after_commas:
Forces newline after comma in declaration.
Example: If ``blank_lines_after_commas = True`` then::
int a, b;
changes to::
int a,
b;
:param braces_on_if_line:
Puts the brace ``{`` on same line with if.
Example: If ``braces_on_if_line = True`` then::
if (x > 0)
{
changes to::
if (x > 0) {
:param braces_on_func_def_line:
Puts the brace `{` on same line with the function declaration.
:param cuddle_else:
Cuddle else and preceding ``}``.
Example: If ``cuddle_else = True`` then::
if (...) {
....
}
else {
changes to::
if (...) {
....
} else {
:param while_and_brace_on_same_line:
Cuddles while of ``do {} while``; and preceding ``}``.
:param case_indentation:
Specifies the number of spaces by which ``case`` in the ``switch``
are indented.
:param space_before_semicolon_after_empty_loop:
Forces a blank before the semicolon ``;`` on one-line ``for`` and
            ``while`` statements.
:param delete_optional_blank_lines:
            Deletes blank lines that are not needed. An example of a needed
            blank line is the blank line following a declaration when
``blank_line_after_declaration=True``.
:param declaration_indent:
Forces variables names to be aligned in column ``n`` with
``n = declaration_indent`` in declaration.
Example: If ``declaration_indent = 8`` then::
int a;
float b;
changes to::
int a;
float b;
:param brace_indent:
Specifies the number of spaces by which braces are indented. Its
default value is 2.
:param gnu_style:
Uses GNU coding style.
:param k_and_r_style:
Uses Kernighan & Ritchie coding style.
:param linux_style:
Uses Linux coding style.
:param indent_size:
Number of spaces per indentation level.
:param indent_cli_options:
Any command line options the indent binary understands. They
will be simply passed through.
"""
indent_options = ('--no-tabs' if use_spaces else '--use-tabs',
'--line-length', str(max_line_length),
'--indent-level', str(indent_size),
'--tab-size', str(indent_size), )
indent_options += (('--cuddle-do-while',)
if while_and_brace_on_same_line
else ('--dont-cuddle-do-while',))
indent_options += (('--swallow-optional-blank-lines',)
if delete_optional_blank_lines else ('-nsob',))
indent_options += (('--blank-lines-after-declarations',)
if blank_lines_after_declarations else ('-nbad',))
indent_options += (('--blank-lines-after-commas',)
if blank_lines_after_commas else ('-nbc',))
indent_options += (('--blank-lines-after-procedures',)
if blank_lines_after_procedures else ('-nbap',))
indent_options += (('-di' + str(declaration_indent),)
if declaration_indent != 0 else ())
indent_options += (('--case-indentation'+str(case_indentation),)
if case_indentation != 0 else ())
indent_options += (('--space-special-semicolon',)
if space_before_semicolon_after_empty_loop
else ('-nss',))
indent_options += ('--brace-indent'+str(brace_indent),)
indent_options += (('--braces-on-func-def-line',)
if braces_on_func_def_line else ('-blf',))
indent_options += ((('-ce',) if cuddle_else else ('-nce',)) +
('-br',)) if braces_on_if_line else ('-bl',)
indent_style_option = ()
indent_style_option += ('--gnu-style',) if gnu_style else ()
indent_style_option += (('--k-and-r-style',)
                                if k_and_r_style and indent_style_option == ()
else ())
indent_style_option += (('--linux-style',)
                                if linux_style and indent_style_option == ()
else ())
# The limit is set to a number which equals to int max in C
if not max_line_length:
max_line_length = 2147483647
# If a style is chosen the other configs aren't passed to `indent`
        return (indent_style_option if indent_style_option != ()
else indent_options) + tuple(shlex.split(indent_cli_options))
|
sarvex/tensorflow
|
tensorflow/lite/micro/examples/person_detection/utils/raw_to_bitmap.py
|
Python
|
apache-2.0
| 5,452
| 0.007337
|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Convert raw bytes to a bitmap.
Converts a raw image dumped to a file into a bitmap. The file must contain
complete bitmap images in 324 x 244 resolution, formatted as follows:
+++ frame +++
<byte number> <16 one-byte values separated by spaces>
--- frame ---
For example, the first line might look like:
0x00000000 C5 C3 CE D1 D9 DA D6 E3 E2 EB E9 EB DB E4 F5 FF
The bitmaps are automatically saved to the same directory as the log file, and
are displayed by the script.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import os
import os.path
import re
import numpy as np
_DICT_RESOLUTIONS = {
'QVGA': (324, 244, 1),
'GRAY': (96, 96, 1),
'RGB': (96, 96, 3),
}
_VERSION = 0
_SUBVERSION = 1
def check_file_existence(x):
if not os.path.isfile(x):
# Argparse uses the ArgumentTypeError to give a rejection message like:
# error: argument input: x does not exist
raise argparse.ArgumentTypeError('{0} does not exist'.format(x))
return x
def show_and_save_bitmaps(input_file, bitmap_list, channels):
"""Display and save a list of bitmaps.
Args:
input_file: input file name
bitmap_list: list of numpy arrays to represent bitmap images
channels: color channel count
"""
try:
from PIL import Image # pylint: disable=g-import-not-at-top
except ImportError:
raise NotImplementedError('Image display and save not implemented.')
for idx, bitmap in enumerate(bitmap_list):
path = os.path.dirname(os.path.abspath(input_file))
basename = os.path.split(os.path.splitext(input_file)[0])[-1]
outputfile = os.path.join(path, basename + '_' + str(idx) + '.bmp')
if channels == 3:
img = Image.fromarray(bitmap, 'RGB')
else:
img = Image.fromarray(bitmap, 'L')
img.save(outputfile)
img.show()
def reshape_bitmaps(frame_list, width, height, channels):
"""Reshape flat integer arrays.
Args:
frame_list: list of 1-D arrays to represent raw image data
width: image width in pixels
height: image height in pixels
channels: color channel count
Returns:
list of numpy arrays to represent bitmap images
"""
bitmap_list = []
for frame in frame_list:
shape = (height, width, channels) if channels > 1 else (height, width)
bitmap = np.reshape(frame, shape)
bitmap = np.flip(bitmap, 0)
bitmap_list.append(bitmap)
return bitmap_list
def parse_file(inputfile, width, height, channels):
"""Convert log file to array of pixels.
Args:
inputfile: log file to parse
width: image width in pixels
height: image height in pixels
channels: color channel count
Returns:
    list of 1-D arrays to represent raw image data.
"""
data = None
bytes_written = 0
frame_start = False
frame_stop = False
frame_list = list()
# collect all pixel data into an int array
for line in inputfile:
if line == '+++ frame +++\n':
frame_start = True
data = np.zeros(height * width * channels, dtype=np.uint8)
bytes_written = 0
continue
elif line == '--- frame ---\n':
frame_stop = True
if frame_start and not frame_stop:
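      # a valid data line is one address token followed by 16 hex byte values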
linelist = re.findall(r"[\w']+", line)
if len(linelist) != 17:
# drop this frame
frame_start = False
continue
for item in linelist[1:]:
data[bytes_written] = int(item, base=16)
bytes_written += 1
elif frame_start and frame_stop:
if bytes_written == height * width * channels:
frame_list.append(data)
frame_start = False
frame_stop = False
return frame_list
def main():
parser = argparse.ArgumentParser(
description='This program converts raw data from HM01B0 to a bmp file.')
parser.add_argument(
'-i',
'--input',
dest='inputfile',
required=True,
help='input file',
metavar='FILE',
type=check_file_existence)
    parser.add_argument(
'-r',
'--resolution',
dest='resolution',
required=False,
help='Resolution',
choices=['QVGA', 'RGB', 'GRAY'],
default='QVGA',
)
parser.add_argument(
'-v',
'--version',
help='Program version',
action='version',
version='%(prog)s {ver}'.format(ver='v%d.%d' % (_VERSION, _SUBVERSION)))
args = parser.parse_args()
    (width, height,
channels) = _DICT_RESOLUTIONS.get(args.resolution,
('Resolution not supported', 0, 0, 0))
frame_list = parse_file(open(args.inputfile), width, height, channels)
bitmap_list = reshape_bitmaps(frame_list, width, height, channels)
show_and_save_bitmaps(args.inputfile, bitmap_list, channels)
if __name__ == '__main__':
main()
|
airbnb/streamalert
|
streamalert/apps/config.py
|
Python
|
apache-2.0
| 18,276
| 0.002845
|
"""
Copyright 2017-present Airbnb, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import calendar
from datetime import datetime
import json
import re
import time
import backoff
import boto3
from botocore import client
from botocore.exceptions import ClientError
from streamalert.apps.exceptions import AppAuthError, AppConfigError, AppStateError
from streamalert.shared.logger import get_logger
LOGGER = get_logger(__name__)
AWS_RATE_RE = re.compile(r'^rate\(((1) (minute|hour|day)|'
r'([2-9]+|[1-9]\d+) (minutes|hours|days))\)$')
AWS_RATE_HELPER = 'http://docs.aws.amazon.com/AmazonCloudWatch/latest/events/ScheduledEvents.html'
class AppConfig:
"""Centralized config for handling configuration loading/parsing"""
MAX_STATE_SAVE_TRIES = 5
BOTO_TIMEOUT = 5
SSM_CLIENT = None
AUTH_CONFIG_SUFFIX = 'auth'
STATE_CONFIG_SUFFIX = 'state'
_STATE_KEY = 'current_state'
_TIME_KEY = 'last_timestamp'
_CONTEXT_KEY = 'context'
_STATE_DESCRIPTION = 'State information for the \'{}\' app for use in the \'{}\' function'
class States:
"""States object to encapsulate various acceptable states"""
PARTIAL = 'partial'
RUNNING = 'running'
SUCCEEDED = 'succeeded'
FAILED = 'failed'
class Events:
"""Events object to encapsulate various acceptable events"""
SUCCESSIVE_INVOKE = 'successive'
def __init__(self, auth_config, state_config, event, func_name, func_version):
self._validate_event(event)
self._auth_config = auth_config
self._current_state = state_config.get(self._STATE_KEY)
self._last_timestamp = state_config.get(self._TIME_KEY)
self._context = state_config.get(self._CONTEXT_KEY, {})
self._event = event
self.function_name = func_name
self.function_version = func_version
self.start_last_timestamp = None
def set_starting_timestamp(self, date_format):
self.start_last_timestamp = self._determine_last_time(date_format)
@property
def successive_event(self):
"""Return formatted json for event representing a successive invocation"""
event = {'invocation_type': self.Events.SUCCESSIVE_INVOKE}
event.update(self._event)
return json.dumps(event)
@property
def _app_type(self):
"""The app type for this config"""
return self._event['app_type']
@property
def _schedule(self):
"""The rate schedule on which this app runs"""
return self._event['schedule_expression']
@property
def destination_function(self):
"""The destination function name where logs should be sent"""
return self._event['destination_function_name']
@property
def _invocation_type(self):
"""The invocation type for this function, can be None"""
return self._event.get('invocation_type')
@property
def _state_name(self):
"""The name of the state parameter in ssm"""
return '{}_{}'.format(self.function_name, self.STATE_CONFIG_SUFFIX)
@staticmethod
def remaining_ms():
"""Static method that gets mapped to the address of the AWS Lambda
context object's "get_remaining_time_in_millis" method so we can
monitor execution. This is helpful to save state when nearing the
timeout for a lambda execution.
"""
@classmethod
def required_event_keys(cls):
"""Get the set of keys that are required in the input event
Returns:
set: Set of required keys
"""
return {'app_type', 'destination_function_name', 'schedule_expression'}
@classmethod
def load_config(cls, event, context):
"""Load the configuration for this app invocation
Args:
event (dict): The AWS Lambda input event, which is JSON serialized to a dictionary
context (LambdaContext): The AWS LambdaContext object, passed in via the handler.
Returns:
AppConfig: Configuration for the running application
"""
# Patch out the protected _remaining_ms method to the AWS timing function
AppConfig.remaining_ms = context.get_remaining_time_in_millis
func_name = context.function_name
func_version = context.function_version
# Get full parameter names for authentication and state parameters
auth_param_name = '_'.join([func_name, cls.AUTH_CONFIG_SUFFIX])
state_param_name = '_'.join([func_name, cls.STATE_CONFIG_SUFFIX])
# Get the loaded parameters and a list of any invalid ones from parameter store
params, invalid_params = cls._get_parameters(auth_param_name, state_param_name)
# Check to see if the authentication param is in the invalid params list
if auth_param_name in invalid_params:
raise AppConfigError('Could not load authentication parameter required for this '
'app: {}'.format(auth_param_name))
LOGGER.debug('Retrieved parameters from parameter store: %s',
cls._scrub_auth_info(params, auth_param_name))
LOGGER.debug('Invalid parameters could not be retrieved from parameter store: %s',
invalid_params)
# Load the authentication info. This data can vary from service to service
auth_config = {
            key: value
for key, value in params[auth_param_name].items()
}
state_config = params.get(state_param_name, {})
return AppConfig(auth_config, state_config, event, func_name, func_version)
@staticmethod
def _scrub_auth_info(param_info, auth_param_name):
"""Scrub sensitive authentication info from a copy of the retrieved parameters.
By scrubbing/masking the authentication info, it allows us to safely print the info
to stdout (logger) without revealing secrets needed for authentication.
Args:
param_info (dict): All of the parameter information pulled from Parameter Store
auth_param_name (str): The key for the auth config info within the param_info
Returns:
dict: A copy of the passed param_info dictionary with the authentication
information scrubbed with an asterisk for each character
"""
info = param_info.copy()
info[auth_param_name] = {key: '*' * len(str(value))
for key, value in info[auth_param_name].items()}
return info
def validate_auth(self, required_keys):
"""Validate the authentication dictionary retrieved from AWS Parameter Store
Args:
required_keys (set): required authentication keys for the running app
Returns:
            bool: Indicator of successful validation
"""
# The config validates that the 'auth' dict was loaded, but do a safety check here
if not self.auth:
raise AppAuthError('[{}] Auth config is empty'.format(self))
auth_key_diff = required_keys.difference(set(self.auth))
if not auth_key_diff:
return True
missing_auth_keys = ', '.join('\'{}\''.format(key) for key in auth_key_diff)
raise AppAuthError('[{}] Auth config is missing the following '
'required keys: {}'.format(self, missing_auth_keys))
@classmethod
def _get_parameters(cls, *names):
"""Simple helper function to house the boto3 ssm client ge
|
t_parameters operations
Args:
names (list): A list of parameter names to retrieve from the aws ssm
|
foxcarlos/decimemijobot
|
bot/migrations/0014_auto_20171109_1813.py
|
Python
|
gpl-3.0
| 694
| 0.001441
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-11-09 18:13
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('bot', '0013_auto_20171109_1759'),
]
operations = [
migrations.AlterField(
model_name='alertausuario',
name='chat_id',
field=models.IntegerField(blank=True),
),
migrations.AlterField(
model_name='alertausuario',
name='ultima_actualizacion',
field=models.DateTimeField(default=datetime.datetime(2017, 11, 9, 18, 13, 50, 254179)),
),
]
|
mlperf/training_results_v0.6
|
Fujitsu/benchmarks/resnet/implementations/mxnet/3rdparty/tvm/nnvm/tests/python/compiler/test_top_level3.py
|
Python
|
apache-2.0
| 1,275
| 0.012549
|
import numpy as np
import tvm
from tvm.contrib import graph_runtime
import topi.testing
import nnvm.symbol as sym
import nnvm.compiler
from nnvm.testing.config import ctx_list
from nnvm.testing.check_computation import check_function
def check_map(symfunc, np_func, np_backward=None, dtype="float32", rnd_min=-1, rnd_max=1):
x = sym.Variable("x")
y = symfunc(x)
shape = {'x': (1, 3, 32, 32)}
check_function(y, lambda x: np_func(x), np_backward,
dtype=dtype, shape=shape, in_range=(rnd_min, rnd_max))
def test_floor():
check_map(sym.floor, np.floor)
def test_ceil():
check_map(sym.ceil, np.ceil)
def test_trunc():
check_map(sym.trunc, np.trunc)
def test_round():
check_map(sym.round, np.round)
def test_abs():
check_map(sym.abs, np.abs)
check_map(sym.abs, np.abs, dtype = "int32")
check_map(sym.abs, np.abs, dtype = "int8")
def test_shift():
n = 3
for dtype in ["int32", "int8"]:
check_map(lambda x : x >> n, lambda x: x >> n, dtype=dtype, rnd_min=-100, rnd_max=100)
check_map(lambda x : x << n, lambda x: x << n, dtype=dtype, rnd_min=-100, rnd_max=100)
if __name__ == "__main__":
test_shift()
test_floor()
test_ceil()
test_round()
test_abs()
test_trunc()
|
adityahase/frappe
|
frappe/website/doctype/web_page_block/web_page_block.py
|
Python
|
mit
| 272
| 0.003676
|
# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
# import frappe
from frappe.model.document import Document
class WebPageBlock(Document):
pass
|
pyfidelity/rest-seed
|
backend/backrest/tests/test_migrations.py
|
Python
|
bsd-2-clause
| 3,745
| 0.001068
|
from alembic.config import Config
from alembic.environment import EnvironmentContext
from alembic.migration import MigrationContext
from alembic.script import ScriptDirectory
from difflib import unified_diff
from pytest import fixture
from re import split
from sqlalchemy import engine_from_config
from subprocess import call, Popen, PIPE
@fixture
def settings(testing):
# we use our own db for this test, since it will be created and dropped
project_name = testing.project_name()
db_name = '%s_migration_test' % project_name
return {
'db_name': db_name,
'sqlalchemy.url': 'postgresql:///' + db_name,
'testing': True,
}
def createdb(db_name):
call('createdb %s -E utf8 -T template0' % db_name, shell=True)
def dropdb(db_name):
call('dropdb %s' % db_name, shell=True)
def dumpdb(db_name):
p = Popen('pg_dump --schema-only %s' % db_name, shell=True, stdout=PIPE, stderr=PIPE)
out, err = p.communicate()
assert p.returncode == 0, err
# we parse the output and change it a little bit for better diffing
out = out.splitlines()
start = None
for index, line in enumerate(out):
# we only change CREATE TABLE statements
if line.startswith('CREATE TABLE'):
start = index
if start is not None:
if line.strip().endswith(');'):
# we sort the columns
out[start + 1:index] = sorted(out[start + 1:index])
start = None
else:
# and remove trailing commas
out[index] = line.rstrip().rstrip(',')
else:
# for COPY statements, we have to sort the column names as well
if line.startswith('COPY'):
parts = split('[()]', line)
columns = sorted(x.strip() for x in parts[1].split(','))
out[index] = '%s(%s)%s' % (parts[0], ' ,'.join(columns), parts[2])
# we add newlines for diffing
return ['%s\n' % x for x in out]
def test_db_metadata_differences(models, settings):
db_name = settings['db_name']
# first we drop anything there might be
dropdb(db_name)
# then we create a clean DB from the metadata
createdb(db_name)
metadata = models.metadata
engine = engine_from_config(settings)
metadata.bind = engine
metadata.create_all(engine, tables=[table for name, table
in metadata.tables.items() if not name.startswith('test_')])
# and store the results
create_all_result = dumpdb(db_name)
engine.dispose()
# now we do it again, but this time using migrations
dropdb(db_name)
createdb(db_name)
config = Config()
config.set_main_option('script_location', 'backrest:migrations')
script = ScriptDirectory.from_config(config)
connection = engine.connect()
environment = EnvironmentContext(config, script,
starting_rev='base', destination_rev='head')
context = MigrationContext.configure(connection)
def upgrade(rev, context):
return script._upgrade_revs('head', rev)
context._migrations_fn = upgrade
environment._migration_context = context
with environment.begin_transaction():
environment.run_migrations()
# we drop alembic_version to avoid it showing up in the diff
engine.execute('DROP TABLE alembic_version;')
# we store these results
alembic_result = dumpdb(db_name)
del context
del environment
connection.close()
del connection
engine.dispose()
# now we check whether there are differences and output them if there are
diff = unified_diff(alembic_result, create_all_result)
assert alembic_result == create_all_result, \
'Migration output differs:\n' + ''.join(diff)
|
aronsky/home-assistant
|
homeassistant/components/tradfri/base_class.py
|
Python
|
apache-2.0
| 4,045
| 0.000494
|
"""Base class for IKEA TRADFRI."""
from __future__ import annotations
from collections.abc import Callable
from functools import wraps
import logging
from typing import Any
from pytradfri.command import Command
from pytradfri.device import Device
from pytradfri.device.air_purifier import AirPurifier
from pytradfri.device.air_purifier_control import AirPurifierControl
from pytradfri.device.blind import Blind
from pytradfri.device.blind_control import BlindControl
from pytradfri.device.light import Light
from pytradfri.device.light_control import LightControl
from pytradfri.device.signal_repeater_control import SignalRepeaterControl
from pytradfri.device.socket import Socket
from pytradfri.device.socket_control import SocketControl
from pytradfri.error import PytradfriError
from homeassistant.core import callback
from homeassistant.helpers.entity import DeviceInfo, Entity
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
def handle_error(
func: Callable[[Command | list[Command]], Any]
) -> Callable[[str], Any]:
"""Handle tradfri api call error."""
@wraps(func)
async def wrapper(command: Command | list[Command]) -> None:
"""Decorate api call."""
try:
await func(command)
except PytradfriError as err:
_LOGGER.error("Unable to execute command %s: %s", command, err)
return wrapper
class TradfriBaseClass(Entity):
"""Base class for IKEA TRADFRI.
All devices and groups should ultimately inherit from this class.
"""
_attr_should_poll = False
def __init__(
self,
device: Device,
api: Callable[[Command | list[Command]], Any],
        gateway_id: str,
) -> None:
"""Initialize a device."""
self._api = handle_error(api)
self._device: Device = device
self._device_control: BlindControl | LightControl | SocketControl | SignalRepeaterControl | AirPurifierControl | None = (
None
)
self._device_data: Socket | Light | Blind | AirPurifier | None = None
self._gateway_id = gateway_id
self._refresh(device)
@callback
    def _async_start_observe(self, exc: Exception | None = None) -> None:
"""Start observation of device."""
if exc:
self.async_write_ha_state()
_LOGGER.warning("Observation failed for %s", self._attr_name, exc_info=exc)
try:
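            # re-register the observe callback; failures re-enter this method via err_callback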
cmd = self._device.observe(
callback=self._observe_update,
err_callback=self._async_start_observe,
duration=0,
)
self.hass.async_create_task(self._api(cmd))
except PytradfriError as err:
_LOGGER.warning("Observation failed, trying again", exc_info=err)
self._async_start_observe()
async def async_added_to_hass(self) -> None:
"""Start thread when added to hass."""
self._async_start_observe()
@callback
def _observe_update(self, device: Device) -> None:
"""Receive new state data for this device."""
self._refresh(device)
self.async_write_ha_state()
def _refresh(self, device: Device) -> None:
"""Refresh the device data."""
self._device = device
self._attr_name = device.name
class TradfriBaseDevice(TradfriBaseClass):
"""Base class for a TRADFRI device.
All devices should inherit from this class.
"""
@property
def device_info(self) -> DeviceInfo:
"""Return the device info."""
info = self._device.device_info
return DeviceInfo(
identifiers={(DOMAIN, self._device.id)},
manufacturer=info.manufacturer,
model=info.model_number,
name=self._attr_name,
sw_version=info.firmware_version,
via_device=(DOMAIN, self._gateway_id),
)
def _refresh(self, device: Device) -> None:
"""Refresh the device data."""
super()._refresh(device)
self._attr_available = device.reachable
|
sridevikoushik31/nova
|
nova/virt/xenapi/vm_utils.py
|
Python
|
apache-2.0
| 88,420
| 0.000373
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2010 Citrix Systems, Inc.
# Copyright 2011 Piston Cloud Computing, Inc.
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Helper methods for operations related to the management of VM records and
their attributes like VDIs, VIFs, as well as their lookup functions.
"""
import contextlib
import decimal
import os
import re
import time
import urllib
import urlparse
import uuid
from xml.dom import minidom
from xml.parsers import expat
from eventlet import greenthread
from oslo.config import cfg
from nova.api.metadata import base as instance_metadata
from nova import block_device
from nova.compute import flavors
from nova.compute import power_state
from nova.compute import task_states
from nova import exception
from nova.image import glance
from nova.openstack.common import excutils
from nova.openstack.common import log as logging
from nova.openstack.common import processutils
from nova.openstack.common import strutils
from nova import utils
from nova.virt import configdrive
from nova.virt.disk import api as disk
from nova.virt.disk.vfs import localfs as vfsimpl
from nova.virt.xenapi import agent
from nova.virt.xenapi import volume_utils
LOG = logging.getLogger(__name__)
xenapi_vm_utils_opts = [
cfg.StrOpt('cache_images',
default='all',
help='Cache glance images locally. `all` will cache all'
' images, `some` will only cache images that have the'
' image_property `cache_in_nova=True`, and `none` turns'
' off caching entirely'),
cfg.StrOpt('default_os_type',
default='linux',
help='Default OS type'),
cfg.IntOpt('block_device_creation_timeout',
default=10,
help='Time to wait for a block device to be created'),
cfg.IntOpt('max_kernel_ramdisk_size',
default=16 * 1024 * 1024,
help='Maximum size in bytes of kernel or ramdisk images'),
cfg.StrOpt('sr_matching_filter',
default='other-config:i18n-key=local-storage',
help='Filter for finding the SR to be used to install guest '
'instances on. The default value is the Local Storage in '
'default XenServer/XCP installations. To select an SR '
'with a different matching criteria, you could set it to '
'other-config:my_favorite_sr=true. On the other hand, to '
'fall back on the Default SR, as displayed by XenCenter, '
'set this flag to: default-sr:true'),
cfg.BoolOpt('xenapi_sparse_copy',
default=True,
help='Whether to use sparse_copy for copying data on a '
'resize down (False will use standard dd). This speeds '
'up resizes down considerably since large runs of zeros '
'won\'t have to be rsynced'),
cfg.IntOpt('xenapi_num_vbd_unplug_retries',
default=10,
help='Maximum number of retries to unplug VBD'),
cfg.StrOpt('xenapi_torrent_images',
default='none',
help='Whether or not to download images via Bit Torrent '
'(all|some|none).'),
cfg.StrOpt('xenapi_torrent_base_url',
default=None,
help='Base URL for torrent files.'),
cfg.FloatOpt('xenapi_torrent_seed_chance',
default=1.0,
help='Probability that peer will become a seeder.'
' (1.0 = 100%)'),
cfg.IntOpt('xenapi_torrent_seed_duration',
default=3600,
help='Number of seconds after downloading an image via'
' BitTorrent that it should be seeded for other peers.'),
cfg.IntOpt('xenapi_torrent_max_last_accessed',
default=86400,
               help='Cached torrent files not accessed within this number of'
' seconds can be reaped'),
cfg.IntOpt('xenapi_torrent_listen_port_start',
default=6881,
help='Beginning of port range to listen on'),
cfg.IntOpt('xenapi_torrent_listen_port_end',
default=6891,
help='End of port range to listen on'),
cfg.IntOpt('xenapi_torrent_download_stall_cutoff',
default=600,
help='Number of seconds a download can remain at the same'
' progress percentage w/o being considered a stall'),
cfg.IntOpt('xenapi_torrent_max_seeder_processes_per_host',
default=1,
help='Maximum number of seeder processes to run concurrently'
' within a given dom0. (-1 = no limit)')
]
CONF = cfg.CONF
CONF.register_opts(xenapi_vm_utils_opts)
CONF.import_opt('default_ephemeral_format', 'nova.virt.driver')
CONF.import_opt('use_cow_images', 'nova.virt.driver')
CONF.import_opt('glance_num_retries', 'nova.image.glance')
CONF.import_opt('use_ipv6', 'nova.netconf')
XENAPI_POWER_STATE = {
'Halted': power_state.SHUTDOWN,
'Running': power_state.RUNNING,
'Paused': power_state.PAUSED,
'Suspended': power_state.SUSPENDED,
'Crashed': power_state.CRASHED}
SECTOR_SIZE = 512
MBR_SIZE_SECTORS = 63
MBR_SIZE_BYTES = MBR_SIZE_SECTORS * SECTOR_SIZE
KERNEL_DIR = '/boot/guest'
MAX_VDI_CHAIN_SIZE = 16
class ImageType(object):
"""Enumeration class for distinguishing different image types
| 0 - kernel image (goes on dom0's filesystem)
| 1 - ramdisk image (goes on dom0's filesystem)
| 2 - disk image (local SR, partitioned by objectstore plugin)
| 3 - raw disk image (local SR, NOT partitioned by plugin)
| 4 - vhd disk image (local SR, NOT inspected by XS, PV assumed for
| linux, HVM assumed for Windows)
| 5 - ISO disk image (local SR, NOT partitioned by plugin)
| 6 - config drive
"""
KERNEL = 0
RAMDISK = 1
DISK = 2
DISK_RAW = 3
DISK_VHD = 4
DISK_ISO = 5
DISK_CONFIGDRIVE = 6
_ids = (KERNEL, RAMDISK, DISK, DISK_RAW, DISK_VHD, DISK_ISO,
DISK_CONFIGDRIVE)
KERNEL_STR = "kernel"
RAMDISK_STR = "ramdisk"
DISK_STR = "root"
DISK_RAW_STR = "os_raw"
DISK_VHD_STR = "vhd"
DISK_ISO_STR = "iso"
DISK_CONFIGDRIVE_STR = "configdrive"
_strs = (KERNEL_STR, RAMDISK_STR, DISK_STR, DISK_RAW_STR, DISK_VHD_STR,
DISK_ISO_STR, DISK_CONFIGDRIVE_STR)
@classmethod
def to_string(cls, image_type):
return dict(zip(ImageType._ids, ImageType._strs)).get(image_type)
@classmethod
def get_role(cls, image_type_id):
"""Get the role played by the image, based on its type."""
return {
cls.KERNEL: 'kernel',
cls.RAMDISK: 'ramdisk',
cls.DISK: 'root',
cls.DISK_RAW: 'root',
cls.DISK_VHD: 'root',
cls.DISK_ISO: 'iso',
cls.DISK_CONFIGDRIVE: 'configdrive'
}.get(image_type_id)
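    # Added illustration (not in the original nova source): the two class
    # methods above are plain id <-> string/role lookups, e.g.
    #   ImageType.to_string(ImageType.DISK_VHD)  ->  'vhd'
    #   ImageType.get_role(ImageType.DISK_VHD)   ->  'root'
    #   ImageType.to_string(99)                  ->  None for unknown ids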
def create_vm(session, instance, name_label, kernel, ramdisk,
use_pv_kernel=False):
"""Create a VM record. Returns new VM reference.
the use_pv_kernel flag indicates whether the guest is HVM or PV
There are 3 scenarios:
1. Using paravirtualization, kernel passed in
2. Using paravirtualization, kernel within the image
3. Using hardware virtualization
"""
instance_type = flavors.extract_instance_type(instance)
mem = str(long(instance_type['memory_mb']) * 1024 * 1024)
vcpus = str(instance_type['vcp
zwindler/simfrastructure | simfrastructure_core.py | Python | mit | 9,266 | 0.015649
#!/usr/bin/python
# -*- coding: utf-8 -*-
import random
from graphviz import Digraph
current_indent = 0
color_map = ['lightgrey', 'lightpink', 'orange', 'cyan', 'gold', 'lawngreen', 'sienna', 'yellow', 'red', 'hotpink']
verbose = 0
def print_simple_tree():
    """prints a beautiful dependency tree"""
    indent = "| "
    last_indent = "\_"
    return max(current_indent-1,0)*indent+min(1,current_indent)*last_indent
class sim_datacenter:
"""A datacenter that can contain racks"""
def __init__(self, name, tenant_id=0):
self.name = name
self.racks = []
self.rack_max = None
self.tenant_id= tenant_id
"""Set maximum number of racks in DC"""
def set_rack_max(self, rack_max):
self.rack_max = rack_max
def add_rack(self, rack):
if (not isinstance(rack, sim_rack)):
print(rack.name+" is not a rack!")
        if ( self.rack_max == None or len(self.racks) < self.rack_max ):
self.racks.append(rack)
else:
print(self.name+" is full, can't add rack!")
def find_suitable_host(self, kind, guest_capacity):
suitable_objects = []
for rack in self.racks:
for server in rack.servers:
if server.get_host_capability(kind) and server.has_enough_ressources(guest_capacity):
suitable_objects.append(server)
if "vms" in server.guests:
for vm in server.guests["vms"]:
if vm.get_host_capability(kind) and vm.has_enough_ressources(guest_capacity):
suitable_objects.append(vm)
return random.choice(suitable_objects)
def get_dc_free_capacity(self, kind):
dc_free_capacity = {"vcpu": 0, "ram": 0}
for rack in self.racks:
rack_free_capacity = rack.get_rack_free_capacity(kind)
dc_free_capacity["vcpu"] += rack_free_capacity["vcpu"]
dc_free_capacity["ram"] += rack_free_capacity["ram"]
return dc_free_capacity
def generate_graph(self, graph):
global color_map
graph.attr('node', style='filled', color=color_map[self.tenant_id])
graph.attr('node', shape='diamond', overlap='false')
graph.node(self.name)
for rack in self.racks:
graph.attr('node', style='filled', color=color_map[rack.tenant_id])
graph.attr('node', shape='box', overlap='false')
graph.node(rack.name)
graph.edge(self.name, rack.name)
graph = rack.generate_graph(graph)
return graph
def __str__(self):
global current_indent
output = "Datacenter "+self.name+"\n"
current_indent += 1
if verbose:
output += print_simple_tree()+"Datacenter size: "+str(self.rack_max)+"\n"
output += print_simple_tree()+"Racks in this datacenter: \n"
current_indent += 1
for rack in self.racks:
output += str(rack)
current_indent = 0
return output
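# Hypothetical usage sketch (added for illustration; sim_rack and sim_server
# are defined further down in this file, capacities are made-up):
#   dc = sim_datacenter('dc1')
#   rack = sim_rack('rack1'); dc.add_rack(rack)
#   srv = sim_server('srv1', 32, 128); srv.set_host_capability(['vms'])
#   rack.add_server(srv)
#   print(dc.get_dc_free_capacity('vms'))   # -> {'vcpu': 32, 'ram': 128}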
class sim_rack:
"""A rack that can contain servers"""
def __init__(self, name, tenant_id=0):
self.name = name
self.servers = []
self.rack_size = 42
self.tenant_id = tenant_id
"""Set maximum number of servers units in rack"""
def set_rack_size(self, rack_size):
self.rack_size = rack_size
def get_rack_usage(self):
rack_usage = 0
for server in self.servers:
rack_usage += server.server_size
return rack_usage
def get_rack_free_capacity(self, kind):
rack_free_capacity = {"vcpu": 0, "ram": 0}
for server in self.servers:
server_free_capacity = server.get_host_free_capacity(kind)
rack_free_capacity["vcpu"] += server_free_capacity["vcpu"]
rack_free_capacity["ram"] += server_free_capacity["ram"]
return rack_free_capacity
def add_server(self, server):
if (not isinstance(server, sim_server)):
print(server.name+" is not a server!")
rack_usage = self.get_rack_usage()
if ( self.rack_size == None or rack_usage < self.rack_size):
self.servers.append(server)
else:
print(self.name+" is full, can't add server!")
def generate_graph(self, graph):
for server in self.servers:
global color_map
graph.attr('node', style='filled', color=color_map[server.tenant_id])
graph.attr('node', shape='ellipse')
graph.node(server.name)
graph.edge(self.name, server.name)
with graph.subgraph(name='cluster_'+server.name) as c:
c.attr(color='blue')
c = server.generate_graph(c)
return graph
def __str__(self):
global current_indent
output = print_simple_tree()+"Rack "+self.name+"\n"
current_indent += 1
if verbose:
output += print_simple_tree()+"Rack usage: "+str(self.get_rack_usage())+"/"+str(self.rack_size)+"U\n"
output += print_simple_tree()+"Servers in this rack: \n"
current_indent += 1
for server in self.servers:
output += str(server)
current_indent -= 1
return output
class sim_host:
def has_enough_ressources(self, guest_capacity):
usage = self.get_host_usage()
suitable = True
for k in usage.keys():
if usage[k] + guest_capacity[k] > self.capacity[k]:
return None
return self
def get_host_usage(self):
host_usage = {"vcpu" : 0.0, "ram" : 0.0}
for k, v in self.guests.items():
for logical_object in v:
host_usage["vcpu"] += logical_object.capacity["vcpu"]
host_usage["ram"] += logical_object.capacity["ram"]
return host_usage
def get_host_free_capacity(self, kind):
host_free_capacity = {"vcpu": 0.0, "ram": 0.0}
if self.get_host_capability(kind):
usage = self.get_host_usage()
for k in usage.keys():
host_free_capacity[k] = self.capacity[k] - usage[k]
return host_free_capacity
"""Set the ability to run VMs or containers or both"""
def set_host_capability(self, capabilities):
for capability in capabilities:
self.guests[capability] = []
def get_host_capability(self, kind):
if kind in self.guests.keys() :
return True
return False
def register_logical_object_to_host(self, guest):
self.guests[guest.kind].append(guest)
def generate_graph(self, graph):
for guest_types in self.guests:
for guest in self.guests[guest_types]:
global color_map
                graph.attr('node', style='filled', color=color_map[guest.tenant_id])
if guest_types == "containers":
graph.attr('node', shape='box3d', style="")
elif guest_types == "vms":
graph.attr('node', shape='hexagon', style="")
graph.node(guest.name)
graph.edge(self.name, guest.name)
with graph.subgraph(name='cluster_'+guest.name) as c:
c.attr(color='blue')
c = guest.generate_graph(c)
return graph
class sim_server(sim_host):
"""A 2U server that may run containers or virtual machines or both"""
def __init__(self, name, vcpu_max_capacity, ram_max_capacity, tenant_id=0):
self.name = name
self.server_size = 2
self.capacity = {"vcpu": vcpu_max_capacity, "ram": ram_max_capacity}
self.guests = {}
self.tenant_id = tenant_id
def __str__(self):
global current_indent
output = print_simple_tree()+"Server "+self.name+" (Usage: "+str(self.get_host_usage()["vcpu"])+"/"+str(self.capacity["vcpu"])+" vCPU; "+str(self.get_host_usage()["ram"])+"/"+str(self.capacity["ram"])+" GB RAM)\n"
current_indent += 1
if verbose:
output += print_simple_tree()+"Server size : "+str(self.server_size)+"U\n"
if "vms" in self.guests.keys():
if verbose:
output += print_simple_tree()+"Can run VMs\n"
for vm in self.guests["vms"]:
output += str(vm)
if "containers" in self.guests.keys():
current_indent += 1
if verbose:
output += print_simple_tree()+"Can run containers\n"
for container in self.guests["containers"]:
output += str(container)
current_indent -= 1
current_indent -= 1
return output
class sim_logical_object(sim_host):
"""Allocate vm or container in a specified DC"""
def add_logical_object_in_dc(self, dc):
host = dc.find_suitable_host(self.kind, self.capacity)
host.register_logical_object_to_host(self)
"""Force VM or container allocation on a specified server"""
"""def add_logical_object_on_server(self, server):
print(guest)"""
def __init__(self, name, vcpu_
guorendong/iridium-browser-ubuntu | third_party/chromite/lib/git_unittest.py | Python | bsd-3-clause | 13,396 | 0.004479
# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unit tests for chromite.lib.git and helpers for testing that module."""
from __future__ import print_function
import functools
import mock
import os
from chromite.lib import cros_build_lib
from chromite.lib import cros_build_lib_unittest
from chromite.lib import cros_test_lib
from chromite.lib import git
from chromite.lib import partial_mock
from chromite.lib import patch_unittest
class ManifestMock(partial_mock.PartialMock):
"""Partial mock for git.Manifest."""
TARGET = 'chromite.lib.git.Manifest'
ATTRS = ('_RunParser',)
def _RunParser(self, *_args):
pass
class ManifestCheckoutMock(partial_mock.PartialMock):
"""Partial mock for git.ManifestCheckout."""
TARGET = 'chromite.lib.git.ManifestCheckout'
ATTRS = ('_GetManifestsBranch',)
def _GetManifestsBranch(self, _root):
return 'default'
class NormalizeRefTest(cros_test_lib.TestCase):
"""Test the Normalize*Ref functions."""
def _TestNormalize(self, functor, tests):
"""Helper function for testing Normalize*Ref functions.
Args:
functor: Normalize*Ref functor that only needs the input
ref argument.
tests: Dict of test inputs to expected test outputs.
"""
for test_input, test_output in tests.iteritems():
result = functor(test_input)
msg = ('Expected %s to translate %r to %r, but got %r.' %
(functor.__name__, test_input, test_output, result))
self.assertEquals(test_output, result, msg)
def testNormalizeRef(self):
"""Test git.NormalizeRef function."""
tests = {
# These should all get 'refs/heads/' prefix.
'foo': 'refs/heads/foo',
'foo-bar-123': 'refs/heads/foo-bar-123',
# If input starts with 'refs/' it should be left alone.
'refs/foo/bar': 'refs/foo/bar',
'refs/heads/foo': 'refs/heads/foo',
# Plain 'refs' is nothing special.
'refs': 'refs/heads/refs',
None: None,
}
self._TestNormalize(git.NormalizeRef, tests)
def testNormalizeRemoteRef(self):
"""Test git.NormalizeRemoteRef function."""
remote = 'TheRemote'
tests = {
# These should all get 'refs/remotes/TheRemote' prefix.
'foo': 'refs/remotes/%s/foo' % remote,
'foo-bar-123': 'refs/remotes/%s/foo-bar-123' % remote,
# These should be translated from local to remote ref.
'refs/heads/foo': 'refs/remotes/%s/foo' % remote,
'refs/heads/foo-bar-123': 'refs/remotes/%s/foo-bar-123' % remote,
# These should be moved from one remote to another.
'refs/remotes/OtherRemote/foo': 'refs/remotes/%s/foo' % remote,
# These should be left alone.
'refs/remotes/%s/foo' % remote: 'refs/remotes/%s/foo' % remote,
'refs/foo/bar': 'refs/foo/bar',
# Plain 'refs' is nothing special.
'refs': 'refs/remotes/%s/refs' % remote,
None: None,
}
# Add remote arg to git.NormalizeRemoteRef.
functor = functools.partial(git.NormalizeRemoteRef, remote)
functor.__name__ = git.NormalizeRemoteRef.__name__
self._TestNormalize(functor, tests)
class GitWrappersTest(cros_build_lib_unittest.RunCommandTempDirTestCase):
"""Tests for small git wrappers"""
CHANGE_ID = 'I0da12ef6d2c670305f0281641bc53db22faf5c1a'
COMMIT_LOG = '''
foo: Change to foo.
Change-Id: %s
''' % CHANGE_ID
PUSH_REMOTE = 'fake_remote'
PUSH_BRANCH = 'fake_branch'
PUSH_LOCAL = 'fake_local_branch'
def setUp(self):
self.fake_git_dir = os.path.join(self.tempdir, 'foo/bar')
self.fake_file = 'baz'
self.fake_path = os.path.join(self.fake_git_dir, self.fake_file)
def testInit(self):
git.Init(self.fake_path)
# Should have created the git repo directory, if it didn't exist.
os.path.exists(self.fake_git_dir)
self.assertCommandContains(['init'])
def testAddPath(self):
git.AddPath(self.fake_path)
self.assertCommandContains(['add'])
self.assertCommandContains([self.fake_file])
def testRmPath(self):
git.RmPath(self.fake_path)
self.assertCommandContains(['rm'])
self.assertCommandContains([self.fake_file])
def testGetObjectAtRev(self):
git.GetObjectAtRev(self.fake_git_dir, '.', '1234')
self.assertCommandContains(['show'])
def testRevertPath(self):
git.RevertPath(self.fake_git_dir, self.fake_file, '1234')
self.assertCommandContains(['checkout'])
self.assertCommandContains([self.fake_file])
def testCommit(self):
self.rc.AddCmdResult(partial_mock.In('log'), output=self.COMMIT_LOG)
git.Commit(self.fake_git_dir, 'bar')
self.assertCommandContains(['--amend'], expected=False)
cid = git.Commit(self.fake_git_dir, 'bar', amend=True)
self.assertCommandContains(['--amend'])
self.assertCommandContains(['--allow-empty'], expected=False)
self.assertEqual(cid, self.CHANGE_ID)
cid = git.Commit(self.fake_git_dir, 'new', allow_empty=True)
self.assertCommandContains(['--allow-empty'])
def testUploadCLNormal(self):
git.UploadCL(self.fake_git_dir, self.PUSH_REMOTE, self.PUSH_BRANCH,
local_branch=self.PUSH_LOCAL)
self.assertCommandContains(['%s:refs/for/%s' % (self.PUSH_LOCAL,
self.PUSH_BRANCH)],
capture_output=False)
def testUploadCLDraft(self):
git.UploadCL(self.fake_git_dir, self.PUSH_REMOTE, self.PUSH_BRANCH,
local_branch=self.PUSH_LOCAL, draft=True)
self.assertCommandContains(['%s:refs/drafts/%s' % (self.PUSH_LOCAL,
self.PUSH_BRANCH)],
capture_output=False)
def testUploadCLCaptured(self):
git.UploadCL(self.fake_git_dir, self.PUSH_REMOTE, self.PUSH_BRANCH,
local_branch=self.PUSH_LOCAL, draft=True, capture_output=True)
self.assertCommandContains(['%s:refs/drafts/%s' % (self.PUSH_LOCAL,
self.PUSH_BRANCH)],
capture_output=True)
class ProjectCheckoutTest(cros_test_lib.TestCase):
"""Tests for git.ProjectCheckout"""
def setUp(self):
self.fake_unversioned_patchable = git.ProjectCheckout(
dict(name='chromite',
path='src/chromite',
revision='remotes/for/master'))
    self.fake_unversioned_unpatchable = git.ProjectCheckout(
dict(name='chromite',
path='src/platform/somethingsomething/chromite',
# Pinned to a SHA1.
revision='1deadbeeaf1deadbeeaf1deadbeeaf1deadbeeaf'))
self.fake_versioned_patchable = git.ProjectCheckout(
dict(name='chromite',
path='src/chromite',
             revision='1deadbeeaf1deadbeeaf1deadbeeaf1deadbeeaf',
upstream='remotes/for/master'))
self.fake_versioned_unpatchable = git.ProjectCheckout(
dict(name='chromite',
path='src/chromite',
revision='1deadbeeaf1deadbeeaf1deadbeeaf1deadbeeaf',
upstream='1deadbeeaf1deadbeeaf1deadbeeaf1deadbeeaf'))
def testIsPatchable(self):
self.assertTrue(self.fake_unversioned_patchable.IsPatchable())
self.assertFalse(self.fake_unversioned_unpatchable.IsPatchable())
self.assertTrue(self.fake_versioned_patchable.IsPatchable())
self.assertFalse(self.fake_versioned_unpatchable.IsPatchable())
class RawDiffTest(cros_test_lib.MockTestCase):
"""Tests for git.RawDiff function."""
def testRawDiff(self):
"""Test the parsing of the git.RawDiff function."""
diff_output = '''
:100644 100644 ac234b2... 077d1f8... M\tchromeos-base/chromeos-chrome/Manifest
:100644 100644 9e5d11b... 806bf9b... R099\tchromeos-base/chromeos-chrome/chromeos-chrome-40.0.2197.0_rc-r1.ebuild\tchromeos-base/chromeos-chrome/chromeos-chrome-40.0.2197.2_rc-r1.ebuild
:100644 100644 70d6e94... 821c642... M\tchromeos-base/chromeos-chrome/chromeos-chrome-9999.ebuild
:100644 100644 be445f9... be445f9... R100\tchromeos-base/chromium-source/chromium-source-40
mferenca/HMS-ecommerce | ecommerce/tests/factories.py | Python | agpl-3.0 | 1,062 | 0.001883
from django.contrib.sites.models import Site
import factory
from factory.fuzzy import FuzzyText
from oscar.core.loading import get_model
from oscar.test.factories import ProductFactory, StockRecordFactory as OscarStockRecordFactory
from ecommerce.core.models import SiteConfiguration
class PartnerFactory(factory.DjangoModelFactory):
class Meta(object):
model = get_model('partner', 'Partner')
django_get_or_create = ('name',)
name = FuzzyText(prefix='test-partner-')
    short_code = factory.SelfAttribute('name')
class SiteFactory(factory.DjangoModelFactory):
class Meta(object):
model = Site
class SiteConfigurationFactory(factory.DjangoModelFactory):
class Meta(object):
model = SiteConfiguration
lms_url_root = factory.LazyAttribute(lambda obj: "http://lms.testserver.fake")
site = factory.SubFactory(SiteFactory)
partner = factory.SubFactory(PartnerFactory)
class StockRecordFactory(OscarStockRecordFactory):
product = factory.SubFactory(ProductFactory)
price_currency = 'USD'
bcstack/btsdk | shell/service.py | Python | apache-2.0 | 714 | 0.004202
from threading import Thread, Lock
import bluetooth as bt
import socket
class SerialPortService(Thread):
def __init__(self, port):
super(SerialPortService, self).__init__()
self.port = port
self.send_lock = Lock()
self.setDaemon(True)
def run(self):
self.running = True
self.port.settimeout(0.1)
while self.running:
try:
data = self.port.recv(2048)
print 'IN:', data
            except:
                pass
self.port.close()
def end(self):
self.running = False
def send(self, msg):
self.send_lock.acquire()
self.port.send(msg)
self.send_lock.release()
rackerlabs/marconi | tests/unit/common/test_request.py | Python | apache-2.0 | 912 | 0
# Copyright (c) 2013 Rackspace, Inc.
# Copyright (c) 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from marconi.common import request
from marconi.tests import base
class TestRequest(base.TestBase):
def test_request_deserialized(self):
data = '{"data": "tons of GBs"}'
req = request.Request(content=data)
        self.assertIsInstance(req.deserialized_content, dict)
luiscberrocal/rango_tutorial | versiontools/tests.py | Python | apache-2.0 | 948 | 0.009494
from django.test import TestCase
from versiontools.models import AssemblyInfo
from django.utils.timesince import timesince
from django.utils.datetime_safe import datetime
# Create your tests here.
class AssembyInfoTest(TestCase):
def test_create(self):
        data = {'fullpath' : r'C:\Users\lberrocal\Documents\Visual Studio 2010\Projects\vessel_scheling_app\5-VesselScheduleEngine\VsApplicationData\My Project\AssemblyInfo.vb ',
'title' : 'Vessel Schedule WorkBench',
'description' : '',
'version' : '1.2.6'}
ai = AssemblyInfo.objects.create(**data)
self.assertEqual(ai.fullpath, data['fullpath'])
self.assertEqual(ai.title, data['title'])
self.assertEqual(ai.description, data['description'])
self.assertEqual(ai.version, data['version'])
print ai.updated_on
print datetime.now()
print ai.project
openaid-IATI/OIPA | OIPA/currency_convert/migrations/0002_monthlyaverage_imf_url.py | Python | agpl-3.0 | 404 | 0
# Generated by Django 2.0.13 on 2020-10-09 14:56
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('currency_convert', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='monthlyaverage',
name='imf_url',
field=models.URLField(max_length=2000, null=True),
),
]
ServiceLearningB/ServiceLearningNew | submit_reports/views.py | Python | mit | 12,136 | 0.023978
from django.shortcuts import render, RequestContext
from .forms import *
import datetime
from django.forms import modelformset_factory
from .models import SubmitReport, Student, Faculty, Staff, Course
from django.http import HttpResponse, HttpResponseRedirect, HttpResponsePermanentRedirect
from django.contrib import auth
from django.views.generic.list import ListView
from django.views.generic import DetailView
from django.views.generic.edit import FormMixin
from django.template.context_processors import csrf
from django.contrib.auth.decorators import login_required,user_passes_test, permission_required
from django.contrib.auth.mixins import UserPassesTestMixin
import pandas as pd
from django.core.mail import send_mail
# Create your views here.
class FilteredListView(FormMixin, ListView):
def get_form_kwargs(self):
return {
'initial': self.get_initial(),
'prefix': self.get_prefix,
'data': self.request.GET or None
}
def get(self, request, *args, **kwargs):
self.object_list = self.get_queryset()
form = self.get_form(self.get_form_class())
if form.is_valid():
self.object_list = form.filter_queryset(request, self.object_list)
context = self.get_context_data(form=form, object_list=self.object_list)
return self.render(request, context)
@login_required(redirect_field_name=None)
@user_passes_test(lambda u: hasattr(u, 'student'), redirect_field_name=None,
login_url='/accounts/login/')
def submit_page(request):
'''Page for submitting records, accessible to student users'''
student = Student.objects.get(user=request.user)
if request.method == 'POST':
form = SubmitReportForm(request.POST or None, student)
form.fields['courses'].queryset = Course.objects.filter(students__in=[student])
if form.is_valid():
save_form = form.save(commit=False)
save_form.submitter = student
save_form.first_name = student.user.first_name
save_form.last_name = student.user.last_name
save_form.save()
form.save_m2m()
return HttpResponseRedirect('/accounts/student_view')
else:
form = SubmitReportForm()
form.fields['courses'].queryset = Course.objects.filter(students__in=[student])
return render(request, "submit_report.html", {'form': form})
# Faculty view of reports
######################################################################
from django.template.backends.utils import csrf_input_lazy, csrf_token_lazy
@login_required
@user_passes_test(lambda u: hasattr(u, 'faculty'))
def faculty_view(request):
reports = SubmitReport.objects.filter(courses__in=request.user.faculty.course_set.all()).distinct()
reports.filter(status='APPROVED')
form = ReportSearchForm(request.POST, user_type=request.user.faculty)
courses = request.user.faculty.course_set.all()
course_choices = []
for course in courses:
course_choices += [[course.pk, course]]
df = pd.DataFrame(list(reports.values('first_name', 'last_name', 'start_date', 'start_time', 'end_date', 'end_time', 'summary')))
form.fields['courses'].choices = course_choices
from django.template import Template, Context
if form.is_valid():
reports = form.filter_queryset(reports)
df = pd.DataFrame(list(reports.values(
'first_name', 'last_name', 'start_date', 'start_time', 'end_date', 'end_time', 'summary')))
if reports:
table = df.to_html(escape=False, index=False,
columns=['first_name', 'last_name', 'start_date', 'start_time', 'end_date', 'end_time', 'summary'],
formatters={
'summary': (lambda s: '<abbr title=\"' + s + '\">Notes</abbr>'),
# 'start_time': (lambda s: readable_datetime(s)),
# 'end_time': (lambda s: readable_datetime(s)),
})
else:
table = "No reports matched your search."
return render(request, "faculty_view.html", {'form': form,
'table': table,
})
#View for TA
##########################################################################
from django.template.backends.utils import csrf_input_lazy, csrf_token_lazy
@login_required
@user_passes_test(lambda u: hasattr(u, 'staff'))
def ta_view(request):
reports = SubmitReport.objects.query_pending_reports()
reports = reports.filter(courses__in=request.user.staff.courses.all()).distinct()
ApproveReportFormSet = modelformset_factory(SubmitReport, form=ReportApproveForm, extra=0)
if request.method == 'POST':
form = ReportSearchForm(request.POST, user_type=request.user.staff)
courses = request.user.staff.courses.all()
course_choices = []
for course in courses:
course_choices += [[course.pk, course]]
form.fields['courses'].choices = course_choices
if form.is_valid():
reports = form.filter_queryset(reports)
import pickle
request.session['search_results'] = pickle.dumps(reports.query)
return HttpResponseRedirect('/accounts/ta_search_results/')
else:
form = ReportSearchForm(request.POST, user_type=request.user.staff)
courses = request.user.staff.courses.all()
course_choices = []
for course in courses:
course_choices += [[course.pk, course]]
form.fields['courses'].choices = course_choices
report_forms = ApproveReportFormSet(queryset=reports)
return render(request, "ta_view.html", {'form': form,
'report_forms': report_forms,
})
@login_required
@user_passes_test(lambda u: hasattr(u, 'staff'))
def ta_results_view(request):
import pickle
    ApproveReportFormSet = modelformset_factory(SubmitReport, form=ReportApproveForm, extra=0)
reports = SubmitReport.objects.query_pending_reports().filter(courses__in=request.user.staff.courses.all()).distinct()
reports.query = pickle.loads(request.session['search_results'])
print reports
if request.method == 'POST':
        report_forms = ApproveReportFormSet(request.POST, queryset=reports)
if report_forms.is_valid():
report_forms.save()
reports.filter(status__exact='PENDING')
return render(request, "ta_search_results.html", {'formset': report_forms,})
else:
report_forms = ApproveReportFormSet(queryset=reports)
print report_forms
return render(request, "ta_search_results.html", {'formset': report_forms,})
#Related to login
##############################################################
def login_view(request):
"""Page for logging in"""
c = {}
c.update(csrf(request))
return render(request, 'login.html', c)
def auth_view(request):
"""Redirects users after login, or if login fails"""
username = request.POST.get('username', '')
password = request.POST.get('password', '')
user = auth.authenticate(username=username, password=password)
if user is not None:
auth.login(request, user)
if hasattr(user, 'student'):
return HttpResponseRedirect('/accounts/student_view/')
if hasattr(user, 'faculty'):
return HttpResponseRedirect('/accounts/faculty_view/')
else:
return HttpResponseRedirect('/accounts/invalid/')
def logout_view(request):
"""Page for users which have just logged out"""
auth.logout(request)
return render(request, 'logout.html')
#Home pages for different users (and also bd login info)
###################################################################
@login_required
@user_passes_test(lambda u: hasattr(u, 'student'))
def student_logged_in_view(request):
"""Homepage for logged in users"""
return render(request, 'loggedin.html',
{'username': request.user.username, 'is_TA': hasattr(request.user, "staff"),
'is_Student': hasattr(request.user, 'student')})
def invalid_login_view(request):
"""Page for users who have not successfully logged in"""
return render(request, 'invalid_login.html')
@login_required
@user_passes_test(lambda u: u.is_superuser)
def admin_home_view(request):
"""Homepage for logged in admin"""
return render(request, 'admin_loggedin.html',
{'username': request.user.username})
#Views for doing the actual stuff that users want to do
##########################################################################
@login_required
@user_passes_test(lambda u: u.is_superuser)
def add_partners_view(request):
'''Page for adding partners'''
form = AddPartnerForm(request.POST or None)
if form.is_valid():
save_form = form.save(commit=False)
save_form.save()
if '_add_another' in request.POST:
return HttpResponseRedirect('/admin/add_partner/')
return HttpResponseRedirect('/admin/home/')
return render(request,
cobrab11/black1-bot | extensions/servres.py | Python | apache-2.0 | 1,659 | 0.027557
# BS mark.1-55
# /* coding: utf-8 */
# BlackSmith plugin
# servres_plugin.py
# Coded by: WitcherGeralt (WitcherGeralt@jabber.ru)
# http://witcher-team.ucoz.ru/
SERVSTAT = {}
RESSTAT = {}
def handler_servres_stat(conf, nick, afl, role, status, text):
instance = GROUPCHATS[conf][nick]
if instance.has_key('full_jid'):
jid = instance['full_jid']
if jid.count('/'):
list = jid.split('/')
stripped_jid = list[0]
server = stripped_jid.split('@')[1]
resourse = list[1]
            if server not in SERVSTAT:
SERVSTAT[server] = []
if stripped_jid not in SERVSTAT[server]:
SERVSTAT[server].append(stripped_jid)
if resourse not in RESSTAT:
RESSTAT[resourse] = []
if stripped_jid not in RESSTAT[resourse]:
RESSTAT[resourse].append(stripped_jid)
def handler_check_servstat(type, source, body):
stat = []
for server in SERVSTAT:
stat.append([len(SERVSTAT[server]), server])
stat.sort()
stat.reverse()
list = ''
col = 0
for item in stat:
col = col + 1
if col <= 20:
list += '\n'+str(col)+'. '+item[1]+' - '+str(item[0])
    reply(type, source, u'Total servers '+str(col)+' :'+list)
def handler_check_resstat(type, source, body):
stat = []
for resourse in RESSTAT:
stat.append([len(RESSTAT[resourse]), resourse])
stat.sort()
stat.reverse()
list = ''
col = 0
for item in stat:
col = col + 1
if col <= 20:
list += '\n'+str(col)+'. '+item[1]+' - '+str(item[0])
    reply(type, source, u'Total resources '+str(col)+' :'+list)
handler_register("04eh", handler_servres_stat)
command_handler(handler_check_servstat, 10, "servres")
command_handler(handler_check_resstat, 10, "servres")
elkingtowa/alphacoin | Bitcoin/ngcccbase-master/server/run.py | Python | mit | 2,469 | 0.00081
#!/usr/bin/env python
import json
import sys
import web
from coloredcoinlib import BlockchainState, ColorDefinition
blockchainstate = BlockchainState.from_url(None, True)
urls = (
'/tx', 'Tx',
'/prefetch', 'Prefetch',
)
class ErrorThrowingRequestProcessor:
def require(self, data, key, message):
value = data.get(key)
if not value:
raise web.HTTPError("400 Bad request",
{"content-type": "text/plain"},
message)
class Tx(ErrorThrowingRequestProcessor):
def POST(self):
# data is sent in as json
data = json.loads(web.input().keys()[0])
self.require(data, 'txhash', "TX requires txhash")
txhash = data.get('txhash')
return blockchainstate.get_raw(txhash)
class Prefetch(ErrorThrowingRequestProcessor):
def POST(self):
# data is sent in as json
data = json.loads(web.input().keys()[0])
self.require(data, 'txhash', "Prefetch requires txhash")
self.require(data, 'output_set', "Prefetch requires output_set")
self.require(data, 'color_desc', "Prefetch requires color_desc")
txhash = data.get('txhash')
output_set = data.get('output_set')
color_desc = data.get('color_desc')
limit = data.get('limit')
color_def = ColorDefinition.from_color_desc(17, color_desc)
tx_lookup = {}
def process(current_txhash, current_outindex):
"""For any tx out, process the colorvalues
|
of the affecting
inputs first and then scan that tx.
"""
if limit and len(tx_lookup) > limit:
return
if tx_lookup.get(current_txhash):
return
current_tx = blockchainstate.get_tx(current_txhash)
if not current_tx:
return
            tx_lookup[current_txhash] = blockchainstate.get_raw(current_txhash)
# note a genesis tx will simply have 0 affecting inputs
inputs = set()
inputs = inputs.union(
color_def.get_affecting_inputs(current_tx,
[current_outindex]))
for i in inputs:
process(i.prevout.hash, i.prevout.n)
for oi in output_set:
process(txhash, oi)
return tx_lookup
if __name__ == "__main__":
app = web.application(urls, globals())
app.run()
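# Added note (not part of the original file): Prefetch.process above walks the
# transaction graph depth-first and backwards -- for each requested output it
# caches the raw tx, then recurses into every input that
# color_def.get_affecting_inputs() reports, stopping at the optional 'limit'
# or at a genesis tx (which simply has 0 affecting inputs).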
iitjee/steppinsPython | SystemProgramming/Parallel System/01 Forking.py | Python | gpl-3.0 | 5,146 | 0.021697
Forked processes are a traditional way to structure parallel tasks, and they are a fun- damental part of the Unix tool set.
Forking is based on the notion of copying programs: when a program calls the fork routine, the operating system makes a new
copy of that program and its process in memory and starts running that copy in parallel with the original.
Some systems don’t really copy the original program (it’s an expensive operation), but the new copy works as if it were a
literal copy.
After a fork, original process = parent and the copy created by os.fork = child
In general, parents can make any number of children, and children can create child processes of their own; all forked
processes run independently and in parallel under the operating system’s control, and children may continue to run after their
parent exits.
"This script forks child processes until you type 'q'"
import os
def child():
print('Hello from child', os.getpid())
os._exit(0) # else goes back to parent loop
def parent():
while True:
newpid = os.fork()
if newpid == 0: #this is from child process
child()
else: #this is from parent process
print('Hello from parent', os.getpid(), newpid)
if input() == 'q': break
parent()
Python’s process forking tools, available in the os module, are simply thin wrappers over standard forking calls in the
system library also used by C language programs.
**To start a new, parallel process, call the os.fork built-in function. Because this function generates a copy of the
calling program, it returns a different value in each copy: zero in the child process and the process ID of the new child in
the parent.
Programs generally test this result to begin different processing in the child only; this script, for instance,
runs the child function in child processes only.
Because forking is ingrained in the Unix programming model, this script works well on Unix, Linux, and modern Macs. (won’t
work on the standard version of Python for Windows today, because fork is too much at odds with the Win- dows model. multi
processing module, provides an alternative for running processes portably )
Note: Above script does work on Windows, however, if you use the Python shipped with the Cygwin system (or build one of your
own from source-code with Cygwin’s libraries). Cygwin is a free, open source system that provides full Unix-like
functionality for Windows
A subtle point: the child process function is also careful to exit explicitly with an os._exit call. We’ll discuss this call
in more detail later in this chapter, but if it’s not made, the child process would live on after the child function r
eturns (remember, it’s just a copy of the original process). The net effect is that the child would go back to the loop in
parent and start forking children of its own (i.e., the parent would have grandchildren). If you delete the exit call and
rerun, you’ll likely have to type more than one q to stop, because multiple processes are running in the parent function.
In the above example, each process exits very soon after it starts, so there's little overlap in time. Let's do something slightly
more sophisticated to better illustrate multiple forked processes running in parallel.
"""
fork basics: start 5 copies of this program running in parallel with the original; each copy counts up to 5 on the same
stdout stream--forks copy process memory, including file descriptors; fork doesn't currently work on Windows without Cygwin:
use os.spawnv or multiprocessing on Windows instead; spawnv is roughly like a fork+exec combination;
"""
import os, time
def counter(count):
for i in range(count):
time.sleep(1)
print('[%s] => %s' % (os.getpid(), i))
for i in range(5):
pid = os.fork()
if pid != 0:
print('Process %d spawned' % pid)
else:
counter(5)
os._exit(0)
print('Main process exiting.')
# run in new process # simulate real work
# in parent: continue
# else in child/new process # run function and exit
# parent need not wait
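If the parent did want to synchronize with its children instead of exiting immediately, it could reap them with os.wait,
which blocks until one child terminates and returns its (pid, encoded exit status) pair -- a small added sketch, not part
of the original script:
for i in range(5):
    pid, status = os.wait()
    print('child %s exited with status %s' % (pid, status))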
When run, this script starts 5 processes immediately and exits. All 5 forked processes check in with their first count
display one second later and every second thereafter. Notice that child processes continue to run, even if the parent
process that created them terminates:
The output of all of these processes shows up on the same screen, because all of them share the standard output stream (and
a system prompt may show up along the way, too).
Technically, a forked process gets a copy of the original process’s global memory, including open file descriptors. Because
of that, global objects like files start out with the same values in a child process, so all the processes here are tied to
the same single stream. But it’s important to remember that global memory is copied, not shared; if a child process changes
a global object, it changes only its own copy. (As we’ll see, this works differently in threads,
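a minimal added sketch, not in the original text, that makes the copy-not-share point concrete:
import os
counter = 0
if os.fork() == 0:                    # child: gets a *copy* of counter
    counter += 1
    print('child sees', counter)      # -> 1
    os._exit(0)
os.wait()
print('parent sees', counter)         # -> 0, the parent's copy is untouched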
thinkAmi-sandbox/python_ms_access_sample | pyodbc_runner.py | Python | unlicense | 487 | 0.020316
import pyodbc
import config
def main():
    # format treats `{` specially, so a literal brace is escaped by doubling it (`{{`)
con_str = 'Driver={{Microsoft Access Driver (*.mdb, *.accdb)}};Dbq={0};'.format(config.PATH_ACCDB)
conn = pyodbc.connect(con_str)
cur = conn.cursor()
cur.execute("select item_name from item")
for c in cur.fetchall():
        print(c[0])  # => `ringo`, `みかん`
    cur.close()
conn.close()
if __name__ == '__main__':
main()
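# Added illustration of the brace-escaping noted above (values are made-up):
# str.format treats '{' and '}' specially, so literal braces are doubled:
#   'Driver={{SQLite3}};Dbq={0};'.format('test.db')
#   -> 'Driver={SQLite3};Dbq=test.db;'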
UCRoboticsLab/BaxterTictactoe | src/baxter_tictactoe/scripts/animator_server.py | Python | apache-2.0 | 4,046 | 0.005932
#!/usr/bin/env python
import glob
import cv2
import cv_bridge
import rospy
from sensor_msgs.msg import Image
from std_msgs.msg import Int32, Float32
import rospkg
import sys
class Animation:
def __init__(self, directory):
self.fnames = [fname for fname in glob.glob("%s/*" % directory)]
self.fnames.sort()
self.images = [cv2.imread(path) for path in self.fnames]
self.animation_timer = None
        self.current_value = 0
self.current_idx = 0
self.set_velocity(1/20.0)
self.current_target = 99
self.image_publisher = rospy.Publisher("/robot/xdisplay", Image,
queue_size=10)
self.value_subscriber = rospy.Subscriber("/confusion/value/command", Int32, self.set_value)
self.target_subscriber = rospy.Subscriber("/confusion/target/command", Int32, self.set_target)
self.value_publisher = rospy.Publisher("/confusion/value/state", Int32,
queue_size=10)
self.target_publisher = rospy.Publisher("/confusion/target/state", Int32,
queue_size=10)
self.timer = rospy.Timer(rospy.Duration(self.velocity), self.timer_cb)
def set_velocity(self, velocity):
if isinstance(velocity, Float32):
velocity = velocity.data
self.velocity = velocity
if (self.animation_timer != None):
self.animation_timer.shutdown()
def set_idx(self, idx):
self.current_idx = idx;
self.current_value = int((float(idx) / len(self.images)) * 100)
self.checkrep()
        return self.publish_image()
def set_value(self, value):
if isinstance(value, Int32):
print "setting value from topic"
value = value.data
self.current_value = value
self.current_idx = int((value / 100.0) * (len(self.images)))
print self.current_idx
self.checkrep()
return self.publish_image()
def checkrep(self):
assert 0 <= self.current_idx < len(self.images), self.current_idx
assert 0 <= self.current_value < 100, self.current_value
assert self.current_target == None or (0 <= self.current_target < 100), self.current_target
def publish_image(self):
msg = cv_bridge.CvBridge().cv2_to_imgmsg(self.image, encoding="bgr8")
self.image_publisher.publish(msg)
return self.images[self.current_idx]
def set_target(self, target):
if isinstance(target, Int32):
print "setting target from topic"
target = target.data
print "setting target", target
self.current_target = target
@property
def image(self):
return self.images[self.current_idx]
def publish_state(self):
self.value_publisher.publish(self.current_value)
self.target_publisher.publish(self.current_target)
def timer_cb(self, time):
self.animate()
#print "anime timer is running"
self.publish_state()
def animate(self):
if self.current_target != None:
print "target", self.current_target, self.current_value
if self.current_target < self.current_value:
self.set_value(self.current_value - 1)
elif self.current_target > self.current_value:
self.set_value(self.current_value + 1)
elif self.current_target == self.current_value:
#self.current_target = None
self.current_value = 0
else:
raise ValueError("No target: " + `self.target`)
def main():
rospy.init_node('animator_server', anonymous=True)
rate = rospy.Rate(30)
rospack = rospkg.RosPack()
path = sys.argv[1]
Animation(path)
while not rospy.is_shutdown():
rate.sleep()
if __name__ == "__main__":
main()
Coder-Yu/RecQ | algorithm/ranking/DMF.py | Python | gpl-3.0 | 6,418 | 0.012932
#coding:utf8
from baseclass.DeepRecommender import DeepRecommender
import numpy as np
from random import choice,random,randint,shuffle
from tool import config
import tensorflow as tf
#According to the paper, we only
class DMF(DeepRecommender):
def __init__(self,conf,trainingSet=None,testSet=None,fold='[1]'):
super(DMF, self).__init__(conf,trainingSet,testSet,fold)
def next_batch(self,i):
rows = np.zeros(((self.negative_sp+1)*self.batch_size,self.num_items))
cols = np.zeros(((self.negative_sp+1)*self.batch_size,self.num_users))
batch_idx = range(self.batch_size*i,self.batch_size*(i+1))
users = [self.data.trainingData[idx][0] for idx in batch_idx]
items = [self.data.trainingData[idx][1] for idx in batch_idx]
u_idx = [self.data.user[u] for u in users]
v_idx = [self.data.item[i] for i in items]
ratings = [float(self.data.trainingData[idx][2]) for idx in batch_idx]
for i,user in enumerate(users):
rows[i] = self.data.row(user)
for i,item in enumerate(items):
cols[i] = self.data.col(item)
userList = self.data.user.keys()
itemList = self.data.item.keys()
#negative sample
for i in range(self.negative_sp*self.batch_size):
u = choice(userList)
v = choice(itemList)
while self.data.contains(u,v):
u = choice(userList)
v = choice(itemList)
            rows[self.batch_size+i] = self.data.row(u)
            cols[self.batch_size+i] = self.data.col(v)
u_idx.append(self.data.user[u])
v_idx.append(self.data.item[v])
ratings.append(0)
return rows,cols,np.array(ratings),np.array(u_idx),np.array(v_idx)
def initModel(self):
super(DMF, self).initModel()
n_input_u = len(self.data.item)
n_input_i = len(self.data.user)
self.negative_sp = 5
        self.n_hidden_u = [256, 512]
        self.n_hidden_i = [256, 512]
        self.input_u = tf.placeholder(tf.float32, [None, n_input_u])
        self.input_i = tf.placeholder(tf.float32, [None, n_input_i])
def buildModel(self):
super(DMF, self).buildModel_tf()
initializer = tf.contrib.layers.xavier_initializer()
#user net
user_W1 = tf.Variable(initializer([self.num_items, self.n_hidden_u[0]],stddev=0.01))
self.user_out = tf.nn.relu(tf.matmul(self.input_u, user_W1))
self.regLoss = tf.nn.l2_loss(user_W1)
for i in range(1, len(self.n_hidden_u)):
W = tf.Variable(initializer([self.n_hidden_u[i-1], self.n_hidden_u[i]],stddev=0.01))
b = tf.Variable(initializer([self.n_hidden_u[i]],stddev=0.01))
self.regLoss = tf.add(self.regLoss,tf.nn.l2_loss(W))
self.regLoss = tf.add(self.regLoss, tf.nn.l2_loss(b))
self.user_out = tf.nn.relu(tf.add(tf.matmul(self.user_out, W), b))
#item net
item_W1 = tf.Variable(initializer([self.num_users, self.n_hidden_i[0]],stddev=0.01))
self.item_out = tf.nn.relu(tf.matmul(self.input_i, item_W1))
self.regLoss = tf.add(self.regLoss, tf.nn.l2_loss(item_W1))
for i in range(1, len(self.n_hidden_i)):
W = tf.Variable(initializer([self.n_hidden_i[i-1], self.n_hidden_i[i]],stddev=0.01))
b = tf.Variable(initializer([self.n_hidden_i[i]],stddev=0.01))
self.regLoss = tf.add(self.regLoss, tf.nn.l2_loss(W))
self.regLoss = tf.add(self.regLoss, tf.nn.l2_loss(b))
self.item_out = tf.nn.relu(tf.add(tf.matmul(self.item_out, W), b))
norm_user_output = tf.sqrt(tf.reduce_sum(tf.square(self.user_out), axis=1))
norm_item_output = tf.sqrt(tf.reduce_sum(tf.square(self.item_out), axis=1))
self.y_ = tf.reduce_sum(tf.multiply(self.user_out, self.item_out), axis=1) / (
norm_item_output * norm_user_output)
self.y_ = tf.maximum(1e-6, self.y_)
self.loss = self.r*tf.log(self.y_) + (1 - self.r) * tf.log(1 - self.y_)#tf.nn.sigmoid_cross_entropy_with_logits(logits=self.y_,labels=self.r)
#self.loss = tf.nn.l2_loss(tf.subtract(self.y_,self.r))
self.loss = -tf.reduce_sum(self.loss)
reg_lambda = tf.constant(self.regU, dtype=tf.float32)
self.regLoss = tf.multiply(reg_lambda,self.regLoss)
self.loss = tf.add(self.loss,self.regLoss)
optimizer = tf.train.AdamOptimizer(self.lRate).minimize(self.loss)
self.U = np.zeros((self.num_users, self.n_hidden_u[-1]))
self.V = np.zeros((self.num_items, self.n_hidden_u[-1]))
init = tf.global_variables_initializer()
self.sess.run(init)
total_batch = int(len(self.data.trainingData)/ self.batch_size)
for epoch in range(self.maxIter):
shuffle(self.data.trainingData)
for i in range(total_batch):
users,items,ratings,u_idx,v_idx = self.next_batch(i)
shuffle_idx=np.random.permutation(range(len(users)))
users = users[shuffle_idx]
items = items[shuffle_idx]
ratings = ratings[shuffle_idx]
u_idx = u_idx[shuffle_idx]
v_idx = v_idx[shuffle_idx]
_,loss= self.sess.run([optimizer, self.loss], feed_dict={self.input_u: users,self.input_i:items,self.r:ratings})
print self.foldInfo, "Epoch:", '%04d' % (epoch + 1), "Batch:", '%03d' % (i + 1), "loss=", "{:.9f}".format(loss)
#save the output layer
U_embedding, V_embedding = self.sess.run([self.user_out, self.item_out], feed_dict={self.input_u: users,self.input_i:items})
for ue,u in zip(U_embedding,u_idx):
self.U[u]=ue
for ve,v in zip(V_embedding,v_idx):
self.V[v]=ve
self.normalized_V = np.sqrt(np.sum(self.V * self.V, axis=1))
self.normalized_U = np.sqrt(np.sum(self.U * self.U, axis=1))
self.ranking_performance()
print("Optimization Finished!")
def predictForRanking(self, u):
'invoked to rank all the items for the user'
if self.data.containsUser(u):
uid = self.data.user[u]
return np.divide(self.V.dot(self.U[uid]),self.normalized_U[uid]*self.normalized_V)
else:
return [self.data.globalMean] * self.num_items
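# Added NumPy sketch (illustrative only, not in the original file): the ranking
# above is plain cosine similarity between the learned user vector and every
# item vector, scores_j = V[j].U[uid] / (|U[uid]| * |V[j]|), e.g.:
#   U_u = np.random.rand(512); V = np.random.rand(100, 512)
#   scores = V.dot(U_u) / (np.linalg.norm(U_u) * np.linalg.norm(V, axis=1))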
mypinballs/whirlwind | tilt.py | Python | gpl-3.0 | 2,464 | 0.027597
# Top Rollover Lanes
__author__="jim"
__date__ ="$Jan 18, 2011 1:36:37 PM$"
import procgame
import locale
from procgame import *
base_path = config.value_for_key_path('base_path')
game_path = base_path+"games/whirlwind/"
speech_path = game_path +"speech/"
sound_path = game_path +"sound/"
music_path = game_path +"music/"
class Tilt(game.Mode):
def __init__(self, game,priority):
        super(Tilt, self).__init__(game, priority)
self.text_layer = dmd.TextLayer(128/2, 10, self.game.fonts['num_09Bx7'], "center", opaque=True)
self.game.sound.register_sound('tilt', sound_path+"tilt.ogg")
self.game.sound.register_sound('tilt_warning', sound_path+"tilt_warning.ogg")
self.reset()
def reset(self):
self.status = 0
        self.times_warned = 0
def status(self):
if self.status==0:
return False
elif self.status==1:
return True
def tilt(self):
#check if already in a tilt state
if self.status == 0:
#set status
self.status = 1
#update display
self.text_layer.set_text("TILT",blink_frames=20)
self.layer = self.text_layer
#play sound
self.game.sound.play('tilt')
#turn off flippers
self.game.enable_flippers(enable=False)
# Make sure ball won't be saved when it drains.
self.game.ball_save.disable()
# Make sure the ball search won't run while ball is draining.
self.game.ball_search.disable()
#turn off all lamps
for lamp in self.game.lamps:
lamp.disable()
#check for stuck balls
# if self.game.switches.shooterLane.is_active():
# self.game.coils.ballLaunch.pulse(50)
def warning(self):
self.times_warned += 1
#update display
time=2
self.text_layer.set_text("WARNING",blink_frames=5,seconds=time)
self.layer = self.text_layer
self.delay(name='clear_delay', event_type=None, delay=time, handler=self.clear)
#play sound
        self.game.sound.play('tilt_warning')
def clear(self):
self.layer = None
def sw_plumbBobTilt_active(self, sw):
if self.times_warned == self.game.user_settings['Standard']['Tilt Warnings']:
self.tilt()
else:
self.warning()
isb-cgc/ISB-CGC-Webapp | seqpeek/admin.py | Python | apache-2.0 | 666 | 0
#
# Copyright 2015-2019, Institute for Systems Biology
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from django.contrib import admin
# Register your models here.
drodri/client | command/tool_catalog.py | Python | mit | 2,824 | 0.001771
import inspect
from biicode.client.shell.biistream import Color
class ToolCatalog(dict):
def __init__(self, main_class, tools):
dict.__init__(self)
self.main_class = main_class
# dict from tool group name to set of classes
for c in tools:
self[c.group] = c
self.show_advanced = False
def _get_doc_short(self, doc):
return doc.split('\n', 1)[0]
def print_help(self, out, argv):
out.writeln('\nSYNOPSIS:', Color.YELLOW)
out.writeln(' $ bii COMMAND [options]')
out.writeln('For help about a command:', Color.YELLOW)
out.writeln(' $ bii COMMAND --help')
out.write('To change verbosity, use options ', Color.YELLOW)
out.writeln('--quiet --verbose\n')
if not argv or 'all' in argv:
out.writeln('--------- Global Commands ----------', Color.YELLOW)
for m in inspect.getmembers(self.main_class, predicate=inspect.ismethod):
method_name = m[0]
if not method_name.startswith('_'):
method = m[1]
if not method.__doc__.startswith(' ADVANCED'):
doc = method.__doc__
out.write(' %-10s' % method_name, Color.GREEN)
out.writeln(self._get_doc_short(doc))
elif self.show_advanced:
doc = method.__doc__.replace(' ADVANCED', '')
out.write(' %-10s' % method_name, Color.GREEN)
out.writeln(self._get_doc_short(doc))
if not argv:
out.writeln('\n--------- Tools ----------', Color.YELLOW)
out.writeln('For help about one or more tools ("all" for all):', Color.YELLOW)
out.writeln(' $ bii --help TOOL [TOOL2]\n')
for group, class_ in self.iteritems():
out.write(' %-10s ' % class_.group, Color.GREEN)
out.writeln(class_.__doc__)
else:
# Tools, as main commands
for group, class_ in self.iteritems():
if group not in argv and 'all' not in argv:
continue
out.writeln('---------%s--------' % class_.__doc__, Color.YELLOW)
for m in inspect.getmembers(class_, predicate=inspect.ismethod):
method_name = m[0]
method = m[1]
if method.__doc__:
method_doc = self._get_doc_short(method.__doc__)
if not method_name.startswith('_') and not method_doc.startswith('HIDDEN'):
com = '%s:%s' % (group, method_name)
out.write(' %-15s ' % com, Color.GREEN)
out.writeln(method_doc)
mpvoss/RickAndMortyWeatherTweets | env/lib/python3.5/site-packages/pyowm/commons/weather_client.py | Python | mit | 4,935 | 0.001216
"""
Module containing classes for HTTP client/server interactions
"""
# Python 2.x/3.x compatibility imports
try:
from urllib.error import HTTPError, URLError
from urllib.parse import urlencode
except ImportError:
from urllib2 import HTTPError, URLError
from urllib import urlencode
import socket
from pyowm.exceptions import api_call_error, unauthorized_error, not_found_error
from pyowm.webapi25.configuration25 import ROOT_API_URL
class WeatherHttpClient(object):
API_SUBSCRIPTION_SUBDOMAINS = {
'free': 'api',
'pro': 'pro'
}
"""
An HTTP client class for the OWM web API. The class can leverage a
caching mechanism
:param API_key: a Unicode object representing the OWM web API key
:type API_key: Unicode
:param cache: an *OWMCache* concrete instance that will be used to
cache OWM web API responses.
:type cache: an *OWMCache* concrete instance
:param subscription_type: the type of OWM web API subscription to be wrapped.
The value is used to pick the proper API subdomain for HTTP calls.
Defaults to: 'free'
:type subscription_type: str
"""
def __init__(self, API_key, cache, subscription_type='free'):
self._API_key = API_key
self._cache = cache
self._API_root_URL = ROOT_API_URL % \
(self.API_SUBSCRIPTION_SUBDOMAINS[subscription_type],)
def _lookup_cache_or_invoke_API(self, cache, API_full_url, timeout):
cached = cache.get(API_full_url)
if cached:
return cached
else:
try:
try:
from urllib.request import urlopen
except ImportError:
from urllib2 import urlopen
response = urlopen(API_full_url, None, timeout)
except HTTPError as e:
if '401' in str(e):
raise unauthorized_error.UnauthorizedError('Invalid API key')
if '404' in str(e):
raise not_found_error.NotFoundError('The resource was not found')
if '502' in str(e):
raise api_call_error.BadGatewayError(str(e), e)
except URLError as e:
raise api_call_error.APICallError(str(e), e)
            else:
data = response.read().decode('utf-8')
cache.set(API_full_url, data)
return data
def call_API(self, API_endpoint_URL, params_dict,
timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
"""
Invokes a specific OWM web API endpoint URL, returning raw JSON data.
:param API_endpoint_URL: the API endpoint to be invoked
:type API_endpoint_URL: str
:param params_dict: a dictionary containing the query parameters to be
used in the HTTP request (given as key-value couples in the dict)
:type params_dict: dict
:param timeout: how many seconds to wait for connection establishment
(defaults to ``socket._GLOBAL_DEFAULT_TIMEOUT``)
:type timeout: int
:returns: a string containing raw JSON data
:raises: *APICallError*
"""
url = self._build_full_URL(API_endpoint_URL, params_dict)
return self._lookup_cache_or_invoke_API(self._cache, url, timeout)
def _build_full_URL(self, API_endpoint_URL, params_dict):
"""
Adds the API key and the query parameters dictionary to the specified
API endpoint URL, returning a complete HTTP request URL.
:param API_endpoint_URL: the API endpoint base URL
:type API_endpoint_URL: str
:param params_dict: a dictionary containing the query parameters to be
used in the HTTP request (given as key-value couples in the dict)
:type params_dict: dict
:param API_key: the OWM web API key
:type API_key: str
:returns: a full string HTTP request URL
"""
        url = self._API_root_URL + API_endpoint_URL
params = params_dict.copy()
if self._API_key is not None:
params['APPID'] = self._API_key
return self._build_query_parameters(url, params)
def _build_query_parameters(self, base_URL, params_dict):
"""
Turns dictionary items into query parameters and adds them to the base
URL
:param base_URL: the base URL whom the query parameters must be added
to
:type base_URL: str
:param params_dict: a dictionary containing the query parameters to be
used in the HTTP request (given as key-value couples in the dict)
:type params_dict: dict
:returns: a full string HTTP request URL
"""
return base_URL + '?' + urlencode(params_dict)
def __repr__(self):
return "<%s.%s - cache=%s>" % \
(__name__, self.__class__.__name__, repr(self._cache))
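# --- Hedged usage sketch (not part of the original module) ---
# Minimal illustration of how the client composes a request URL and consults
# its cache. DummyCache below is a stand-in written for this example only;
# real OWMCache implementations live elsewhere in pyowm.
if __name__ == '__main__':
    class DummyCache(object):
        """In-memory stand-in for an OWMCache implementation."""
        def __init__(self):
            self._store = {}
        def get(self, request_url):
            return self._store.get(request_url)
        def set(self, request_url, response_json):
            self._store[request_url] = response_json
    client = WeatherHttpClient('fake-API-key', DummyCache())
    # Shows the APPID and query parameters being appended to the endpoint:
    print(client._build_full_URL('weather', {'q': 'London,uk'}))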
|
di/ppa
|
stats.py
|
Python
|
mit
| 2,062
| 0.022793
|
#!/usr/bin/python
import fetch
import sys
from pymongo import MongoClient
db = MongoClient().ppa.ticket
for loc in db.distinct('location'):
print loc
sys.exit(1)
''' Getting the count for each date
for date in sorted(db.distinct('issueDate')):
print "%s: %s" % (date, db.find({'issueDate':date}).count())
'''
min_id = db.find().sort([('_id', 1)]).limit(1)[0]['_id'] - 1
max_id = db.find().sort([('_id', -1)]).limit(1)[0]['_id'] + 1
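# Align the scan to 200-wide id blocks: i is min_id's offset within its block
# ((x % 1000) % 200 == x % 200), so block_start falls on the first id of the
# block that contains min_id.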
i = (min_id % 1000) % 200
block_start = min_id - i + 1
_total = 0
_resolved = 0
_unresolved = 0
_missing = 0
while block_start < max_id :
block_end = block_start + 199
    total = db.find({'_id':{'$gte':block_start, '$lte':block_end}}).count()
try :
start_date = db.find({'_id':{'$gte':block_start,'$lte':block_end},'issueTime':{'$exists':True}}).sort([('_id', 1)]).limit(1)[0]['issueTime']
except :
start_date = "???"
try :
end_date = db.find({'_id':{'$gte':block_start,'$lte':block_end},'issueTime':{'$exists':True}}).sort([('_id', -1)]).limit(1)[0]['issueTime']
except :
end_date = "???"
resolved = db.find({'_id':{'$gte':block_start,'$lte':block_end},'resolved':True}).count()
unresolved = db.find({'_id':{'$gte':block_start,'$lte':block_end},'resolved':False}).count()
missing = db.find({'_id':{'$gte':block_start,'$lte':block_end},'missing':True}).count()
if True:
print "%s: %d-%d" % ("Range", block_start, block_end)
print "\t%s:\t\t%s" % ("Start", start_date)
print "\t%s:\t\t%s" % ("End", end_date)
print "\t%s:\t%d" % ("Resolved", resolved)
print "\t%s:\t%d" % ("Unresolved", unresolved)
print "\t%s:\t%d" % ("Missing", missing)
print "\t%s:\t\t%d" % ("Total", total)
block_start += 200
_total += total
_resolved += resolved
_unresolved += unresolved
_missing += missing
print "*************"
print "%s:\t%d" % ("Resolved", _resolved)
print "%s:\t%d" % ("Unresolved", _unresolved)
print "%s:\t%d" % ("Missing", _missing)
print "%s:\t\t%d" % ("Total", _total)
|
opendoor-labs/rets
|
rets/http/parsers/parse_object.py
|
Python
|
mit
| 4,892
| 0.001635
|
import mimetypes
from typing import Optional, Sequence
import cgi
from requests import Response
from requests.structures import CaseInsensitiveDict
from requests_toolbelt.multipart.decoder import MultipartDecoder
from rets.errors import RetsApiError, RetsResponseError
from rets.http.data import Object
from rets.http.parsers.parse import DEFAULT_ENCODING, ResponseLike, parse_xml
def parse_object(response: Response) -> Sequence[Object]:
"""
Parse the response from a GetObject transaction. If there are multiple
objects to be returned then the response should be a multipart response.
The headers of the response (or each part in the multipart response)
contains the metadata for the object, including the location if requested.
The body of the response should contain the binary content of the object,
    an XML document specifying a transaction status code, or be left empty.
"""
content_type = response.headers.get('content-type')
if content_type and 'multipart/parallel' in content_type:
return _parse_multipart(response)
object_ = _parse_body_part(response)
return (object_,) if object_ is not None else ()
def _parse_multipart(response: ResponseLike) -> Sequence[Object]:
"""
RFC 2045 describes the format of an Internet message body containing a MIME message. The
body contains one or more body parts, each preceded by a boundary delimiter line, and the
last one followed by a closing boundary delimiter line. After its boundary delimiter line,
    each body part then consists of a header area, a blank line, and a body area.
HTTP/1.1 200 OK
Server: Apache/2.0.13
Date: Fri, 22 OCT 2004 12:03:38 GMT
Cache-Control: private
RETS-Version: RETS/1.7.2
MIME-Version: 1.0
Content-Type: multipart/parallel; boundary="simple boundary"
--simple boundary
Content-Type: image/jpeg
Content-ID: 123456
Object-ID: 1
<binary data>
    --simple boundary
Content-Type: text/xml
Content-ID: 123457
Object-ID: 1
<RETS ReplyCode="20403" ReplyText="There is no listing with that ListingID"/>
--simple boundary--
"""
encoding = response.encoding or DEFAULT_ENCODING
multipart = MultipartDecoder.from_response(response, encoding)
# We need to decode the headers because MultipartDecoder returns bytes keys and values,
# while requests.Response.headers uses str keys and values.
for part in multipart.parts:
part.headers = _decode_headers(part.headers, encoding)
objects = (_parse_body_part(part) for part in multipart.parts)
return tuple(object_ for object_ in objects if object_ is not None)
def _parse_body_part(part: ResponseLike) -> Optional[Object]:
headers = part.headers
content_id = headers.get('content-id')
object_id = headers.get('object-id')
preferred = 'preferred' in headers
description = headers.get('content-description')
location = headers.get('location')
content_type = headers.get('content-type')
mime_type = _parse_mime_type(content_type) if content_type else None
# Check XML responses first, it may contain an error description.
if mime_type == 'text/xml':
try:
parse_xml(part)
except RetsApiError as e:
if e.reply_code == 20403: # No object found
return None
raise
# All RETS responses _must_ have `Content-ID` and `Object-ID` headers.
if not content_id or not object_id:
raise RetsResponseError(part.content, part.headers)
# Respond with `Location` header redirect.
if location:
return Object(
mime_type=_guess_mime_type(location) or mime_type,
content_id=content_id,
description=description,
object_id=object_id,
url=location,
preferred=preferred,
data=None,
)
# Check the `Content-Type` header exists for object responses.
if mime_type is None or mime_type == 'text/html':
raise RetsResponseError(part.content, part.headers)
return Object(
mime_type=mime_type,
content_id=content_id,
description=description,
object_id=object_id,
url=None,
preferred=preferred,
data=part.content or None,
)
def _guess_mime_type(location: str) -> Optional[str]:
mime_type, _ = mimetypes.guess_type(location)
return mime_type
def _parse_mime_type(content_type: str) -> Optional[str]:
# Parse mime type from content-type header, e.g. 'image/jpeg;charset=US-ASCII' -> 'image/jpeg'
mime_type, _ = cgi.parse_header(content_type)
return mime_type or None
def _decode_headers(headers: CaseInsensitiveDict, encoding: str) -> CaseInsensitiveDict:
return CaseInsensitiveDict({
k.decode(encoding): v.decode(encoding)
for k, v in headers.items()
})
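# --- Hedged usage sketch (not part of the original module) ---
# Quick illustration of the two mime-type helpers above; the URL and header
# value are made up for the example.
if __name__ == '__main__':
    print(_guess_mime_type('http://example.com/photos/listing-1.jpg'))  # image/jpeg
    print(_parse_mime_type('image/jpeg;charset=US-ASCII'))              # image/jpeg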
|
stacybird/CS510CouchDB
|
scripts/csv_to_json.py
|
Python
|
apache-2.0
| 651
| 0.029186
|
#!/usr/bin/env python
import sys
import csv
import json
if len(sys.argv) != 3:
    print 'Incorrect number of arguments.'
print 'Usage: csv_to_json.py path_to_csv path_to_json'
exit()
print 'Argument List:', str(sys.argv)
csvFileName = sys.argv[1]
jsonFileName = sys.argv[2]
csvFile = open (csvFileName, 'rU')
myReader = csv.reader(csvFile)
header = myReader.next()
print "Header fields:", header
myReader = csv.DictReader(csvFile, fieldnames=header)
parsedJson = json.dumps( [ row for row in myReader ] )
print "JSON parsed."
jsonFile = open( jsonFileName, 'w')
jsonFile.write(parsedJson)
print "JSON saved to: ", jsonFileName
|
JavaRabbit/CS496_capstone
|
codelabs/flex_and_vision/main.py
|
Python
|
apache-2.0
| 4,034
| 0
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# [START app]
from datetime import datetime
import logging
import os
from flask import Flask, redirect, render_template, request
from google.cloud import datastore
from google.cloud import storage
from google.cloud import vision
CLOUD_STORAGE_BUCKET = os.environ.get('CLOUD_STORAGE_BUCKET')
app = Flask(__name__)
@app.route('/')
def homepage():
# Create a Cloud Datastore client.
datastore_client = datastore.Client()
# Use the Cloud Datastore client to fetch information from Datastore about
# each photo.
query = datastore_client.query(kind='Faces')
image_entities = list(query.fetch())
# Return a Jinja2 HTML template and pass in image_entities as a parameter.
return render_template('homepage.html', image_entities=image_entities)
@app.route('/upload_photo', methods=['GET', 'POST'])
def upload_photo():
photo = request.files['file']
# Create a Cloud Storage client.
storage_client = storage.Client()
# Get the bucket that the file will be uploaded to.
bucket = storage_client.get_bucket(CLOUD_STORAGE_BUCKET)
    # Create a new blob and upload the file's content.
blob = bucket.blob(photo.filename)
blob.upload_from_string(
photo.read(), content_type=photo.content_type)
# Make the blob publicly viewable.
blob.make_public()
# Create a Cloud Vision client.
vision_client = vision.Client()
    # Use the Cloud Vision client to detect a face for our image.
source_uri = 'gs://{}/{}'.format(CLOUD_STORAGE_BUCKET, blob.name)
image = vision_client.image(source_uri=source_uri)
faces = image.detect_faces(limit=1)
# If a face is detected, save to Datastore the likelihood that the face
# displays 'joy,' as determined by Google's Machine Learning algorithm.
if len(faces) > 0:
face = faces[0]
# Convert the face.emotions.joy enum type to a string, which will be
# something like 'Likelihood.VERY_LIKELY'. Parse that string by the
# period to extract only the 'VERY_LIKELY' portion.
face_joy = str(face.emotions.joy).split('.')[1]
else:
face_joy = 'Unknown'
# Create a Cloud Datastore client.
datastore_client = datastore.Client()
# Fetch the current date / time.
current_datetime = datetime.now()
# The kind for the new entity.
kind = 'Faces'
# The name/ID for the new entity.
name = blob.name
# Create the Cloud Datastore key for the new entity.
key = datastore_client.key(kind, name)
# Construct the new entity using the key. Set dictionary values for entity
# keys blob_name, storage_public_url, timestamp, and joy.
entity = datastore.Entity(key)
entity['blob_name'] = blob.name
entity['image_public_url'] = blob.public_url
entity['timestamp'] = current_datetime
entity['joy'] = face_joy
# Save the new entity to Datastore.
datastore_client.put(entity)
# Redirect to the home page.
return redirect('/')
@app.errorhandler(500)
def server_error(e):
logging.exception('An error occurred during a request.')
return """
An internal error occurred: <pre>{}</pre>
See logs for full stacktrace.
""".format(e), 500
if __name__ == '__main__':
# This is used when running locally. Gunicorn is used to run the
# application on Google App Engine. See entrypoint in app.yaml.
app.run(host='127.0.0.1', port=8080, debug=True)
# [END app]
|
mathblogging/mathblogging
|
dataexport.py
|
Python
|
agpl-3.0
| 4,361
| 0.016969
|
from main import *
from django.utils import simplejson
class CsvView(CachedPage):
cacheName = "CsvView"
mimeType = "text/csv"
selfurl = "database.csv"
def generatePage(self):
output = []
output.append( """title, homepage, person, category, posts_url, comments_url, priority, favicon, listtitle, language""")
for feed in Feed.gql("WHERE category IN :1 ORDER BY listtitle", ['pure', 'applied', 'teacher', 'history', 'visual','art','journalism','fun','journal', 'general','institution','commercial','community']):
output.append(""" "%(title)s",%(homepage)s,"%(person)s",%(category)s,%(url)s,%(comments)s,1,%(favicon)s, "%(listtitle)s",%(language)s \n""" % {'title': feed.title,'homepage': feed.homepage, 'person': feed.person , 'category':feed.category, 'url': feed.posts_url,'comments': feed.comments_url, 'favicon': feed.favicon, 'listtitle': feed.listtitle, 'language' : feed.language } )
return "".join(outpu
|
t)
class OPMLView(CachedPage):
cacheName = "OPMLView"
mimeType = "application/xml"
selfurl = "database-opml.xml"
def generatePage(self):
output = []
output.append("""<?xml version="1.0" encoding="UTF-8"?> <opml version="1.0">
<head>
<title>Mathblogging.org Database export to OPML</title>
</head>
<body><outline title="Mathblogging.org" text="Mathblogging.org">""")
for feed in Feed.gql("WHERE category IN :1 ORDER BY listtitle", ['pure', 'applied', 'teacher', 'history', 'visual','art','journalism','fun','journal', 'general','institution','commercial','community']):
output.append("""
<outline text="%(title)s" title="%(title)s" type="rss"
xmlUrl="%(url)s" htmlUrl="%(homepage)s"/>""" % {'title': feed.title,'homepage': feed.homepage, 'person': feed.person , 'category':feed.category, 'url': feed.posts_url,'comments': feed.comments_url, 'favicon': feed.favicon, 'listtitle': feed.listtitle, 'language' : feed.language } )
output.append("""</outline></body></opml>""")
return "".join(output)
class CSEConfig(CachedPage):
cacheName = "CSEConfig"
mimeType = "application/xml"
def generatePage(self):
output = []
output.append( """<?xml version="1.0" encoding="UTF-8" ?>
<Annotations>""")
for feed in Feed.all():
output.append( """
<Annotation about="%(homepage)s*">
<Label name="_cse_et7bffbfveg"/>
</Annotation>
""" % {'homepage': add_slash(strip_http(feed.homepage)) } )
output.append("""</Annotations>""")
return "".join(output)
class PostsJSONExport(CachedPage):
cacheName = "PostsJSONExport"
mimeType = "application/json"
def generatePage(self):
posts = []
for post in Post.gql("WHERE category IN :1 ORDER BY timestamp_created DESC LIMIT 150", ['history','fun','general','commercial','art','visual','pure','applied','teacher','journalism']):
posts.append({
"title": post.title,
"date": post.timestamp_created.strftime('%B %d,%Y %I:%M:%S %p'),
"length": post.length,
"blog": post.service,
"tags": [tag for tag in post.tags],
"category": post.category,
"comments": 0 #TODO
})
output = {"posts":posts}
return simplejson.dumps(output)
class JSONPHandler(CachedPage):
mimeType = "application/javascript"
def post_process_content(self, content):
callback = self.request.get("callback")
logging.info("Add JSONP padding: " + callback)
return "%s(%s);" % (callback, content)
class WeeklyPicksJSONPHandler(JSONPHandler):
cacheName = "WeeklyPicksJSONPHandler"
def generatePage(self):
picks = [ {
"url": "http://rjlipton.wordpress.com/2011/12/03/the-meaning-of-omega/",
"caption": "If you haven't followed the debate on TCS breakthrough in matrix multiplication, you can read up on it at Godel's Lost Letter and P=NP (and you might also check out a short comment at Yet Another Math Programmer)." } ]
output = picks
return simplejson.dumps(output)
|
ColinDuquesnoy/QCrash
|
qcrash/_forms/dlg_github_login_ui.py
|
Python
|
mit
| 3,196
| 0.003755
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file '/home/colin/dev/QCrash/forms/dlg_github_login.ui'
#
# Created by: PyQt5 UI code generator 5.5.1
#
# WARNING! All changes made in this file will be lost!
from qcrash.qt import QtCore, QtGui, QtWidgets
class Ui_Dialog(object):
def setupUi(self, Dialog):
Dialog.setObjectName("Dialog")
Dialog.resize(366, 248)
Dialog.setMinimumSize(QtCore.QSize(350, 0))
self.verticalLayout = QtWidgets.QVBoxLayout(Dialog)
self.verticalLayout.setObjectName("verticalLayout")
self.lbl_html = QtWidgets.QLabel(Dialog)
self.lbl_html.setObjectName("lbl_html")
self.verticalLayout.addWidget(self.lbl_html)
self.formLayout = QtWidgets.QFormLayout()
self.formLayout.setContentsMargins(-1, 0, -1, -1)
self.formLayout.setObjectName("formLayout")
self.label_2 = QtWidgets.QLabel(Dialog)
self.label_2.setObjectName("label_2")
self.formLayout.setWidget(0, QtWidgets.QFormLayout.LabelRole, self.label_2)
self.le_username = QtWidgets.QLineEdit(Dialog)
self.le_username.setObjectName("le_username")
self.formLayout.setWidget(0, QtWidgets.QFormLayout.FieldRole, self.le_username)
self.label_3 = QtWidgets.QLabel(Dialog)
self.label_3.setObjectName("label_3")
self.formLayout.setWidget(1, QtWidgets.QFormLayout.LabelRole, self.label_3)
self.le_password = QtWidgets.QLineEdit(Dialog)
self.le_password.setEchoMode(QtWidgets.QLineEdit.Password)
self.le_password.setObjectName("le_password")
self.formLayout.setWidget(1, QtWidgets.QFormLayout.FieldRole, self.le_password)
self.verticalLayout.addLayout(self.formLayout)
self.cb_remember = QtWidgets.QCheckBox(Dialog)
self.cb_remember.setObjectName("cb_remember")
self.verticalLayout.addWidget(self.cb_remember)
self.cb_remember_password = QtWidgets.QCheckBox(Dialog)
self.cb_remember_password.setObjectName("cb_remember_password")
self.verticalLayout.addWidget(self.cb_remember_password)
self.bt_sign_in = QtWidgets.QPushButton(Dialog)
self.bt_sign_in.setObjectName("bt_sign_in")
self.verticalLayout.addWidget(self.bt_sign_in)
self.retranslateUi(Dialog)
self.cb_remember.toggled['bool'].connect(self.cb_remember_password.setEnabled)
QtCore.QMetaObject.connectSlotsByName(Dialog)
def retranslateUi(self, Dialog):
_translate = QtCore.QCoreApplication.translate
        Dialog.setWindowTitle(_translate("Dialog", "Sign in to github"))
        self.lbl_html.setText(_translate("Dialog",
            "<html><head/><body><p align=\"center\"><img src=\":/rc/GitHub-Mark.png\"/></p><p align=\"center\">Sign in to GitHub</p></body></html>"))
self.label_2.setText(_translate("Dialog", "Username:"))
self.label_3.setText(_translate("Dialog", "Password: "))
self.cb_remember.setText(_translate("Dialog", "Remember me"))
self.cb_remember_password.setText(_translate("Dialog", "Remember password"))
self.bt_sign_in.setText(_translate("Dialog", "Sign in"))
from . import qcrash_rc
|
libyal/libfsntfs
|
tests/pyfsntfs_test_support.py
|
Python
|
lgpl-3.0
| 1,560
| 0.005128
|
#!/usr/bin/env python
#
# Python-bindings support functions test script
#
# Copyright (C) 2010-2022, Joachim Metz <joachim.metz@gmail.com>
#
# Refer to AUTHORS for acknowledgements.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import argparse
import os
import sys
import unittest
import pyfsntfs
class SupportFunctionsTests(unittest.TestCase):
"""Tests the support functions."""
def test_get_version(self):
"""Tests the get_version function."""
version = pyfsntfs.get_version()
self.assertIsNotNone(version)
if __name__ == "__main__":
argument_parser = argparse.ArgumentParser()
argument_parser.add_argument(
"source", nargs="?", action="store", metavar="PATH",
default=None, help="path of the source file.")
options, unknown_options = argument_parser.parse_known_args()
unknown_options.insert(0, sys.argv[0])
setattr(unittest, "source", options.source)
unittest.main(argv=unknown_options, verbosity=2)
|
takinbo/rapidsms-borno
|
apps/logger/models.py
|
Python
|
lgpl-3.0
| 675
| 0.013333
|
#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
import django
from django.db import models
class MessageBase(models.Model):
text = models.CharField(max_length=140)
# TODO save connection title rather than wacky object string?
identity = models.CharField(max_length=150)
backend = models.CharField(max_length=150)
def __unicode__(self):
return "%s (%s) %s:" % (self.identity, self.backend, self.text)
class Meta:
abstract = True
class IncomingMessage(MessageBase):
received = models.DateTimeField(auto_now_add=True)
class OutgoingMessage(MessageBase):
sent = models.DateTimeField(auto_now_add=True)
|
tbenthompson/quadracheer
|
interpolate_rl5.py
|
Python
|
mit
| 496
| 0.002016
|
# I was just curious how well I could approximate the rl5 starting values
# with an interpolative scheme
from quadracheer.rl5 import *
import numpy as np
import matplotlib.pyplot as plt
fnc = lambda ay, by: mu_5_4(ay, by, k1(ay, by), k2(ay, by))
ay = np.linspace(-2, 2, 100)
by = np.linspace(-2.0, 2.0, 100)
Ay, By = np.meshgrid(ay, by)
f = np.log(np.abs(fnc(Ay, By)))
plt.imshow(f)
plt.title(r'$\log(|\mu_4^5(a_y, b_y)|)$')
plt.xlabel(r'$a_y$')
plt.ylabel(r'$b_y$')
plt.colorbar()
plt.show()
|
mazafrav/JdeRobot
|
src/drivers/MAVLinkServer/modules/mavproxy_smartcamera/sc_main.py
|
Python
|
gpl-3.0
| 7,941
| 0.004282
|
import time
import math
import cv2
from pymavlink import mavutil
from droneapi.lib import VehicleMode, Location
import sc_config
from sc_video import sc_video
from sc_webcam import SmartCameraWebCam
from sc_SonyQX1 import SmartCamera_SonyQX
"""
sc_main.py - runs top level smart camera function
To run this module:
* Run mavproxy.py with the correct options to connect to your vehicle
* module load api
* api start sc_main.py
(Once tested we can put these directives into a mavinit.scr file and mavproxy will load/run
this code automatically)
"""
class SmartCamera(object):
def __init__(self, use_api):
# if using droneapi connect to vehicle
if (use_api):
# First get an instance of the API endpoint (the connect via web case will be similar)
self.api = local_connect()
            # Our vehicle (we assume the user is trying to control the first vehicle attached to the GCS)
self.vehicle = self.api.get_vehicles()[0]
else:
self.api = None
self.vehicle = None
# initialised flag
self.home_initialised = False
# timer to intermittently check for home position
self.last_home_check = time.time()
self.home_location = None
# vehicle mission
self.mission_cmds = None
# check if we should display debug messages
self.debug = sc_config.config.get_boolean('general','debug',True)
# register cameras
        self.register_cameras()
# initialise video writer
self.writer = None
# register cameras - creates camera objects based on camera-type configuration
def register_cameras(self):
# initialise list
self.camera_list = []
#look for up to 2 cameras
for i in range(0,2):
config_group = "camera%d" % i
camera_type = sc_config.config.get_integer(config_group, 'type', 0)
# webcam
if camera_type == 1:
new_camera = SmartCameraWebCam(i)
self.camera_list = self.camera_list + [new_camera]
# Sony QX1
if camera_type == 2:
new_camera = SmartCamera_SonyQX(i,"wlan0")
if new_camera.boValidCameraFound() is True:
self.camera_list = self.camera_list + [new_camera]
print("Found QX Camera")
# display number of cameras found
print ("cameras found: %d" % len(self.camera_list))
# fetch_mission - fetch mission from flight controller
def fetch_mission(self):
# download the vehicle waypoints
self.mission_cmds = self.vehicle.commands
self.mission_cmds.download()
self.mission_cmds.wait_valid()
# check home - intermittently checks for changes to the home location
def check_home(self):
# return immediately if home has already been initialised
if self.home_initialised:
return True
# check for home no more than once every two seconds
if (time.time() - self.last_home_check > 2):
# update that we have performed a status check
self.last_home_check = time.time()
# check if we have a vehicle
if self.vehicle is None:
self.vehicle = self.api.get_vehicles()[0]
return
# ensure the vehicle's position is known
if self.vehicle.location is None:
return False
if self.vehicle.location.lat is None or self.vehicle.location.lon is None or self.vehicle.location.alt is None:
return False
# download the vehicle waypoints if we don't have them already
if self.mission_cmds is None:
self.fetch_mission()
return False
# get the home lat and lon
home_lat = self.mission_cmds[0].x
home_lon = self.mission_cmds[0].y
home_alt = self.mission_cmds[0].z
# sanity check the home position
if home_lat is None or home_lon is None or home_alt is None:
return False
# sanity check again and set home position
if (home_lat != 0 and home_lon != 0):
self.home_location = Location(home_lat,home_lon,home_alt)
self.home_initialised = True
else:
self.mission_cmds = None
# To-Do: if we wish to have the same home position as the flight controller
# we must download the home waypoint again whenever the vehicle is armed
# return whether home has been initialised or not
return self.home_initialised
# checks if video output should be started
def check_video_out(self):
# return immediately if video has already been started
if not self.writer is None:
return
# start video once vehicle is armed
if self.vehicle.armed:
self.writer = sc_video.open_video_writer()
    # check_status - polls the vehicle's status to determine if we should record video or not
def check_status(self):
# download the vehicle waypoints if we don't have them already
# To-Do: do not load waypoints if vehicle is armed
if self.mission_cmds is None:
self.fetch_mission()
return
# take_picture_all - ask all cameras to take a picture
def take_picture_all(self):
for cam in self.camera_list:
cam.take_picture()
# saves_picture_all - ask all cameras for their latest image and saves to files
def save_picture_all(self):
cam_num = 0
for cam in self.camera_list:
img = cam.get_latest_image()
# display image
window_name = "cam%d" % cam_num
cv2.namedWindow(window_name, 0)
cv2.resizeWindow(window_name, 640, 480)
cv2.imshow (window_name, img)
# write to file
#imgfilename = "C:\Users\rmackay9\Documents\GitHub\ardupilot-balloon-finder\smart_camera\img%d-%d.jpg" % (cam_num,cam.get_image_counter())
#imgfilename = "img%d-%d.jpg" % (cam_num,cam.get_image_counter())
#print (imgfilename)
#cv2.imwrite(imgfilename, img)
# check for ESC key being pressed
k = cv2.waitKey(5) & 0xFF
if k == 27:
break
cam_num = cam_num + 1
# get image from sc_video class and write to file
def analyze_image(self):
# record time
now = time.time()
# get new image from camera
        f = self.get_frame()
# save image for debugging later
if not self.writer is None:
self.writer.write(f)
def run(self):
while True:
# ask all cameras to take a picture
self.take_picture_all()
# store images to disk
self.save_picture_all()
# Don't suck up too much CPU, only process a new image occasionally
time.sleep(1)
'''
while not self.api.exit:
# only process images once home has been initialised
if self.check_home():
# start video if required
self.check_video_out()
# check if we are controlling the vehicle
self.check_status()
# ask all cameras to take a picture
self.take_picture_all()
# Don't suck up too much CPU, only process a new image occasionally
time.sleep(2.0)
if not self.use_simulator:
sc_video.stop_background_capture()
'''
# initialise depending upon whether running from command line or as part of mavproxy
if __name__ == "__main__":
sc_main = SmartCamera(False)
else:
sc_main = SmartCamera(True)
# run the smart camera
sc_main.run()
|
mikrosimage/OpenRenderManagement
|
src/pulitools/puliquery/settings.py
|
Python
|
bsd-3-clause
| 556
| 0.001799
|
#!/usr/bin/python2.6
# -*- coding: utf8 -*-
"""
settings.py: configuration for the pul_* utilities that allow viewing and editing jobs in batch
"""
__author__ = "Jerome Samson"
__copyright__ = "Copyright 2013, Mikros Image"
class Settings(object):
# Global tools attributes
verbose = False
# Initial server/port config
hostname = "puliserver"
port = "8004"
# Default formating & enums
date_format = '%m/%d %H:%M'
time_format = '%H:%M'
precise_date_format = '%m/%d %H:%M:%S'
    precise_time_format = '%H:%M:%S'
|
chromium2014/src
|
tools/telemetry/telemetry/core/platform/profiler/perf_profiler.py
|
Python
|
bsd-3-clause
| 8,787
| 0.008649
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import os
import re
import signal
import subprocess
import sys
import tempfile
from telemetry.core import platform
from telemetry.core import util
from telemetry.core.platform import profiler
from telemetry.core.platform.profiler import android_profiling_helper
from telemetry.util import support_binaries
util.AddDirToPythonPath(util.GetChromiumSrcDir(), 'build', 'android')
from pylib.perf import perf_control # pylint: disable=F0401
_PERF_OPTIONS = [
# In perf 3.13 --call-graph requires an argument, so use the -g short-hand
# which does not.
'-g',
# Increase sampling frequency for better coverage.
'--freq', '2000',
]
_PERF_OPTIONS_ANDROID = [
# Increase priority to avoid dropping samples. Requires root.
'--realtime', '80',
]
def _NicePath(path):
  rel_path = os.path.relpath(path, os.curdir)
return rel_path if len(rel_path) < len(path) else path
def _PrepareHostForPerf():
  kptr_file = '/proc/sys/kernel/kptr_restrict'
with open(kptr_file) as f:
if f.read().strip() != '0':
logging.warning('Making kernel symbols unrestricted. You might have to '
'enter your password for "sudo".')
with tempfile.NamedTemporaryFile() as zero:
zero.write('0')
zero.flush()
subprocess.call(['sudo', 'cp', zero.name, kptr_file])
def _InstallPerfHost():
host = platform.GetHostPlatform()
if not host.CanLaunchApplication('perfhost'):
host.InstallApplication('perfhost')
return support_binaries.FindPath('perfhost', host.GetOSName())
class _SingleProcessPerfProfiler(object):
"""An internal class for using perf for a given process.
On android, this profiler uses pre-built binaries from AOSP.
See more details in prebuilt/android/README.txt.
"""
def __init__(self, pid, output_file, browser_backend, platform_backend,
perf_binary, perfhost_binary):
self._pid = pid
self._browser_backend = browser_backend
self._platform_backend = platform_backend
self._output_file = output_file
self._tmp_output_file = tempfile.NamedTemporaryFile('w', 0)
self._is_android = platform_backend.GetOSName() == 'android'
self._perfhost_binary = perfhost_binary
cmd_prefix = []
perf_args = ['record', '--pid', str(pid)]
if self._is_android:
cmd_prefix = ['adb', '-s', browser_backend.adb.device_serial(), 'shell',
perf_binary]
perf_args += _PERF_OPTIONS_ANDROID
output_file = os.path.join('/sdcard', 'perf_profiles',
os.path.basename(output_file))
self._device_output_file = output_file
browser_backend.adb.RunShellCommand(
'mkdir -p ' + os.path.dirname(self._device_output_file))
browser_backend.adb.RunShellCommand('rm -f ' + self._device_output_file)
else:
cmd_prefix = [perf_binary]
perf_args += ['--output', output_file] + _PERF_OPTIONS
self._proc = subprocess.Popen(cmd_prefix + perf_args,
stdout=self._tmp_output_file, stderr=subprocess.STDOUT)
def CollectProfile(self):
if ('renderer' in self._output_file and
not self._is_android and
not self._platform_backend.GetCommandLine(self._pid)):
logging.warning('Renderer was swapped out during profiling. '
'To collect a full profile rerun with '
'"--extra-browser-args=--single-process"')
if self._is_android:
device = self._browser_backend.adb.device()
perf_pids = device.old_interface.ExtractPid('perf')
device.RunShellCommand('kill -SIGINT ' + ' '.join(perf_pids))
util.WaitFor(lambda: not device.old_interface.ExtractPid('perf'),
timeout=2)
self._proc.send_signal(signal.SIGINT)
exit_code = self._proc.wait()
try:
if exit_code == 128:
raise Exception(
"""perf failed with exit code 128.
Try rerunning this script under sudo or setting
/proc/sys/kernel/perf_event_paranoid to "-1".\nOutput:\n%s""" %
self._GetStdOut())
elif exit_code not in (0, -2):
raise Exception(
'perf failed with exit code %d. Output:\n%s' % (exit_code,
self._GetStdOut()))
finally:
self._tmp_output_file.close()
cmd = '%s report -n -i %s' % (_NicePath(self._perfhost_binary),
self._output_file)
if self._is_android:
device = self._browser_backend.adb.device()
device.old_interface.Adb().Pull(self._device_output_file,
self._output_file)
required_libs = \
android_profiling_helper.GetRequiredLibrariesForPerfProfile(
self._output_file)
symfs_root = os.path.dirname(self._output_file)
kallsyms = android_profiling_helper.CreateSymFs(device,
symfs_root,
required_libs,
use_symlinks=True)
cmd += ' --symfs %s --kallsyms %s' % (symfs_root, kallsyms)
for lib in required_libs:
lib = os.path.join(symfs_root, lib[1:])
if not os.path.exists(lib):
continue
objdump_path = android_profiling_helper.GetToolchainBinaryPath(
lib, 'objdump')
if objdump_path:
cmd += ' --objdump %s' % _NicePath(objdump_path)
break
print 'To view the profile, run:'
print ' ', cmd
return self._output_file
def _GetStdOut(self):
self._tmp_output_file.flush()
try:
with open(self._tmp_output_file.name) as f:
return f.read()
except IOError:
return ''
class PerfProfiler(profiler.Profiler):
def __init__(self, browser_backend, platform_backend, output_path, state):
super(PerfProfiler, self).__init__(
browser_backend, platform_backend, output_path, state)
process_output_file_map = self._GetProcessOutputFileMap()
self._process_profilers = []
self._is_android = platform_backend.GetOSName() == 'android'
perf_binary = perfhost_binary = _InstallPerfHost()
try:
if self._is_android:
device = browser_backend.adb.device()
perf_binary = android_profiling_helper.PrepareDeviceForPerf(device)
self._perf_control = perf_control.PerfControl(device)
self._perf_control.SetPerfProfilingMode()
else:
_PrepareHostForPerf()
for pid, output_file in process_output_file_map.iteritems():
if 'zygote' in output_file:
continue
self._process_profilers.append(
_SingleProcessPerfProfiler(
pid, output_file, browser_backend, platform_backend,
perf_binary, perfhost_binary))
except:
if self._is_android:
self._perf_control.SetDefaultPerfMode()
raise
@classmethod
def name(cls):
return 'perf'
@classmethod
def is_supported(cls, browser_type):
if sys.platform != 'linux2':
return False
if browser_type.startswith('cros'):
return False
return True
@classmethod
def CustomizeBrowserOptions(cls, browser_type, options):
options.AppendExtraBrowserArgs([
'--no-sandbox',
'--allow-sandbox-debugging',
])
def CollectProfile(self):
if self._is_android:
self._perf_control.SetDefaultPerfMode()
output_files = []
for single_process in self._process_profilers:
output_files.append(single_process.CollectProfile())
return output_files
@classmethod
def GetTopSamples(cls, file_name, number):
"""Parses the perf generated profile in |file_name| and returns a
{function: period} dict of the |number| hottests functions.
"""
assert os.path.exists(file_name)
with open(os.devnull, 'w') as devnull:
_InstallPerfHost()
report = subprocess.Popen(
['perfhost', 'report', '--show-total-period', '-U', '-t', '^', '-i',
file_name],
stdo
|
hpbader42/Klampt
|
Python/klampt/sim/simlog.py
|
Python
|
bsd-3-clause
| 14,115
| 0.011335
|
from ..math import vectorops,so3,se3
class SimLogger:
"""A CSV logger for a simulation. """
def __init__(self,sim,state_fn,contact_fn=None,colliding='all',saveheader=True):
"""
Logs a simulation to a CSV file.
Arguments:
- sim: the klampt.Simulator object you wish to use
- state_fn: the file that you want to save state to
- contact_fn: the file that you want to save contacts to (or None if you don't want them)
- colliding: either 'all' (default) or a list of all objects
/ object ids that you want to check self collisions between
- saveheader: true if you want a CSV header giving the name of each value
"""
self.saveSensors = False
self.sim = sim
self.fn = state_fn
self.f = None
if state_fn != None:
print "SimLogger: Saving state to",state_fn
self.f = open(state_fn,'w')
self.f_contact = None
if contact_fn != None:
print "SimLogger: Saving contacts to",contact_fn
self.f_contact = open(contact_fn,'w')
self.colliding = []
if colliding=='all':
self.sim.enableContactFeedbackAll()
n = self.sim.world.numIDs()
self.colliding = range(n)
else:
for i in colliding:
if isinstance(i,int):
self.colliding.append(i)
elif hasattr(i,'getID'):
self.colliding.append(i.getID())
elif isinstance(i,str):
raise NotImplementedError("Lookup id from entity name")
else:
raise ValueError("Invalid object given in the colliding list")
if saveheader:
#need to call simulate to get proper sensor readings...
self.sim.simulate(0)
self.saveHeader()
self.saveContactHeader()
return
def saveHeader(self,extra=[]):
if self.f is None:
print "SimLogger: No state file specified"
return
world = self.sim.world
elements = ['time']
for i in xrange(world.numRobots()):
n = world.robot(i).getName()
elements.append(n+'_cmx')
elements.append(n+'_cmy')
elements.append(n+'_cmz')
for j in xrange(world.robot(i).numLinks()):
elements.append(n+'_qcmd['+world.robot(i).link(j).getName()+']')
for j in xrange(world.robot(i).numLinks()):
elements.append(n+'_dqcmd['+world.robot(i).link(j).getName()+']')
for j in xrange(world.robot(i).numLinks()):
elements.append(n+'_q['+world.robot(i).link(j).getName()+']')
for j in xrange(world.robot(i).numLinks()):
elements.append(n+'_dq['+world.robot(i).link(j).getName()+']')
for j in xrange(world.robot(i).numDrivers()):
elements.append(n+'_t['+str(j)+']')
if self.saveSensors:
j = 0
while True:
s = self.sim.controller(i).sensor(j)
if len(s.name())==0:
break
names = s.measurementNames()
for sn in range(len(names)):
elements.append(n+'_'+s.name()+'['+names[sn]+']')
j += 1
for i in xrange(world.numRigidObjects()):
n = world.rigidObject(i).getName()
elements += [n+'_'+suffix for suffix in ['comx','comy','comz','x','y','z','rx','ry','rz','dx','dy','dz','wx','wy','wz']]
if extra:
elements += extra
self.f.write(','.join(elements))
self.f.write('\n')
return
def saveContactHeader(self):
if self.f_contact is None:
print "SimLogger: No contact file specified"
return
elements = ['time','body1','body2']
elements += ['numContacts']
elements += ['cpx_avg','cpy_avg','cpz_avg','cnx_avg','cny_avg','cnz_avg','fx_avg','fy_avg','fz_avg','mx_avg','my_avg','mz_avg']
self.f_contact.write(','.join(elements))
self.f_contact.write('\n')
def saveStep(self,extra=[]):
sim = self.sim
world = sim.world
sim.updateWorld()
values = []
values.append(sim.getTime())
for i in xrange(world.numRobots()):
robot = world.robot(i)
values += robot.getCom()
controller = sim.controller(i)
try:
values += controller.getCommandedConfig()
values += controller.getCommandedVelocity()
except Exception:
values += [0.0]*robot.numLinks()
values += [0.0]*robot.numLinks()
values += sim.getActualConfig(i)
values += sim.getActualVelocity(i)
assert len(sim.getActualTorques(i)) == world.robot(i).numDrivers()
values += sim.getActualTorques(i)
if self.saveSensors:
j = 0
while True:
s = self.sim.controller(i).sensor(j)
if len(s.name())==0:
break
meas = s.getMeasurements()
assert len(meas) == len(s.measurementNames())
values += meas
j += 1
for i in xrange(world.numRigidObjects()):
obj = world.rigidObject(i)
T = obj.getTransform()
values += se3.apply(T,obj.getMass().getCom())
values += T[1]
values += so3.moment(T[0])
values += sim.body(obj).getVelocity()[1]
values += sim.body(obj).getVelocity()[0]
if self.f_contact:
for i,id in enumerate(self.colliding):
for j in range(i+1,len(self.colliding)):
id2 = self.colliding[j]
if sim.hadContact(id,id2):
clist = sim.getContacts(id,id2);
f = sim.contactForce(id,id2)
m = sim.contactTorque(id,id2)
pavg = [0.0]*3
navg = [0.0]*3
for c in clist:
pavg = vectorops.add(pavg,c[0:3])
navg = vectorops.add(navg,c[3:6])
if len(clist) > 0:
pavg = vectorops.div(pavg,len(clist))
navg = vectorops.div(navg,len(clist))
body1 = world.getName(id)
body2 = world.getName(id2)
cvalues = [sim.getTime(),body1,body2,len(clist)]
cvalues += pavg
cvalues += navg
cvalues += f
cvalues += m
self.f_contact.write(','.join(str(v) for v in cvalues))
self.f_contact.write('\n')
if extra:
values += extra
if not (self.f is None):
self.f.write(','.join([str(v) for v in values]))
self.f.write('\n')
def close(self):
if not (self.f is None):
self.f.close()
if not (self.f_contact is None):
self.f_contact.close()
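# --- Hedged usage sketch (not part of the original module) ---
# Typical wiring, assuming `sim` is an already-constructed klampt.Simulator:
#     logger = SimLogger(sim, 'state.csv', contact_fn='contacts.csv')
#     while sim.getTime() < 10.0:
#         sim.simulate(0.01)
#         logger.saveStep()
#     logger.close()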
class SimLogPlayback:
"""A replay class for simulation traces from SimLogger or the SimTest app. """
def __init__(self,sim,state_fn,contact_fn=None):
"""
Loads from a CSV file.
Arguments:
- sim: the klampt.Simulator object you wish to use. This should be instantiated with
all objects that you recorded from.
- state_fn: the state file that you want to load
- contact_fn: the contact file that you want to load
"""
import csv
self.sim = sim
self.state_header = []
self.state_array = []
self.contact_header = []
self.contact_array = []
self.state_to_index = {}
self.contact_to_index = {}
if state_fn != None:
print "SimLogPlayback: Loading state from",state_fn
|
WildBill567/nn-toy
|
neat/phenome.py
|
Python
|
gpl-3.0
| 8,014
| 0.002496
|
# Copyright (C) 2016 William Langhoff WildBill567@users.noreply.github.com
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Adapted from:
# https://github.com/CodeReclaimers/neat-python, accessed May 2016
# Which is distributed with the following license:
# Copyright (c) 2007-2011, cesar.gomes and mirrorballu2
# Copyright (c) 2015, CodeReclaimers, LLC
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
# following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
# disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import time
import networkx as nx
import matplotlib.pyplot as plt
from neat import activation_functions
def find_feed_forward_layers(inputs, connections):
"""
Collect the layers whose members can be evaluated in parallel in a feed-forward network.
Adapted from: https://github.com/CodeReclaimers/neat-python, accessed May 2016
:param inputs: list of the network input nodes
:param connections: list of (input, output) connections in the network.
Returns a list of layers, with each layer consisting of a set of node identifiers.
"""
# TODO: Detect and omit nodes whose output is ultimately never used.
layers = []
prev_nodes = set(inputs)
prev_nodes.add(0)
while 1:
# Find candidate nodes for the next layer. These nodes should connect
# a node in S to a node not in S.
candidate_set = set(b for (a, b) in connections if a in prev_nodes and b not in prev_nodes)
# Keep only the nodes whose entire input set is contained in S.
keeper_set = set()
for n in candidate_set:
if all(a in prev_nodes for (a, b) in connections if b == n):
keeper_set.add(n)
if not keeper_set:
break
layers.append(keeper_set)
prev_nodes = prev_nodes.union(keeper_set)
return layers
class FeedForwardPhenome:
def __init__(self, genome, config):
"""
FeedForwardPhenome - A feedforward network
Adapted from: https://github.com/CodeReclaimers/neat-python, accessed May 2016
:param genome: the genome to create the phenome
"""
self.graph, node_lists = self._construct_graph(genome)
self.input_nodes, self.hidden_nodes, self.output_nodes = node_lists
self.links = [(g.src, g.sink) for g in genome.link_genes]
self.node_evals = []
self.config = config
layers = find_feed_forward_layers(self.input_nodes, self.links)
used_nodes = set(self.input_nodes + self.output_nodes)
for layer in layers:
for node in layer:
inputs = []
# TODO: This could be more efficient.
for cg in genome.link_genes:
if cg.sink == node and cg.enabled:
inputs.append((cg.src, cg.weight))
used_nodes.add(cg.src)
used_nodes.add(node)
ng = genome.get_node_by_index(node)
activation_function = activation_functions.get(ng.activation)
self.node_evals.append((node, activation_function, inputs))
self.values = [0.0] * (1 + max(used_nodes))
def serial_activate(self, inputs):
"""
serial_activate - gives network output for an input
Adapted from: https://github.com/CodeReclaimers/neat-python, accessed May 2016
:param inputs: numerical input list
:return: numerical output list
"""
if len(self.input_nodes) != len(inputs):
raise ValueError("Expected {0} inputs, got {1}".format(len(self.input_nodes), len(inputs)))
self.values[0] = 1.0
for idx, v in zip(self.input_nodes, inputs):
self.values[idx] = v
for node, func, links in self.node_evals:
linear_activation = 0.0
for idx, weight in links:
linear_activation += self.values[idx] * weight
self.values[node] = func(linear_activation)
return [self.values[i] for i in self.output_nodes]
def draw(self, testing=False):
"""Draws the network with matplotlib"""
fig = plt.figure()
pos = {0: (-1.5, 0)}
for idx in range(len(self.input_nodes)):
pos[idx+1] = (idx, 0)
for idx, val in enumerate(self.output_nodes):
pos[val] = (idx, 4)
for idx, val in enumerate(self.hidden_nodes):
pos[val] = (idx, 2)
nx.draw_networkx_nodes(self.graph, pos,
nodelist=self.input_nodes,
node_color='r')
nx.draw_networkx_nodes(self.graph, pos,
nodelist=self.output_nodes,
node_color='g')
nx.draw_networkx_nodes(self.graph, pos,
nodelist=[0],
node_color='k')
nx.draw_networkx_edges(self.graph, pos)
plt.yticks([])
plt.xticks([])
fig.show()
if testing:
time.sleep(1)
plt.close(fig)
else:
plt.show()
@staticmethod
def _construct_graph(genome):
"""Constructs the DiGraph"""
graph = nx.DiGraph()
graph.add_node(0, {'node_type': 'BIAS', 'val': 1})
input_list = []
output_list = []
hidden_list = []
for gene in genome.input_genes:
graph.add_node(gene.idx)
input_list.append(gene.idx)
for gene in genome.output_genes:
graph.add_node(gene.idx)
output_list.append(gene.idx)
for gene in genome.hidden_genes:
graph.add_node(gene.idx)
hidden_list.append(gene.idx)
for gene in genome.link_genes:
graph.add_edge(gene.src, gene.sink,
{'weight': gene.weight,
'enabled': gene.enabled})
return graph, (input_list, hidden_list, output_list)
def handle_close(fig):
plt.close(fig)
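# --- Hedged usage sketch (not part of the original module) ---
# Tiny demonstration of find_feed_forward_layers on a hand-built network:
# inputs 1 and 2 feed hidden node 3, which feeds output node 4, so the
# function groups the nodes into two sequential layers.
if __name__ == '__main__':
    demo_connections = [(1, 3), (2, 3), (3, 4)]
    print(find_feed_forward_layers([1, 2], demo_connections))  # [{3}, {4}]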
|
cboyce93/epitome-xl
|
src/util/treestore_functions.py
|
Python
|
gpl-3.0
| 316
| 0
|
import pygtk
import gtk
import dill
pygtk.require('2.0')
class TreeStoreFunctions():
def get_children(self, root_iter):
        children = []
for i in range(0, self.treestore.iter_n_children(root_iter)):
            children.append(self.treestore.iter_nth_child(root_iter, i))
return children
|
drglove/SickRage
|
sickbeard/notifiers/libnotify.py
|
Python
|
gpl-3.0
| 5,007
| 0.003795
|
# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
import os
import cgi
import sickbeard
from sickbeard import logger, common
def diagnose():
'''
Check the environment for reasons libnotify isn't working. Return a
user-readable message indicating possible issues.
'''
try:
import pynotify #@UnusedImport
except ImportError:
return (u"<p>Error: pynotify isn't installed. On Ubuntu/Debian, install the "
u"<a href=\"apt:python-notify\">python-notify</a> package.")
if 'DISPLAY' not in os.environ and 'DBUS_SESSION_BUS_ADDRESS' not in os.environ:
return (u"<p>Error: Environment variables DISPLAY and DBUS_SESSION_BUS_ADDRESS "
u"aren't set. libnotify will only work when you run SickRage "
u"from a desktop login.")
try:
import dbus
except ImportError:
pass
else:
try:
bus = dbus.SessionBus()
except dbus.DBusException, e:
return (u"<p>Error: unable to connect to D-Bus session bus: <code>%s</code>."
u"<p>Are you running SickRage in a desktop session?") % (cgi.escape(e),)
try:
bus.get_object('org.freedesktop.Notifications',
'/org/freedesktop/Notifications')
except dbus.DBusException, e:
return (u"<p>Error: there doesn't seem to be a notification daemon available: <code>%s</code> "
u"<p>Try installing notification-daemon or notify-osd.") % (cgi.escape(e),)
return u"<p>Error: Unable to send notification."
class LibnotifyNotifier:
def __init__(self):
self.pynotify = None
self.gobject = None
def init_pynotify(self):
if self.pynotify is not None:
return True
try:
import pynotify
except ImportError:
logger.log(u"Unable to import pynotify. libnotify notifications won't work.", logger.ERROR)
return False
try:
import gobject
except ImportError:
logger.log(u"Unable to import gobject. We can't catch a GError in display.", logger.ERROR)
return False
if not pynotify.init('SickRage'):
logger.log(u"Initialization of pynotify failed. libnotify notifications won't work.", logger.ERROR)
return False
self.pynotify = pynotify
self.gobject = gobject
return True
def notify_snatch(self, ep_name):
if sickbeard.LIBNOTIFY_NOTIFY_ONSNATCH:
self._notify(common.notifyStrings[common.NOTIFY_SNATCH], ep_name)
def notify_download(self, ep_name):
if sickbeard.LIBNOTIFY_NOTIFY_ONDOWNLOAD:
self._notify(common.notifyStrings[common.NOTIFY_DOWNLOAD], ep_name)
def notify_subtitle_download(self, ep_name, lang):
if sickbeard.LIBNOTIFY_NOTIFY_ONSUBTITLEDOWNLOAD:
self._notify(common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD], ep_name + ": " + lang)
def notify_git_update(self, new_version = "??"):
if sickbeard.USE_LIBNOTIFY:
update_text=common.notifyStrings[common.NOTIFY_GIT_UPDATE_TEXT]
title=common.notifyStrings[common.NOTIFY_GIT_UPDATE]
self._notify(title, update_text + new_version)
def test_notify(self):
return self._notify('Test notification', "This is a test notification from SickRage", force=True)
def _notify(self, title, message, force=False):
if not sickbeard.USE_LIBNOTIFY and not force:
return False
if not self.init_pynotify():
return False
# Can't make this a global constant because PROG_DIR isn't available
# when the module is imported.
icon_path = os.path.join(sickbeard.PROG_DIR, "data/images/sickbeard_touch_icon.png")
icon_uri = 'file://' + os.path.abspath(icon_path)
# If the session bus can't be acquired here a bunch of warning messages
# will be printed but the call to show() will still return True.
# pynotify doesn't seem too keen on error handling.
n = self.pynotify.Notification(title, message, icon_uri)
try:
return n.show()
except self.gobject.GError:
return False
notifier = LibnotifyNotifier
|
mcallaghan/tmv
|
BasicBrowser/parliament/migrations/0030_auto_20180430_1447.py
|
Python
|
gpl-3.0
| 556
| 0.001799
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2018-04-30 14:47
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('parliament', '0029_auto_20180323_1039'),
]
operations = [
migrations.AlterField(
model_name='interjection',
name='type',
            field=models.IntegerField(choices=[(1, 'Applause'), (2, 'Speech'), (3, 'Objection'), (4, 'Amusement'), (5, 'Laughter'), (6, 'Outcry')]),
),
]
|
FederatedAI/FATE
|
python/federatedml/transfer_variable/transfer_class/oblivious_transfer_transfer_variable.py
|
Python
|
apache-2.0
| 1,391
| 0.002876
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
#
# AUTO GENERATED TRANSFER VARIABLE CLASS. DO NOT MODIFY
#
################################################################################
from federatedml.transfer_variable.base_transfer_variable import BaseTransferVariables
# noinspection PyAttributeOutsideInit
class ObliviousTransferTransferVariable(BaseTransferVariables):
def __init__(self, flowid=0):
super().__init__(flowid)
self.s = self._create_variable(name='s', src=['host'], dst=['guest'])
self.s_legal = self._create_variable(name='s_legal', src=['guest'], dst=['host'])
self.r = self._create_variable(name='r', src=['guest'], dst=['host'])
|
bgris/ODL_bgris
|
lib/python3.5/site-packages/spyder/widgets/projects/type/python.py
|
Python
|
gpl-3.0
| 2,795
| 0
|
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright © Spyder Project Contributors
#
# Licensed under the terms of the MIT License
# (see spyder/__init__.py for details)
# -----------------------------------------------------------------------------
"""Python project type"""
import os
import os.path as osp
from spyder.config.base import _
from spyder.widgets.projects.type import EmptyProject
class PythonProject(EmptyProject):
"""Python project."""
PROJECT_TYPE_NAME = _('Python project')
IGNORE_FILE = """"""
def _get_relative_pythonpath(self):
"""Return PYTHONPATH list as relative paths"""
# Workaround to replace os.path.relpath (new in Python v2.6):
offset = len(self.root_path)+len(os.pathsep)
return [path[offset:] for path in self.pythonpath]
def _set_relative_pythonpath(self, value):
"""Set PYTHONPATH list relative paths"""
self.pythonpath = [osp.abspath(osp.join(self.root_path, path))
for path in value]
relative_pythonpath = property(_get_relative_pythonpath,
_set_relative_pythonpath)
# --- Python Path
def is_in_pythonpath(self, dirname):
"""Return True if dirname is in project's PYTHONPATH"""
return fixpath(dirname) in [fixpath(_p) for _p in self.pythonpath]
def get_pythonpath(self):
"""Return a copy of pythonpath attribute"""
return self.pythonpath[:]
def set_pythonpath(self, pythonpath):
"""Set project's PYTHONPATH"""
self.pythonpath = pythonpath
self.save()
def remove_from_pythonpath(self, path):
"""Remove path from project's PYTHONPATH
Return True if path was removed, False if it was not found"""
pathlist = self.get_pythonpath()
if path in pathlist:
pathlist.pop(pathlist.index(path))
self.set_pythonpath(pathlist)
return True
else:
return False
def add_to_pythonpath(self, path):
"""Add path to project's PYTHONPATH
Return True if path was added, False if it was already there"""
pathlist = self.get_pythonpath()
if path in pathlist:
return False
else:
pathlist.insert(0, path)
self.set_pythonpath(pathlist)
return True
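# A hedged usage sketch (the path is illustrative; construction is inherited
# from EmptyProject, so exact constructor arguments depend on that base class):
# proj.add_to_pythonpath('/tmp/lib')       # True on first add, False if present
# proj.remove_from_pythonpath('/tmp/lib')  # True if removed, False if absent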
class PythonPackageProject(PythonProject):
""" """
PROJECT_TYPE_NAME = _('Python package')
IGNORE_FILE = """
"""
STRUCTURE_TEMPATE = {
'relative_path/test.py':
"""
test
""",
'other/test.py':
"""
test
""",
}
|
watsonbox/xbmc-confreaks
|
api/video.py
|
Python
|
gpl-3.0
| 558
| 0.010753
|
class Video(object):
def __init__(self, json):
self.id = json['id']
self.slug = json['slug']
self.title = json['title']
self.presenters = json['presenters']
self.host = json['host']
self.embed_code = json['embed_code']
def presenter_names(self):
return ', '.join(map(lambda p: p['first_name'] + ' ' + p['last_name'], self.presenters))
def url(self):
return 'plugin://plugin.video.%s/?action=play_video&videoid=%s' % (self.host, self.embed_code)
def is_available(self):
return bool(self.embed_code)
|
Aravinthu/odoo
|
addons/point_of_sale/models/__init__.py
|
Python
|
agpl-3.0
| 368
| 0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from . import account_bank_statement
from . import account_journal
from . import barcode_rule
from . import pos_category
from . import pos_config
from . import pos_order
from . import pos_session
from . import product
from . import res_partner
from . import res_users
|
endlessm/chromium-browser
|
third_party/chromite/lib/build_target_lib_unittest.py
|
Python
|
bsd-3-clause
| 1,720
| 0.004651
|
# -*- coding: utf-8 -*-
# Copyright 2019 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""build_target_lib tests."""
from __future__ import print_function
import os
from chromite.lib.build_target_lib import BuildTarget
from chromite.lib.build_target_lib import InvalidNameError
from chromite.lib import cros_test_lib
from chromite.lib import osutils
class BuildTargetTest(cros_test_lib.TempDirTestCase):
"""BuildTarget tests."""
def setUp(self):
self.sysroot = os.path.join(self.tempdir, 'sysroot')
self.sysroot_denormalized = os.path.join(self.tempdir, 'dne', '..',
'sysroot')
osutils.SafeMakedirs(self.sysroot)
def testEqual(self):
"""Sanity check for __eq__ method."""
bt1 = BuildTarget('board', profile='base')
bt2 = BuildTarget('board', profile='base')
bt3 = BuildTarget('different', profile='base')
self.assertEqual(bt1, bt2)
self.assertNotEqual(bt1, bt3)
def testInvalidName(self):
"""Test invalid name check."""
with self.assertRaises(InvalidNameError):
BuildTarget('')
def testNormalRoot(self):
"""Test normalized sysroot path."""
target = BuildTarget('board', build_root=self.sysroot)
self.assertEqual(self.sysroot, target.root)
def testDenormalizedRoot(self):
"""Test a non-normal sysroot
|
path."""
target = BuildTarget('board', build_root=self.sysroot_denormalized)
self.assertEqual(self.sysroot, target.root)
def testDefaultRoot(self):
"""Test the default sysroot path."""
target = BuildTarget('board')
self.assertEqual('/build/board', target.root)
|
patta42/pySICM
|
pySICM.py
|
Python
|
gpl-3.0
| 1,938
| 0.006192
|
#!/usr/bin/python
# Copyright (C) 2015 Patrick Happel <patrick.happel@rub.de>
#
# This file is part of pySICM.
#
# pySICM is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 2 of the License, or (at your option) any later
# version.
#
# pySICM is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# pySICM. If not, see <http://www.gnu.org/licenses/>.
import sys, getopt, os
def usage(scriptname):
print "\nUsage: "+scriptname+ " option\n"
print "\nOptions:"
print "------\n"
print " -h, --help"
print " Show this help.\n"
print " -s, --server"
print " Run the server process (used on the controlling computer).\n"
print " -g, --gui"
print " Launch the gui.\n\n"
def main(allargs):
try:
opts, args = getopt.getopt(allargs[1:],"hsg",["help","server","gui"])
except getopt.GetoptError:
usage(allargs[0])
sys.exit(2)
if len(opts) == 0:
usage(allargs[0])
sys.exit(2)
for opt, arg in opts:
if opt in ["-h", "--help"]:
usage(allargs[0])
sys.exit(0)
elif opt in ["-s","--server"]:
print "Server start not yet implemented."
sys.exit(0)
elif opt in ["-g","--gui"]:
sys.path.append(os.path.abspath('./pySICM/'))
sys.path.append(os.path.abspath('./pySICMgui/'))
from pySICM.pysicmcore import PySicmCore
PySicmCore(PySicmCore.CLIENT)
else:
usage(allargs[0])
sys.exit(2)
if __name__ == "__main__":
main(sys.argv)
|
butchman0922/gourmet
|
gourmet/plugins/import_export/html_plugin/html_exporter.py
|
Python
|
gpl-2.0
| 12,913
| 0.013785
|
import re, os.path, os, xml.sax.saxutils, time, shutil, urllib, textwrap
from gettext import gettext as _
from gourmet import convert,gglobals
from gourmet.exporters.exporter import ExporterMultirec, exporter_mult
HTML_HEADER_START = """<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
<html>
<head>
"""
HTML_HEADER_CLOSE = """<meta http-equiv="Content-Style-Stype" content="text/css">
<meta http-equiv="Content-Type" content="text/html;charset=utf-8">
</head>"""
class html_exporter (exporter_mult):
def __init__ (self, rd, r, out, conv=None,
css=os.path.join(gglobals.style_dir,"default.css"),
embed_css=True, start_html=True, end_html=True, imagedir="pics/", imgcount=1,
link_generator=None,
# exporter_mult args
mult=1,
change_units=True,
):
"""We export web pages. We have a number of possible options
here. css is a css file which will be embedded if embed_css is
true or referenced if not. start_html and end_html specify
whether or not to write header info (so we can be called in
the midst of another script writing a page). imgcount allows
an outside function to keep number exported images, handing
us the imgcount at the start of our export. link_generator
will be handed the ID referenced by any recipes called for
as ingredients. It should return a URL for that recipe
or None if it can't reference the recipe based on the ID."""
self.start_html=start_html
self.end_html=end_html
self.embed_css=embed_css
self.css=css
self.link_generator=link_generator
if imagedir and imagedir[-1] != os.path.sep: imagedir += os.path.sep #make sure we end w/ slash
if not imagedir: imagedir = "" #make sure it's a string
self.imagedir_absolute = os.path.join(os.path.split(out.name)[0],imagedir)
self.imagedir = imagedir
exporter_mult.__init__(self, rd, r, out,
conv=conv,
imgcount=imgcount,
mult=mult,
change_units=change_units,
do_markup=True,
use_ml=True)
def htmlify (self, text):
t=text.strip()
#t=xml.sax.saxutils.escape(t)
t="<p>%s</p>"%t
t=re.sub('\n\n+','</p><p>',t)
t=re.sub('\n','<br>',t)
return t
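# Hedged examples of the transform above:
# htmlify("one\n\ntwo") -> "<p>one</p><p>two</p>"  (blank line = new paragraph)
# htmlify("one\ntwo")   -> "<p>one<br>two</p>"     (single newline = <br>)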
def write_head (self):
title = self._grab_attr_(self.r,'title')
if not title: title = _('Recipe')
title=xml.sax.saxutils.escape(title)
if self.start_html:
self.out.write(HTML_HEADER_START)
self.out.write("<title>%s</title>"%title)
if self.css:
if self.embed_css:
self.out.write("<style type='text/css'><!--\n")
f=open(self.css,'r')
for l in f.readlines():
self.out.write(l)
f.close()
self.out.write("--></style>")
else:
self.out.write("<link rel='stylesheet' href='%s' type='text/css'>"%self.make_relative_link(self.css))
self.out.write(HTML_HEADER_CLOSE)
self.out.write('<body>')
self.out.write('<div class="recipe" itemscope itemtype="http://schema.org/Recipe">')
def write_image (self, image):
imgout = os.path.join(self.imagedir_absolute,"%s.jpg"%self.imgcount)
while os.path.isfile(imgout):
self.imgcount += 1
imgout = os.path.join(self.imagedir_absolute,"%s.jpg"%self.imgcount)
if not os.path.isdir(self.imagedir_absolute):
os.mkdir(self.imagedir_absolute)
o = open(imgout,'wb')
o.write(image)
o.close()
# we use urllib here because os.path may fsck up slashes for urls.
self.out.write('<img src="%s" itemprop="image">'%self.make_relative_link("%s%s.jpg"%(self.imagedir,
self.imgcount)
)
)
self.images.append(imgout)
def write_inghead (self):
self.out.write('<div class="ing"><h3>%s</h3><ul class="ing">'%_('Ingredients'))
def write_text (self, label, text):
attr = gglobals.NAME_TO_ATTR.get(label,label)
if attr == 'instructions':
self.out.write('<div class="%s"><h3 class="%s">%s</h3><div itemprop="recipeInstructions">%s</div></div>' % (attr,label,label,self.htmlify(text)))
else:
self.out.write('<div class="%s"><h3 class="%s">%s</h3>%s</div>' % (attr,label,label,self.htmlify(text)))
def handle_italic (self, chunk): return "<em>" + chunk + "</em>"
def handle_bold (self, chunk): return "<strong>" + chunk + "</strong>"
def handle_underline (self, chunk): return "<u>" + chunk + "</u>"
def write_attr_head (self):
self.out.write("<div class='header'>")
def write_attr (self, label, text):
attr = gglobals.NAME_TO_ATTR.get(label,label)
if attr=='link':
webpage = text.strip('http://')
webpage = webpage.split('/')[0]
self.out.write('<a href="%s">'%text +
_('Original Page from %s')%webpage +
'</a>\n')
elif attr == 'rating':
rating, rest = text.split('/', 1)
self.out.write('<p class="%s" itemprop="aggregateRating" itemscope itemtype="http://schema.org/AggregateRating"><span class="label">%s:</span> <span itemprop="ratingValue">%s</span><span>/%s</span></p>\n' % (attr, label.capitalize(), rating, rest))
else:
itemprop = None
if attr == 'title':
itemprop = 'name'
elif attr == 'category':
itemprop = 'recipeCategory'
elif attr == 'cuisine':
itemprop = 'recipeCuisine'
elif attr == 'yields':
itemprop = 'recipeYield'
elif attr == 'preptime':
itemprop = 'prepTime'
elif attr == 'cooktime':
itemprop = 'cookTime'
elif attr == 'instructions':
itemprop = 'recipeInstructions'
if itemprop:
self.out.write('<p class="%s"><span class="label">%s:</span> <span itemprop="%s">%s</span></p>\n' % (attr, label.capitalize(), itemprop, xml.sax.saxutils.escape(text)))
else:
self.out.write("<p class='%s'><span class='label'>%s:</span> %s</p>\n"%(attr, label.capitalize(), xml.sax.saxutils.escape(text)
|
))
def write_attr_foot (self):
self.out.write("</div>")
def write_grouphead (self, name):
self.out.write("<li class='inggroup'>%s:<ul class='ing'>"%name)
def write_groupfoot (self):
self.out.write("</ul></li>")
def write_ingref (self, amount, unit, item, refid, optional):
link=False
if self.link_generator:
link=self.link_generator(refid)
if link:
self.out.write("<a href='")
self.out.write(
self.make_relative_link(link)
#xml.sax.saxutils.escape(link).replace(" ","%20")
#self.make_relative_link(link)
)
self.out.write("'>")
self.write_ing (amount, unit, item, optional=optional)
if link: self.out.write("</a>")
def write_ing (self, amount=1, unit=None,
item=None, key=None, optional=False):
self.out.write('<li class="ing" itemprop="ingredients">')
for o in [amount, unit, item]:
if o: self.out.write(xml.sax.saxutils.escape("%s "%o))
if optional:
self.out.write("(%s)"%_('optional'))
self.out.write("</li>\n")
def write_ingfoot (self):
self.out.write('</ul
|
MsStryker/Games
|
sudoku/default.py
|
Python
|
mit
| 4,916
| 0
|
# -*- coding: utf-8 -*-
from collections import Counter
from copy import deepcopy
from math import sqrt
from random import sample as random_sample
from string import ascii_uppercase
class SudokuBoard(object):
"""Create a Sudoku Board of variable size
The method will create the random sudoku Board
of various size.
"""
def __init__(self, size, level='easy', base_iterations=55):
self.base = sqrt(size)
if not (self.base).is_integer():
raise ValueError('Please provide a number that has a square root.')
if size > 25:
raise ValueError('Please select a number 25 or below.')
self.base = int(self.base)
self.size = size
self.base_iterations = base_iterations
self.board_list = self.get_board_list()
self.replacement_number = self.get_replacement_number(level)
self.board = []
def get_replacement_number(self, level):
"""Calculate replacement_number by level"""
if level.lower() == 'easy':
return int(self.size/4)
elif level.lower() == 'medium':
return int(self.size/3)
elif level.lower() == 'hard':
return int(self.size/2)
return int(self.size/1.5)
def _get_initial_list(self):
"""Create the initial list"""
initial_list = []
for i in range(1, self.size+1):
initial_list.append(i)
return initial_list
def get_board_list(self):
"""Convert list greater than 9 to alpha numeric"""
initial_list = self._get_initial_list()
if len(initial_list) > 9:
letters = list(ascii_uppercase[:(self.size - 9)])
initial_list = initial_list[:9] + letters
return initial_list
def get_initial_matrix(self):
"""Create the matrix for the board"""
board_list = self.board_list
initial_matrix = [board_list]
for i in range(1, self.size):
rhs = board_list[i:]
lhs = board_list[:i]
new_list = rhs + lhs
initial_matrix.append(new_list)
return initial_matrix
def get_full_board(self):
"""Get the full random board
First, get the initial matrix and mutate rows
and columns in the matrix. For this, we switch
rows and columns that are within the base groups.
ex. if the base is 3, we switch the following
groups with each other:
- 1, 2, 3
- 4, 5, 6
- 7, 8, 9
"""
self.board = self.get_initial_matrix()
for i in range(0, self.base_iterations):
self.swap_columns()
self.swap_rows()
return self.board
def swap_columns(self):
"""Swap the columns and return the board"""
board = self.board
swap_list = range(0, self.size)
for i in range(0, self.size, self.base):
l_ind = i
r_ind = i + self.base
for j in range(self.base - 1):
swap = random_sample(swap_list[l_ind:r_ind], 2)
for k in range(0, self.size):
row = board[k]
row[swap[0]], row[swap[1]] = row[swap[1]], row[swap[0]]
def swap_rows(self):
"""Swap the rows and return the board"""
board = self.board
swap_list = range(0, self.size)
for i in range(0, self.size, self.base):
l_ind = i
r_ind = i + self.base
for j in range(self.base - 1):
swap = random_sample(swap_list[l_ind:r_ind], 2)
board[swap[0]], board[swap[1]] = board[swap[1]], board[swap[0]]
def get_playable_board(self):
"""Get the playable board
Replace random values with `None` for the user.
"""
board = self.get_full_board()
playable_board = deepcopy(board)
i = 0
while i < self.replacement_number:
point = random_sample(range(0, self.size), 2)
if playable_board[point[0]][point[1]] is None:
pass
else:
playable_board[point[0]][point[1]] = None
i += 1
return playable_board
def is_unique_list(board_list):
"""Check if items are unique excluding None"""
items = Counter(board_list)
items.pop(None, None)
if len(set(items.values())) > 1:
return False
return True
def verify_sudoku_board(board):
"""Return boolean on if board passes"""
size = len(board)
# check rows
for i in range(size):
if not is_unique_list(board[i]):
return False
# check columns
for i in range(size):
column = []
for j in range(size):
column.append(board[j][i])
if not is_unique_list(column):
return False
return True
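# A hedged usage sketch (size and level values are illustrative):
# board = SudokuBoard(9, level='medium')
# full = board.get_full_board()        # completed grid, rows/columns unique
# puzzle = board.get_playable_board()  # same grid with random cells set to None
# assert verify_sudoku_board(full)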
if __name__ == '__main__':
pass
|
CSC301H-Fall2013/Ultra-Remote-Medicine
|
sample/admin.py
|
Python
|
mit
| 1,569
| 0.007648
|
from django.contrib import admin
from django import forms
from django.contrib.auth.models import User
from sample.models import (Doctor, Worker, Patient, SpecialtyType, TimeSlot, Case, Comment, CommentGroup,
Scan)
class searchDoctor(admin.ModelAdmin):
list_display = ['user_first_name', 'user_last_name', 'get_some_value']
search_fields = ['user__first_name', 'user__last_name',
'specialties__name']
class searchWorker(admin.ModelAdmin):
list_display = ['user_first_name', 'user_last_name']
search_fields = ['user__first_name', 'user__last_name']
class searchPatient(admin.ModelAdmin):
list_display = ['first_name', 'last_name']
search_fields = ['first_name', 'last_name']
class searchSpeciality(admin.ModelAdmin):
search_fields = ['name']
list_display = ['name']
class searchTimeslot(admin.ModelAdmin):
search_fields = ['start_time', 'end_time']
class searchCase(admin.ModelAdmin):
search_fields = ['id']
list_display = ['id']
class searchComment(admin.ModelAdmin):
search_fields = ['text']
list_display = ['text']
class searchScan(admin.ModelAdmin):
search_fields = ['patient', 'comments']
admin.site.register(Doctor, searchDoctor)
admin.site.register(Worker, searchWorker)
admin.site.register(Patient, searchPatient)
admin.site.register(SpecialtyType, searchSpeciality)
admin.site.register(TimeSlot, searchTimeslot)
admin.site.register(Case, searchCase)
admin.site.register(Comment, searchComment)
admin.site.register(CommentGroup)
admin.site.register(Scan, searchScan)
|
fHachenberg/pyecstaticalib
|
Ecstatica/PixelEncoding.py
|
Python
|
gpl-3.0
| 6,356
| 0.013845
|
# coding: utf-8
#Created on 05.06.2012
#Copyright (C) 2013 Fabian Hachenberg
#This file is part of EcstaticaLib.
#EcstaticaLib is free software: you can redistribute it and/or modify
#it under the terms of the GNU General Public License as published by
#the Free Software Foundation, either version 3 of the License, or
#(at your option) any later version.
#More information about the license is provided in the LICENSE file.
def decodePixels(data):
'''
according to 35008
'''
maxlen = 128000
bytedata = bytearray(data)
out = bytearray()
pos = 0
lastcolor = int(0)
while pos < len(bytedata) and len(out) < maxlen:
#we test whether the lowest bit is set
typeval = (bytedata[pos] & 0x03)
runlength = bytedata[pos] >> 2
pos += 1
if runlength == 0:
return out
if typeval == 0:
#relative color values
#each byte contains up to 2 relative pixel values
#if runlength is an odd number, for the last of these packed bytes only the lower 4 bits are used
while runlength > 0:
first = bytedata[pos] & 0x0f
second = bytedata[pos] >> 4
pos += 1 #we move the input pointer forward independent of wether we will use the upper 4 bits
#lower 4 bits
if first & 0x08 != 0:#if bit #4 is set, the value is negative
first |= 0xf0
lastcolor += first
if lastcolor < 0:
lastcolor = lastcolor + 256
if lastcolor > 255:
lastcolor = lastcolor - 256
out.append(lastcolor)
runlength -= 1
if runlength == 0:
break
#upper 4 bits
if second & 0x08 != 0: #if bit #4 is set, the value is negative
#print(second, second | 0xf0)
second |= 0xf0
lastcolor += second
if lastcolor < 0:
lastcolor = lastcolor + 256
if lastcolor > 255:
lastcolor = lastcolor - 256
out.append(lastcolor)
runlength -= 1
elif typeval == 2:
#direct transfer of pixel values
for i in range(runlength):
lastcolor = bytedata[pos]
out.append(lastcolor)
pos += 1
else:
#run-length times the following pixel value
lastcolor = bytedata[pos]
for i in range(runlength):
out.append(lastcolor)
pos += 1
return out
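# Hedged worked example: header byte 0x0e has low bits 0b10 (typeval 2, direct
# copy) and run length 0x0e >> 2 == 3, so the next three bytes are copied as-is:
# decodePixels(b'\x0e\x0a\x14\x1e') -> bytearray(b'\n\x14\x1e')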
def decodePixelsWords(data):
'''
decodes depth data (2-byte-wide entries, as opposed to the color data with 1-byte-wide entries)
according to 35065
'''
maxlen = 128000
bytedata = bytearray(data)
out = []
pos = 0
lastcolor = int(0)
while pos < len(bytedata) and len(out) < maxlen:
#we test whether the lowest bit is set
typeval = (bytedata[pos] & 0x03)
runlength = bytedata[pos] >> 2
pos += 1
if runlength == 0:
return out
if typeval == 0:
#relative color values
#each byte contains up to 2 relative pixel values
#if runlength is an odd number, for the last of these packed bytes only the lower 4 bits are used
while runlength > 0:
first = (bytedata[pos] & 0x0f)
second = (bytedata[pos] >> 4)
pos += 1 #we move the input pointer forward independent of wether we will use the upper 4 bits
#lower 4 bits
if first & 0x08 != 0:#if bit #4 is set, the value is negative
first |= 0xfff0
lastcolor += first << 2
lastcolor &= 0xffff
out.append(lastcolor)
runlength -= 1
if runlength == 0:
break
#upper 4 bits
if second & 0x08 != 0: #if bit #4 is set, the value is negative
second |= 0xfff0
lastcolor += second << 2
lastcolor &= 0xffff
out.append(lastcolor)
runlength -= 1
elif typeval == 1:
for i in range(runlength):
value = bytedata[pos]
if value & 0x80: #negative?
value |= 0xff00
pos += 1
lastcolor += value << 2
lastcolor &= 0xffff
out.append(lastcolor)
elif typeval == 2:
#direct transfer of pixel values
for i in range(runlength):
lastcolor = ((bytedata[pos] + (bytedata[pos+1] << 8)) << 2) & 0xffff
out.append(lastcolor)
pos += 2
elif typeval == 3:
#run-length times the following pixel value
lastcolor = ((bytedata[pos] + (bytedata[pos+1] << 8)) << 2) & 0xffff
for i in range(runlength):
out.append(lastcolor)
pos += 2
return out
import unittest
import struct
import itertools
class TestView(unittest.TestCase):
def test_decodepixels(self):
#we have to load data for testing
viewfileobj = open("test/views/0002.raw", "rb")
typemark = viewfileobj.read(2)
len_a = struct.unpack("<i", viewfileobj.read(4))[0]
len_b = struct.unpack("<i", viewfileobj.read(4))[0]
print(len_a, len_b)
structstr = "".join(itertools.repeat("B", len_a))
packed_pixeldata_a = struct.unpack(structstr, viewfileobj.read(len_a))
decodePixels(packed_pixeldata_a)
structstr = "".join(itertools.repeat("B", len_b))
packed_depthdata = struct.unpack(structstr, viewfileobj.read(len_b))
decodePixelsWords(packed_depthdata)
|
markgw/pimlico
|
src/python/pimlico/datatypes/corpora/formatters/json.py
|
Python
|
gpl-3.0
| 313
| 0
|
import json
from pimlico.datatypes.corpora.json import JsonDocumentType
from pimlico.cli.browser.tools.formatter import DocumentBrowserFormatter
class JsonFormatter(DocumentBrowserFormatter):
DATATYPE = JsonDocumentType()
def format_document(self, doc):
return json.dumps(doc.data, indent=4)
|
narasimhan-v/avocado-misc-tests-1
|
io/net/bridge.py
|
Python
|
gpl-2.0
| 4,362
| 0.000229
|
#!/usr/bin/env python
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
# See LICENSE for more details.
#
# Copyright: 2017 IBM
# Author: Harish Sriram <harish@linux.vnet.ibm.com>
# Bridge interface test
import os
import netifaces
from avocado import Test
from avocado.utils import distro
from avocado.utils import process
from avocado.utils.network.interfaces import NetworkInterface
from avocado.utils.network.hosts import LocalHost
class Bridging(Test):
'''
Test bridge interface
'''
def check_failure(self, cmd):
if process.system(cmd, sudo=True, shell=True, ignore_status=True):
self.fail("Command %s failed" % cmd)
def setUp(self):
self.host_interfaces = self.params.get("interfaces",
default="").split(" ")
if not self.host_interfaces:
self.cancel("User should specify host interface/s")
if self.host_interfaces[0][0:2] == 'ib':
self.cancel("Network Bridge is not supported for IB")
interfaces = netifaces.interfaces()
for host_interface in self.host_interfaces:
if host_interface not in interfaces:
self.cancel("Interface is not available")
self.peer_ip = self.params.get("peer_ip", default=None)
if not self.peer_ip:
self.cancel("User should specify peer IP")
self.ipaddr = self.params.get("host_ip", default="")
self.netmask = self.params.get("netmask", default="")
self.bridge_interface = self.params.get("bridge_interface",
default="br0")
local = LocalHost()
self.networkinterface = NetworkInterface(self.bridge_interface, local,
if_type="Bridge")
def test_bridge_create(self):
'''
Set up the ethernet bridge configuration in the linux kernel
'''
detected_distro = distro.detect()
net_path = 'network-scripts'
if detected_distro.name == "SuSE":
net_path = 'network'
if os.path.exists('/etc/sysconfig/%s/ifcfg-%s' % (net_path, self.bridge_interface)):
self.networkinterface.remove_cfg_file()
self.check_failure('ip link del %s' % self.bridge_interface)
self.check_failure('ip link add dev %s type bridge'
% self.bridge_interface)
check_flag = False
cmd = 'ip -d link show %s' % self.bridge_interface
check_br = process.system_output(cmd, verbose=True,
ignore_status=True).decode("utf-8")
for line in check_br.splitlines():
if 'bridge' in line:
check_flag = True
if not check_flag:
self.fail('Bridge interface is not created')
for host_interface in self.host_interfaces:
self.check_failure('ip link set %s master %s'
% (host_interface, self.bridge_interface))
self.check_failure('ip addr flush dev %s' % host_interface)
def test_bridge_run(self):
'''
run bridge test
'''
try:
self.networkinterface.add_ipaddr(self.ipaddr, self.netmask)
self.networkinterface.save(self.ipaddr, self.netmask)
except Exception:
self.networkinterface.save(self.ipaddr, self.netmask)
self.networkinterface.bring_up()
if self.networkinterface.ping_check(self.peer_ip, count=5) is not None:
self.fail('Ping using bridge failed')
self.networkinterface.remove_ipaddr(self.ipaddr, self.netmask)
def test_bridge_delete(self):
'''
Set to original state
'''
self.check_failure('ip link del dev %s' % self.bridge_interface)
try:
self.networkinterface.restore_from_backup()
except Exception:
self.networkinterface.remove_cfg_file()
|
unicefuganda/edtrac
|
edtrac_project/rapidsms_polls/poll/app.py
|
Python
|
bsd-3-clause
| 3,327
| 0.010219
|
# -*- coding: utf-8 -*-
import rapidsms
import datetime
from rapidsms.apps.base import AppBase
from .models import Poll
from django.db.models import Q
from rapidsms_httprouter.models import Message,MessageBatch
class App(AppBase):
def respond_to_message(self,message,response_msg,poll):
if response_msg == poll.default_response:
try:
batch=MessageBatch.objects.get(name=str(poll.pk))
batch.status="Q"
batch.save()
msg=Message.objects.create(text=response_msg,status="Q",connection=message.connection,direction="O",in_response_to=message.db_message)
batch.messages.add(msg)
except MessageBatch.DoesNotExist:
message.respond(response_msg)
else:
message.respond(response_msg)
def handle (self, message):
# see if this contact matches any of our polls
if (message.connection.contact):
try:
poll = Poll.objects.filter(contacts=message.connection.contact).exclude(start_date=None).filter(
Q(end_date=None) | (~Q(end_date=None) & Q(end_date__gt=datetime.datetime.now()))).latest(
'start_date')
if poll.responses.filter(
contact=message.connection.contact).exists():
old_response=poll.responses.filter(contact=message.connection.contact)[0]
response_obj, response_msg = poll.process_response(message)
if poll.response_type == Poll.RESPONSE_TYPE_ONE :
if not response_obj.has_errors or old_response.has_errors:
old_response.delete()
if hasattr(message, 'db_message'):
db_message = message.db_message
db_message.handled_by = 'poll'
db_message.save()
if response_msg and response_msg.strip():
self.respond_to_message(message,response_msg,poll)
else:
response_obj.delete()
return False
else:
return False
else:
response_obj, response_msg = poll.process_response(message)
if hasattr(message, 'db_message'):
# if no other app handles this message, we want
# the handled_by field set appropriately,
# it won't since this app returns false
db_message = message.db_message
db_message.handled_by = 'poll'
db_message.save()
if response_msg and response_msg.strip():
self.respond_to_message(message,response_msg,poll)
elif poll.default_response :
#send default response anyway even for errors
self.respond_to_message(message,poll.default_response,poll)
# play nice, let other things handle responses
return False
except Poll.DoesNotExist:
pass
return False
|
wangjun/pythoner.net
|
pythoner/topic/views.py
|
Python
|
gpl-3.0
| 9,196
| 0.015263
|
#encoding:utf-8
"""
pythoner.net
Copyright (C) 2013 PYTHONER.ORG
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from django.http import HttpResponse,Http404,HttpResponseRedirect
from django.template import RequestContext
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from django.views.decorators.csrf import csrf_protect
from models import *
from forms import *
from django.core.paginator import Paginator,InvalidPage,EmptyPage
from django.shortcuts import render_to_response as render
from signals import new_topic_was_posted
from accounts.signals import update_user_repulation
import datetime
import time
def list(request,page=1):
current_page = 'topic'
topic = Topic()
topic_all = topic.all()
paginator = Paginator(topic_all,20)
page_title = u'正在讨论'
page_description = u'大家正在讨论的话题'
pre_url ='topic'
tags = Tag.objects.all()
try:
page = int(page)
except ValueError:
page = 1
try:
entrys = topics = paginator.page(page)
except (InvalidPage,EmptyPage):
entrys = topics = paginator.page(paginator.num_pages)
return render('topic_index.html',locals(),context_instance=RequestContext(request))
def list_by_user(request,user_id,page=1):
try:
user = User.objects.get(id=user_id)
except User.DoesNotExist:
raise Http404()
current_page = 'topic'
topic_all = Topic.objects.filter(author=user,deleted=False)
paginator = Paginator(topic_all,20)
title = u'正在讨论'
url ='topic'
tags = Tag.objects.all()
try:
page = int(page)
except ValueError:
page = 1
try:
entrys = topics = paginator.page(page)
except (InvalidPage,EmptyPage):
entrys = topics = paginator.page(paginator.num_pages)
return render('topic_index.html',locals(),context_instance=RequestContext(request))
@login_required
@csrf_protect
def add(request):
current_page = 'topic'
title = '发起新话题'
"""
写新的话题
"""
form_action = '/topic/add/'
if request.method == 'GET':
form = TopicForm()
return render('topic_edit.html',locals(),context_instance=RequestContext(request))
form = TopicForm(request.POST)
if form.is_valid():
data = form.clean()
new_topic = Topic(**data)
new_topic.author = request.user
new_topic.latest_response = datetime.datetime.now()
new_topic.ip = request.META.get('REMOTE_ADDR','0.0.0.0')
try:
new_topic.save()
except Exception,e:
messages.error(request,'服务器出现了错误,发表话题失败,请稍候重试')
return render('topic_edit.html',locals(),context_instance=RequestContext(request))
else:
# Send the signal that increases the user's reputation
update_user_repulation.send(
sender = __name__,
request = request,
user = request.user,
action = 'add',
content_type = 'topic',
message = u'发起新话题成功',
title = new_topic.title,
url = new_topic.get_absolute_url(),
)
# Send the signal
new_topic_was_posted.send(
sender = new_topic.__class__,
topic = new_topic
)
return HttpResponseRedirect('/topic/{0}/'.format(new_topic.id))
# Form data validation failed
else:
messages.error(request,'服务器出现了错误,发表话题失败,请稍候重试')
return render('topic_edit.html',locals(),context_instance=RequestContext(request))
@csrf_protect
@login_required
def edit(request,topic_id):
current_page = 'topic'
title = '修改话题'
"""
编辑话题
"""
try:
topic_id = int(topic_id)
topic = Topic.objects.get(deleted=False,id=topic_id,author=request.user)
except (ValueError,Topic.DoesNotExist):
raise Http404()
form_action = '/topic/%d/edit/' %topic.id
# Handle the GET request
if request.method == 'GET':
print 'get.',topic.md_content
form = TopicForm(initial={'title':topic.title,'md_content':topic.md_content})
return render('topic_edit.html',locals(),context_instance=RequestContext(request))
# Handle the POST request
form = TopicForm(request.POST)
if form.is_valid():
data = form.clean()
topic.title = data['title']
topic.md_content = data['md_content']
print 'reques.',request.POST.get('md_content')
print 'data.',data['md_content']
try:
topic.save()
except :
messages.error(request,'服务器出现了错误,保存数据失败,请稍候再试')
return render('topic_edit.html',locals(),context_instance=RequestContext(request))
return HttpResponseRedirect('/topic/%d/' %topic.id)
# Form data validation failed
else:
return render('topic_edit.html',locals(),context_instance=RequestContext(request))
def delete(request,topic_id):
current_page = 'topic'
"""
删除话题
"""
try:
topic_id = int(topic_id)
topic = Topic.objects.get(id=topic_id,author=request.user,deleted=False)
except (ValueError,Topic.DoesNotExist):
raise Http404()
else:
topic.deleted = True
topic.save()
set(request) # record the user's operation count
return HttpResponseRedirect('/topic/')
def detail(request,topic_id):
current_page = 'topic'
"""
话题详细页面
"""
next = '/topic/%d/' %int(topic_id)
timestamp = time.time()
try:
topic_id = int(topic_id)
topic = Topic.objects.get(id=topic_id,deleted=False)
except (ValueError,Topic.DoesNotExist):
raise Http404()
topic.view()
page_title = topic.title
page_description = u'大家正在讨论:{}'.format(topic.title)
return render('topic_detail.html',locals(),context_instance=RequestContext(request))
def list_by_tag(request,tag_name):
current_page = 'topic'
"""
按标签列出话题
"""
try:
tag_name = tag_name
tag = Tag.objects.get(name=tag_name)
except (ValueError,Tag.DoesNotExist):
raise Http404()
try:
page = int(request.GET.get('page',20))
except ValueError:
page = 1
topic_all = Topic.objects.filter(tag=tag,deleted=False)
paginator = Paginator(topic_all,1)
title = u'标签:%s' %tag.name
try:
entrys = paginator.page(page)
except (EmptyPage,InvalidPage):
entrys = paginator.page(paginator.num_pages)
return render('topic_index.html',locals(),context_instance=RequestContext(request))
@login_required
def favorite(request):
"""
用户收藏列表
"""
favorite_all = Favorite.objects.filter(user=request.user)
paginator = Paginator(favorite_all,2)
try:
page = int(request.GET.get('page',20))
except ValueError:
page = 1
try:
entrys = favorites = paginator.page(page)
except (InvalidPage,EmptyPage):
entrys = favorites = paginator.page(paginator.num_pages)
return render('topic_favorite.html',locals(),context_instance=RequestContext(request))
def favorite_mark(request,topic_id):
"""
添加收藏(ajax方式)
"""
# Check how the data was submitted
if request.method == 'GET':
return HttpResponseRedirect('/')
# Verify that the user is logged in
if not request.user.id:
return HttpResponse( "{'status':0}")
# Verify that the topic exists
try:
topic_id = int(topic_id)
|
ricardonhuang/blog
|
app/decorators.py
|
Python
|
gpl-3.0
| 516
| 0.001938
|
#coding=utf-8
from functools import wraps
from flask import abort
from flask_login import current_user
from app.models import Permission
def permission_required(permission):
def decorator(f):
@wraps(f)
def decorated_function(*args, **kwargs):
if not current_user.can(permission):
abort(403)
return f(*args, **kwargs)
return decorated_function
return decorator
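# A hedged usage sketch (the view name and permission constant are illustrative):
# @permission_required(Permission.WRITE_ARTICLES)
# def post_article():
#     ...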
def admin_required(f):
return permission_required(Permission.ADMINISTER)(f)
|
bootcamptropa/django
|
races/migrations/0002_auto_20160119_1628.py
|
Python
|
mit
| 870
| 0.002299
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-01-19 15:28
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
from django.utils.timezone import utc
class Migration(migrations.Migration):
dependencies = [
('races', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='race',
name='created_at',
field=models.DateTimeField(auto_now_add=True, default=datetime.datetime(2016, 1, 19, 15, 28, 29, 178154, tzinfo=utc)),
preserve_default=False,
),
migrations.AddField(
model_name='race',
name='updated_at',
field=models.DateTimeField(auto_now=True, default=datetime.datetime(2016, 1, 19, 15, 28, 36, 816957, tzinfo=utc)),
preserve_default=False,
),
]
|
dbischof90/sdetools
|
tests/test_scheme_implementations.py
|
Python
|
mit
| 6,216
| 0.005952
|
import functools as ft
import math
from collections import OrderedDict
import numba
import numpy as np
from sde import SDE
# from simulation.strong.explicit.predictorcorrector import Order_10 as pc_e
from simulation.strong.explicit.rk import Order_10 as rk_e
from simulation.strong.explicit.taylor import Order_05 as Euler_e
from simulation.strong.explicit.taylor import Order_10 as Milstein_e
from simulation.strong.implicit.taylor import Order_05_Trapez as Euler_i
from simulation.strong.implicit.taylor import Order_10_Trapez as Milstein_i
def map_scheme_to_arguments(cls, *args, **kwargs):
class MappedScheme(cls):
__init__ = ft.partialmethod(cls.__init__, *args, **kwargs)
return MappedScheme
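# Hedged note on the helper above: ft.partialmethod pre-binds the given
# arguments into __init__, so e.g. map_scheme_to_arguments(Euler_e,
# sde=gbm_process, parameter=gbm_para_sample) returns a subclass whose
# instances only need steps= and path= (as used below).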
def build_instance_list_of_mapped_schemes(mapped_scheme, step_list, differentials):
if len(step_list) != len(differentials):
raise ValueError('Wrong number of resolutions or differentials!')
scheme_list = []
for steps, diff in zip(step_list, differentials):
scheme_list.append(mapped_scheme(steps=steps, path=diff))
return scheme_list
def list_has_equal_strong_convergence_order(list_of_schemes, resolutions, order):
stepsizes = [int(np.ceil(end_point / i)) for i in resolutions]
differentials = [np.random.standard_normal(max(stepsizes)) * math.sqrt(end_point / max(stepsizes)) for i in
range(num_samples)]
analytical_values = np.full([num_samples, len(stepsizes)], np.nan)
scheme_values = [np.full([num_samples, len(stepsizes)], np.nan) for s in list_of_schemes]
list_scheme_instances = [list_of_schemes for i in range(num_samples * len(resolutions))]
for i in range(num_samples):
dW_full = differentials.pop()
for r_count, res in enumerate(stepsizes, start=0):
dW = [sum(dW_full[int(i * len(dW_full) / res): int((i + 1) * len(dW_full) / res)]) for i in range(res)]
scheme_instance = list_scheme_instances.pop()
for idx, scheme in enumerate(scheme_instance):
for path_value in scheme(steps=res, path=dW): pass
scheme_values[idx][i, r_count] = path_value
analytical_values[i, r_count] = gbm_endval_given_bm_endval(end_point, 1, 0.8, 0.6, np.cumsum(dW)[-1])
scheme_errors = [np.mean(abs((scheme_values[idx] - analytical_values)), axis=0) for idx in
range(len(list_of_schemes))]
log_errors = np.log2(resolutions)
error_regression_matrix = np.array([np.ones(log_errors.shape), log_errors]).transpose()
scheme_coefficients = [np.linalg.solve(error_regression_matrix.T.dot(error_regression_matrix),
error_regression_matrix.T.dot(np.log2(scheme_errors[idx]))) for idx in
range(len(list_of_schemes))]
scheme_orders = [coeff[1] for coeff in scheme_coefficients]
print(' Tested {} schemes of order {}.'.format(len(list_of_schemes), order))
return all(np.isclose(scheme_orders, order, 10e-2))
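# Hedged note on the fit above: for a scheme of strong order p the error
# behaves like error(h) ~ C * h**p, so log2(error) = log2(C) + p * log2(h)
# and the normal-equation solve recovers p as the slope of the regression.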
@numba.jit
def gbm_endval_given_bm_endval(t, x0, mu, sigma, bm_t):
return x0 * np.exp((mu - 0.5 * sigma ** 2) * t + sigma * bm_t)
@numba.jit
def gbm_drift(x, mu):
return mu * x
@numba.jit
def gbm_diffusion(x, sigma):
return sigma * x
@numba.jit
def gbm_difusion_x(sigma):
return sigma
end_point = 1
num_samples = 500
gbm_process = SDE(gbm_drift, gbm_diffusion, timerange=[0, end_point])
resolutions = [2 ** -4, 2 ** -5, 2 ** -6, 2 ** -7, 2 ** -8, 2 ** -9]
gbm_para_sample = OrderedDict(mu=0.8, sigma=0.6)
gbm_derivatives = {'diffusion_x': gbm_difusion_x}
stepsizes = [int(np.ceil(end_point / i)) for i in resolutions]
def test_convergence_order_05():
mapped_euler_e = map_scheme_to_arguments(Euler_e, sde=gbm_process, parameter=gbm_para_sample)
mapped_euler_i = map_scheme_to_arguments(Euler_i, sde=gbm_process, parameter=gbm_para_sample)
# mapped_pc_e = map_scheme_to_arguments(pc_e, sde=gbm_process, parameter=gbm_para_sample, derivatives=gbm_derivatives, beta=1)
list_schemes = [mapped_euler_e, mapped_euler_i] #mapped_pc_e]
assert list_has_equal_strong_convergence_order(list_schemes, resolutions, 0.5)
def test_convergence_order_10():
mapped_milstein_e = map_scheme_to_arguments(Milstein_e, sde=gbm_process, parameter=gbm_para_sample, derivatives=gbm_derivatives)
mapped_milstein_i = map_scheme_to_arguments(Milstein_i, sde=gbm_process, parameter=gbm_para_sample, derivatives=gbm_derivatives)
mapped_rk_e = map_scheme_to_arguments(rk_e, sde=gbm_process, parameter=gbm_para_sample)
list_schemes = [mapped_milstein_e, mapped_milstein_i, mapped_rk_e]
assert list_has_equal_strong_convergence_order(list_schemes, resolutions, 1.0)
def test_if_path_is_handed_through_correctly():
steps_used = 50
sample_differential_brownian_motion = np.array([0.03329902, 0.20850244, 0.12094308, -0.14159548, 0.02973983,
0.06103259, -0.00915205, 0.01928274, 0.09207789, -0.13199381,
0.17663064, 0.1333172, -0.01288733, -0.31281056, -0.05924482,
-0.01702982, 0.18025385, -0.17514341, 0.03477228, 0.31712905,
-0.25351569, -0.19384718, -0.29929325, 0.20444405, 0.08353272,
0.09427778, 0.05516237, -0.18329133, -0.18365494, -0.13901742,
-0.15492822, 0.0384501, -0.0544241, -0.15041881, -0.07649629,
0.07692755, -0.12122493, 0.18393892, 0.12113368, 0.10871338,
-0.1328373, -0.05468304, 0.08074539, 0.52846189, -0.00426639,
0.04982364, 0.16280621, -0.03664431, 0.22651330, -0.08565257])
sample_sde = SDE(lambda x: x, lambda x: x)
euler_instance = Euler_e(sample_sde, parameter=OrderedDict(), steps=steps_used, path=sample_differential_brownian_motion)
for _ in euler_instance: pass
assert all(euler_instance.return_path() == sample_differential_brownian_motion)
|
mationic/pyload
|
module/plugins/hoster/Keep2ShareCc.py
|
Python
|
gpl-3.0
| 4,411
| 0.007255
|
# -*- coding: utf-8 -*-
import re
import urlparse
from module.plugins.captcha.ReCaptcha import ReCaptcha
from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
class Keep2ShareCc(SimpleHoster):
__name__ = "Keep2ShareCc"
__type__ = "hoster"
__version__ = "0.25"
__status__ = "testing"
__pattern__ = r'https?://(?:www\.)?(keep2share|k2s|keep2s)\.cc/file/(?P<ID>\w+)'
__config__ = [("use_premium", "bool", "Use premium account if available", True)]
__description__ = """Keep2Share.cc hoster plugin"""
__license__ = "GPLv3"
__authors__ = [("stickell", "l.stickell@yahoo.it"),
("Walter Purcaro", "vuolter@gmail.com")]
URL_REPLACEMENTS = [(__pattern__ + ".*", "http://keep2s.cc/file/\g<ID>")]
NAME_PATTERN = r'File: <span>(?P<N>.+?)</span>'
SIZE_PATTERN = r'Size: (?P<S>[^<]+)</div>'
OFFLINE_PATTERN = r'File not found or deleted|Sorry, this file is blocked or deleted|Error 404'
TEMP_OFFLINE_PATTERN = r'Downloading blocked due to'
LINK_FREE_PATTERN = r'"(.+?url.html\?file=.+?)"|window\.location\.href = \'(.+?)\';'
LINK_PREMIUM_PATTERN = r'window\.location\.href = \'(.+?)\';'
CAPTCHA_PATTERN = r'src="(/file/captcha\.html.+?)"'
WAIT_PATTERN = r'Please wait ([\d:]+) to download this file'
TEMP_ERROR_PATTERN = r'>\s*(Download count files exceed|Traffic limit exceed|Free account does not allow to download more than one file at the same time)'
ERROR_PATTERN = r'>\s*(Free user can\'t download large files|You no can access to this file|This download available only for premium users|This is private file)'
def check_errors(self):
m = re.search(self.TEMP_ERROR_PATTERN, self.html)
if m:
self.info['error'] = m.group(1)
self.wantReconnect = True
self.retry(wait_time=30 * 60, msg=m.group(0))
m = re.search(self.ERROR_PATTERN, self.html)
if m:
errmsg = self.info['error'] = m.group(1)
self.error(errmsg)
m = re.search(self.WAIT_PATTERN, self.html)
if m:
self.log_debug("Hoster told us to wait for %s" % m.group(1))
#: String to time convert courtesy of https://stackoverflow.com/questions/10663720
ftr = [3600, 60, 1]
wait_time = sum(a * b for a, b in zip(ftr, map(int, m.group(1).split(':'))))
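# Hedged example of the conversion above: "1:02:30" -> 1*3600 + 2*60 + 30 = 3750 s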
self.wantReconnect = True
self.retry(wait_time=wait_time, msg="Please wait to download this file")
self.info.pop('error', None)
def handle_free(self, pyfile):
self.fid = re.search(r'<input type="hidden" name="slow_id" value="(.+?)">', self.html).group(1)
self.html = self.load(pyfile.url, post={'yt0': '', 'slow_id': self.fid})
# self.log_debug(self.fid)
# self.log_debug(pyfile.url)
self.check_errors()
m = re.search(self.LINK_FREE_PATTERN, self.html)
if m is None:
self.handle_captcha()
self.wait(31)
self.html = self.load(pyfile.url)
m = re.search(self.LINK_FREE_PATTERN, self.html)
if m is None:
self.error(_("Free download link not found"))
self.link = m.group(1)
def handle_captcha(self):
post_data = {'free' : 1,
'freeDownloadRequest': 1,
'uniqueId' : self.fid,
'yt0' : ''}
m = re.search(r'id="(captcha\-form)"', self.html)
self.log_debug("captcha-form found %s" % m)
m = re.search(self.CAPTCHA_PATTERN, self.html)
self.log_debug("CAPTCHA_PATTERN found %s" % m)
if m:
captcha_url = urlparse.urljoin("http://keep2s.cc/", m.group(1))
post_data['CaptchaForm[code]'] = self.captcha.decrypt(captcha_url)
else:
recaptcha = ReCaptcha(self)
response, challenge = recaptcha.challenge()
post_data.update({'recaptcha_challenge_field': challenge,
'recaptcha_response_field' : response})
self.html = self.load(self.pyfile.url, post=post_data)
if 'verification code is incorrect' not in self.html:
self.captcha.correct()
else:
self.captcha.invalid()
getInfo = create_getInfo(Keep2ShareCc)
|
fengyqf/wildmatch
|
wildmatch.py
|
Python
|
gpl-2.0
| 2,849
| 0.023619
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
待匹配的数据 (tofill)及匹配数据池(pool),存储于csv文件中,两个csv都要包含主键列,
及数据列。脚本将对tofill的每一条数据字段,在pool文件的数据列中做匹配,如果pool
数据列包含tofile列数据,则将各自主键列建立一条关系记录,存储在结果文件csv中
以上提到的csv文件,格式要求:
使用逗号分隔,字段可以使用双引号括起。
不支持字段内换行
必须包含主键列,主键列不得重复
主键列在数据列后
----
事实上,两表主键列可以重复,只不过对应关系也是重复的;恰好需此效果可以用。
---------------------------------
示例:
tofill.csv
----
id,data
201,AAAA
202,BBBB
203,CCCC
204,DDDD
205,EEEE
pool.csv
----
id,data
90001,AAAA/xyz
20002,BBBBabc
20003,12BBBB
20004,EEwowEE
20005,AABBCC
配置参数
----
pool_path='pool.csv'
pool_pk_index=0 # 主键列的列号,从0起编号,下同
pool_data_index=1
tofill_path='tofill.csv'
tofill_pk_index=0
tofill_data_index=1
----
匹配结果将如下
output.csv
----
tofill_pk,tofill_data,pool_pk,pool_data
id,data,id,data
2
|
01,AAAA,90001,AAAA/xyz
202,BBBB,20003,12BBBB
---------------------------------
"""
pool_path='pool.csv'
pool_pk_index=0
pool_data_index=1
tofill_path='tofill.csv'
tofill_pk_index=0
tofill_data_index=1
# report the number of processed lines after every given number of iterations
tick_every_lines=100
import sys
import csv
pool=[]
tofill=[]
match=[]
reader=csv.reader(file(pool_path,'rb'))
for line in reader:
if len(line)>=pool_data_index:
pool.append([line[pool_pk_index],line[pool_data_index]])
print 'pool file loaded, %s lines'%(len(pool))
reader=csv.reader(file(tofill_path,'rb'))
for line in reader:
if len(line)>=pool_data_index:
tofill.append([line[pool_pk_index],line[pool_data_index]])
tofill_count=len(tofill)
print 'tofill file loaded, %s lines'%(tofill_count)
writer=csv.writer(file('output.csv','wb'))
writer.writerow(['tofill_pk','tofill_data','pool_pk','pool_data'])
print 'matching start ...'
src=''
ok=0
tick=0
for f in tofill:
ok=0
src=''
tick+=1
if tick % tick_every_lines == 0:
matched_count=len(match)
print 'matched %d/%d lines (%.2f%%), %d matched....'%(tick,tofill_count,tick*100.0/tofill_count,matched_count)
for p in pool:
if p[1].find(f[1]) != -1:
line=[f[0],f[1],p[0],p[1]]
writer.writerow(line)
match.append(line)
ok=1
src=p[1]
break
if ok:
print "toofill[%s]:%s found in pool[%s]:%s ...... %s"%(f[0],f[1],p[0],p[1],ok)
else:
pass
print "\n%s records matched, written to file output.csv."%(len(match))
#raw_input(' press any key to exit')
|
qubs/climate-data-api
|
herbarium_data/models.py
|
Python
|
apache-2.0
| 3,109
| 0.002252
|
import datetime
from django.db import models
from django.core.exceptions import ValidationError
def validate_year(year):
if year is None:
return # Years can be null
if year < 1800 or year > datetime.datetime.now().year:
raise ValidationError("Not a valid year.")
def validate_day_of_month(day):
if day is None:
return # Days can be null
elif day > 31 or day < 1:
raise ValidationError("Not a valid day.")
class Specimen(models.Model):
"""
A model of a herbarium_data specimen entry.
"""
MONTH_CHOICES = (
(1, "January"),
(2, "February"),
(3, "March"),
(4, "April"),
(5, "May"),
(6, "June"),
(7, "July"),
(8, "August"),
(9, "September"),
(10, "October"),
(11, "November"),
(12, "December")
)
# Generated Attributes
def latin_name(self):
return "{} {}".format(self.genus, self.species)
def date_collected_str(self):
if self.year_collected:
if self.month_collected:
if self.day_collected:
return "{} {}, {}".format(self.get_month_collected_display(), self.day_collected,
self.year_collected)
else:
return "{}, {}".format(self.get_month_collected_display(), self.year_collected)
else:
return self.year_collected
return None
date_collected_str.short_description = "Date Collected"
# Schema Attributes
created = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(auto_now=True)
dataset = models.CharField(max_length=50, default="")
genus = models.CharField(max_length=50, default="", db_index=True)
species = models.CharField(max_length=50, default="", db_index=True)
common_name = models.CharField(max_length=255, default="")
dth = models.CharField(max_length=10, default="")
accession = models.CharField(max_length=20, default="")
year_collected = models.PositiveSmallIntegerField(null=True, validators=[validate_year])
month_collected = models.PositiveSmallIntegerField(null=True, choices=MONTH_CHOICES)
day_collected = models.PositiveSmallIntegerField(null=True, validators=[validate_day_of_month])
collectors = models.TextField(default="")
map_included = models.NullBooleanField()
map_reference = models.CharField(max_length=255, default="")
county = models.CharField(max_length=127, default="")
township = models.CharField(max_length=127, default="")
country = models.CharField(max_length=127, default="")
location = models.CharField(max_length=127, default="")
habitat = models.CharField(max_length=127, default="")
notes = models.TextField(default="")
image = models.ImageField(null=True)
def __repr__(self):
return "<Specimen {} | {} {}>".format(self.accession, self.genus, self.species)
def __str__(self):
return "Specimen {}: {} {}".format(self.accession, self.genus, self.species)
|
pdunnigan/namefileparse
|
namefileparse/__init__.py
|
Python
|
mit
| 1,123
| 0.016919
|
# Copyright (c) 2012 Patrick Dunnigan
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in the
# Software without restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
# PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
__version__ = '1.0'
|
Jgarcia-IAS/Fidelizacion_odoo
|
openerp/extras/jasper_server/__openerp__.py
|
Python
|
agpl-3.0
| 2,744
| 0
|
# -*- coding: utf-8 -*-
##############################################################################
#
# jasper_server module for OpenERP
# Copyright (c) 2008-2009 EVERLIBRE (http://everlibre.fr) Eric VERNICHON
# Copyright (C) 2009-2011 SYLEAM ([http://www.syleam.fr]) Christophe CHAUVET
#
# This file is a part of jasper_server
#
# jasper_server is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# jasper_server is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see [http://www.gnu.org/licenses/].
#
##############################################################################
{
'name': 'JasperReport Server Interface',
'version': '6.3',
'category': 'Reporting',
'sequence': 20,
'complexity': "expert",
'description': """This module interface JasperReport Server with OpenERP
Features:
- Document source must be in CSV, XML
- Save document as attachment on object
- Retrieve attachment if present
- Launch multiple reports and merge in one printing action
- Add additionnals parameters (ex from fields function)
- Affect group on report
- Use context to display or not the print button
(eg: in stock.picking separate per type)
- Execute SQL query before and after treatement
- Launch report based on SQL View
- Add additional pages at the begining or at the end of the document
This module required library to work properly
# pip install httplib2 (>= 0.6.0)
# pip install pyPdf (>= 1.13)
In collaboration with Eric Vernichon (from Everlibre)
""",
'author': 'SYLEAM',
'website': 'http://www.syleam.fr',
'images': ['images/accueil.png', 'images/palette.png',
'images/document_form.png'],
'depends': [
'base',
],
'data': [
'security/groups.xml',
'security/ir.model.access.csv',
'data/jasper_document_extension.xml',
'wizard/wizard.xml',
'wizard/load_file_view.xml',
'obj_server_view.xml',
'obj_document_view.xml',
],
'demo': [
'demo/jasper_document.xml',
],
'installable': True,
'auto_install': False,
'external_dependencies': {'python': ['httplib2', 'pyPdf', 'dime']},
'application': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
vincentdavis/corrupt_image_finder
|
test_images/RandomMutateImage.py
|
Python
|
bsd-3-clause
| 73
| 0.013699
|
im = open('006993_photoA.tif', 'rb')
ord(im.read(1))
chr(ord(im.read(1)))
|
polyaxon/polyaxon
|
core/tests/test_polypod/test_contexts.py
|
Python
|
apache-2.0
| 14,317
| 0.000279
|
#!/usr/bin/python
#
# Copyright 2018-2021 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from polyaxon.connections.kinds import V1ConnectionKind
from polyaxon.connections.schemas import V1ClaimConnection
from polyaxon.containers import contexts as container_contexts
from polyaxon.polyaxonfile.specs import kinds
from polyaxon.polyflow import V1CloningKind, V1CompiledOperation, V1RunKind
from polyaxon.polypod.compiler.contexts import resolve_contexts
from polyaxon.schemas.types import V1ConnectionType
from polyaxon.utils.test_utils import BaseTestCase
from polyaxon.utils.tz_utils import now
class V1CloningKin(object):
pass
@pytest.mark.polypod_mark
class TestResolveContexts(BaseTestCase):
def test_resolver_default_contexts(self):
context_root = container_contexts.CONTEXT_ROOT
compiled_operation = V1CompiledOperation.read(
{
"version": 1.1,
"kind": kinds.COMPILED_OPERATION,
"plugins": {
"auth": False,
"shm": False,
"collectLogs": False,
"collectArtifacts": False,
"collectResources": False,
},
"run": {"kind": V1RunKind.JOB, "container": {"image": "test"}},
}
)
spec = resolve_contexts(
namespace="test",
owner_name="user",
project_name="project",
project_uuid="uuid",
run_uuid="uuid",
run_name="run",
run_path="test",
compiled_operation=compiled_operation,
artifacts_store=None,
connection_by_names={},
iteration=None,
created_at=None,
compiled_at=None,
)
assert spec == {
"globals": {
"owner_name": "user",
"project_unique_name": "user.project",
"project_name": "project",
"project_uuid": "uuid",
"run_info": "user.project.runs.uuid",
"context_path": context_root,
"artifacts_path": "{}/artifacts".format(con
|
text_root),
"name": "run",
"uuid": "uu
|
id",
"namespace": "test",
"iteration": None,
"created_at": None,
"compiled_at": None,
"schedule_at": None,
"started_at": None,
"finished_at": None,
"duration": None,
"cloning_kind": None,
"original_uuid": None,
"is_independent": True,
"store_path": "",
},
"init": {},
"connections": {},
}
def test_resolver_init_and_connections_contexts(self):
context_root = container_contexts.CONTEXT_ROOT
store = V1ConnectionType(
name="test_claim",
kind=V1ConnectionKind.VOLUME_CLAIM,
schema=V1ClaimConnection(
mount_path="/claim/path", volume_claim="claim", read_only=True
),
)
compiled_operation = V1CompiledOperation.read(
{
"version": 1.1,
"kind": kinds.COMPILED_OPERATION,
"plugins": {
"auth": False,
"shm": False,
"mountArtifactsStore": True,
"collectLogs": False,
"collectArtifacts": False,
"collectResources": False,
},
"run": {
"kind": V1RunKind.JOB,
"container": {"image": "test"},
"connections": [store.name],
"init": [{"connection": store.name}],
},
}
)
date_value = now()
spec = resolve_contexts(
namespace="test",
owner_name="user",
project_name="project",
project_uuid="uuid",
run_uuid="uuid",
run_name="run",
run_path="test",
compiled_operation=compiled_operation,
artifacts_store=store,
connection_by_names={store.name: store},
iteration=12,
created_at=date_value,
compiled_at=date_value,
cloning_kind=V1CloningKind.COPY,
original_uuid="uuid-copy",
is_independent=False,
)
assert spec == {
"globals": {
"owner_name": "user",
"project_unique_name": "user.project",
"project_name": "project",
"project_uuid": "uuid",
"name": "run",
"uuid": "uuid",
"context_path": context_root,
"artifacts_path": "{}/artifacts".format(context_root),
"run_artifacts_path": "/claim/path/test",
"run_outputs_path": "/claim/path/test/outputs",
"namespace": "test",
"iteration": 12,
"run_info": "user.project.runs.uuid",
"created_at": date_value,
"compiled_at": date_value,
"schedule_at": None,
"started_at": None,
"finished_at": None,
"duration": None,
"is_independent": False,
"cloning_kind": V1CloningKind.COPY,
"original_uuid": "uuid-copy",
"store_path": "/claim/path",
},
"init": {"test_claim": store.schema.to_dict()},
"connections": {"test_claim": store.schema.to_dict()},
}
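    # An observation on the assertion above (not from project docs): with
    # mountArtifactsStore enabled, the run-level paths derive from the
    # claim's mount path plus the run path given to resolve_contexts, e.g.
    # "/claim/path" + "test" -> "/claim/path/test" and its "/outputs" child,
    # while store_path carries the bare mount path "/claim/path".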
def test_resolver_outputs_collections(self):
context_root = container_contexts.CONTEXT_ROOT
store = V1ConnectionType(
name="test_claim",
kind=V1ConnectionKind.VOLUME_CLAIM,
schema=V1ClaimConnection(
mount_path="/claim/path", volume_claim="claim", read_only=True
),
)
compiled_operation = V1CompiledOperation.read(
{
"version": 1.1,
"kind": kinds.COMPILED_OPERATION,
"plugins": {
"auth": False,
"shm": False,
"mountArtifactsStore": False,
"collectLogs": False,
"collectArtifacts": True,
"collectResources": True,
},
"run": {
"kind": V1RunKind.JOB,
"container": {"image": "test"},
"connections": [store.name],
"init": [{"connection": store.name}],
},
}
)
spec = resolve_contexts(
namespace="test",
owner_name="user",
project_name="project",
project_uuid="uuid",
run_uuid="uuid",
run_name="run",
run_path="test",
compiled_operation=compiled_operation,
artifacts_store=store,
connection_by_names={store.name: store},
iteration=12,
created_at=None,
compiled_at=None,
is_independent=True,
)
assert spec == {
"globals": {
"owner_name": "user",
"project_name": "project",
"project_unique_name": "user.project",
"project_uuid": "uuid",
"name": "run",
"uuid": "uuid",
"run_info": "user.project.runs.uuid
|
skraghu/softlayer-python
|
SoftLayer/CLI/block/replication/order.py
|
Python
|
mit
| 2,315
| 0
|
"""Order a block storage replica volume."""
# :license: MIT, see LICENSE for more details.
import click
import SoftLayer
from SoftLayer.CLI import environment
from SoftLayer.CLI import exceptions
CONTEXT_SETTINGS = {'token_normalize_func': lambda x: x.upper()}
@click.command(context_settings=CONTEXT_SETTINGS)
@click.argument('volume_id')
@click.option('--snapshot-schedule', '-s',
help='Snapshot schedule to use for replication, '
'(HOURLY | DAILY | WEEKLY)',
required=True,
type=click.Choice(['HOURLY', 'DAILY', 'WEEKLY']))
@click.option('--location', '-l',
help='Short name of the data center for the replicant '
'(e.g.: dal09)',
required=True)
@click.option('--tier',
help='Endurance Storage Tier (IOPS per GB) of the primary'
' volume for which a replicant is ordered [optional]',
type=click.Choice(['0.25', '2', '4', '10']))
@click.option('--os-type',
help='Operating System Type (e.g.: LINUX) of the primary'
' volume for which a replica is ordered [optional]',
type=click.Choice([
'HYPER_V',
'LINUX',
'VMWARE',
'WINDOWS_2008',
'WINDOWS_GPT',
'WINDOWS',
'XEN']))
@environment.pass_env
def cli(env, volume_id, snapshot_schedule, location, tier, os_type):
"""Order a block storage replica volume."""
block_manager = SoftLayer.BlockStorageManager(env.client)
if tier is not None:
tier = float(tier)
try:
order = block_manager.order_replicant_volume(
volume_id,
            snapshot_schedule=snapshot_schedule,
            location=location,
            tier=tier,
            os_type=os_type,
        )
except ValueError as ex:
raise exceptions.ArgumentError(str(ex))
    if 'placedOrder' in order:
click.echo("Order #{0} placed successfully!".format(
order['placedOrder']['id']))
for item in order['placedOrder']['items']:
click.echo(" > %s" % item['description'])
else:
click.echo("Order could not be placed! Please verify your options " +
"and try again.")
|
ReapOmen/open-media
|
openmedia/player/track.py
|
Python
|
gpl-3.0
| 2,287
| 0
|
import os.path
from gi.repository import Gst, GstPbutils
DEFAULT_IMAGE_PATH = './openmedia/gui/res/img/no_image.png'
class Track(object):
"""
    This holds data about media files.
    The information about the media file associated with this includes
the name, file path, duration and metadata.
"""
def __init__(self, file_path):
"""
Create a track using data from the specified file.
Parameters
----------
:param file_path: the path of the file out of which to create a track
:type file_path: str
"""
self._file_path = os.path.abspath(file_path)
self._metadata = {}
discoverer_info = self._get_discoverer_info(self._file_path)
tags = discoverer_info.get_tags()
if tags.get_string(Gst.TAG_TITLE)[0]:
self._metadata['title'] = tags.get_string(Gst.TAG_TITLE)[1]
else:
self._metadata['title'] = 'untitled'
if tags.get_sample(Gst.TAG_IMAGE)[0]:
self._metadata['image'] = tags.get_sample(Gst.TAG_IMAGE)[1]
else:
self._metadata['image'] = os.path.abspath(DEFAULT_IMAGE_PATH)
self._duration = discoverer_info.get_duration() / Gst.SECOND
def _get_discoverer_info(self, file_path):
        Gst.init(None)  # None: no GStreamer command-line options to parse
discoverer = GstPbutils.Discoverer()
return discoverer.discover_uri("file://" + file_path)
@property
def name(self):
"""
The name of this track.
:getter: Return this track's name.
:type: str
"""
return self._metadata['title']
@property
def file_path(self):
"""
The path of the file corresponding to this track.
:getter: Return this track's file path.
:type: str
"""
return self._file_path
@property
def metadata(self):
"""
The metadata associated with this track.
The metadata is a dictionary mapping tags to values.
:getter: Return this track's metadata.
:type: dict
"""
return self._metadata
@property
def duration(self):
"""
The duration of this track.
:getter: Return this track's duration.
:type: int
"""
return self._duration
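# A hedged usage sketch (the media path is hypothetical, and GStreamer must
# be available for discovery to succeed):
#
# track = Track('/music/example.ogg')
# print(track.name, track.duration)
# print(track.metadata['image'])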
|
srio/oasys-comsyl
|
orangecontrib/comsyl/scripts/load_results_from_file.py
|
Python
|
mit
| 2,411
| 0.014102
|
import numpy
from srxraylib.plot.gol import plot_image, plot
import sys
from comsyl.scripts.CompactAFReader import CompactAFReader
def plot_stack(mystack,what="intensity",title0="X",title1="Y",title2="Z"):
from silx.gui.plot.StackView import StackViewMainWindow
from silx.gui import qt
app = qt.QApplication(sys.argv[1:])
sv = StackViewMainWindow()
sv.setColormap("jet", autoscale=True)
if what == "intensity":
sv.setStack(numpy.absolute(mystack))
elif what == "real":
sv.setStack(numpy.real(mystack))
elif what == "imaginary":
sv.setStack(numpy.imag(mystack))
elif what == "phase":
sv.setStack(numpy.angle(mystack))
elif what == "phase_deg":
|
sv.setStack(numpy.angle(mystack,deg=True))
else:
raise Exception("Undefined label "+what)
sv.se
|
tLabels([title0,title1,title2])
sv.show()
app.exec_()
def load_stack(filename):
# filename = "/users/srio/OASYS_VE/comsyl_srio/calculations/new_u18_2m_1h_s2.5"
reader = CompactAFReader(filename)
print("File %s:" % filename)
print("contains")
print("%i modes" % reader.number_modes())
print("on the grid")
print("x: from %e to %e" % (reader.x_coordinates().min(), reader.x_coordinates().max()))
print("y: from %e to %e" % (reader.y_coordinates().min(), reader.y_coordinates().max()))
print("calculated at %f eV" % reader.photon_energy())
print("with total intensity in (maybe improper) normalization: %e" % reader.total_intensity().real.sum())
print("Occupation and max abs value of the mode")
x = reader.x_coordinates()
y = reader.y_coordinates()
eigenvalues = numpy.zeros(reader.number_modes())
mystack = numpy.zeros((reader.number_modes(),y.size,x.size),dtype=complex)
for i_mode in range(reader.number_modes()):
eigenvalues[i_mode] = reader.occupation(i_mode)
mode = reader.mode(i_mode)
mystack[i_mode,:,:] = mode.T
return x,y,mystack, eigenvalues
if __name__ == "__main__":
h,v,mystack, occupation = load_stack("/users/srio/OASYS_VE/comsyl_srio/calculations/new_u18_2m_1h_s2.5")
plot_stack(mystack,what="intensity", title0="Mode index",
title1="V from %3.2f to %3.2f um"%(1e3*v.min(),1e3*v.max()),
title2="H from %3.2f to %3.2f um"%(1e3*h.min(),1e3*h.max()))
plot(numpy.arange(occupation.size),occupation)
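    # A hedged extra step: the occupation spectrum is often easier to read
    # cumulatively, as the fraction of total intensity carried by the first
    # N modes.
    # cumulative = numpy.cumsum(occupation) / occupation.sum()
    # plot(numpy.arange(occupation.size), cumulative)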
|
DayGitH/Python-Challenges
|
DailyProgrammer/DP20170414C.py
|
Python
|
mit
| 1,976
| 0.005567
|
"""
[2017-04-14] Challenge #310 [Hard] The Guards and the Mansion
https://www.reddit.com/r/dailyprogrammer/comments/65fkwh/20170414_challenge_310_hard_the_guards_and_the/
# Description
I recently came into some money and built myself a mansion. And I'm afraid of robbers who now want to come and steal
the rest of my money. I built my house in the middle of my property, but now I need some guard towers. I didn't make
*that* much money, so I can't build an *infinite* number of towers with an infinite number of guards - I can only
afford 3. But I do need your help - how many towers do I need to build to give my house adequate coverage, and
sufficient variety of coverage to keep thieves at bay?
For this problem ...
- Assume a Euclidean 2 dimensional space with my mansion at the center (0,0)
- My mansion is circular with a unit radius of 1
- I'll tell you the locations of the guard towers as Euclidean coordinates, for example (1,1). They may be negative.
- The towers only work if they form a triangle that fully emcompasses my mansion (remember, a circle centered at (0,0))
I'll give you the locations of the towers, one at a time, as a pair of integers *x* and *y* representing the
coordinates. For *every* row of input please tell me how many different triangles I can have - that is arrangements of
3 occupied towers. I like diversity, let's keep the thieves guessing as to where the towers are occupied every night.
# Input Description
You'll be given an integer on the first line telling you how many lines of tower coordinate pairs to read. Example:
4
3 -1
-1 3
-1 -1
-5 -2
# Output Description
For *every row of input* tell me how many triangles I can make that fully enclose my mansion at (0,0) with a unit
radius of 1. Example:
0
0
1
2
# Challenge Input
10
2 -7
2 2
4 -9
-4 -6
9 3
-8 -7
6 0
-5 -6
-1 -1
-7 10
"""
def main():
pass
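# main() above is left as a stub; below is a hedged sketch of one standard
# approach (helper names are my own, not from the challenge). A triangle is
# the intersection of three half-planes, so it contains the unit disk at the
# origin exactly when, for every edge, the origin lies strictly on the
# interior side of the edge's line and at distance >= 1 from it.
from itertools import combinations

def encloses_unit_circle(a, b, c):
    pts = (a, b, c)
    for i in range(3):
        (x1, y1), (x2, y2) = pts[i], pts[(i + 1) % 3]
        (x3, y3) = pts[(i + 2) % 3]
        # Cross products: signed distance (times edge length) from the edge's
        # line to the origin and to the opposite vertex.
        side_origin = (x2 - x1) * (-y1) - (y2 - y1) * (-x1)
        side_third = (x2 - x1) * (y3 - y1) - (y2 - y1) * (x3 - x1)
        if side_origin * side_third <= 0:  # origin not strictly interior
            return False
        edge_len = ((x2 - x1) ** 2 + (y2 - y1) ** 2) ** 0.5
        if abs(side_origin) / edge_len < 1:  # edge line cuts the unit disk
            return False
    return True

def count_enclosing_triangles(towers):
    return sum(1 for tri in combinations(towers, 3)
               if encloses_unit_circle(*tri))

# A driver would read N, append each tower, and print
# count_enclosing_triangles(towers) after every line; for the example input
# (3,-1), (-1,3), (-1,-1), (-5,-2) this yields 0, 0, 1, 2 as expected.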
if __name__ == "__main__":
main()
|
google/gnxi
|
oc_config_validate/oc_config_validate/models/vlan.py
|
Python
|
apache-2.0
| 1,672
| 0.009569
|
# -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs to be improved)
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
class openconfig_vlan_types(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-vlan-types - based on the path /openconfig-vlan-types. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: This module defines configuration and state variables for VLANs,
in addition to VLAN parameters associated with interfaces
"""
_pyangbind_elements = {}
class openconfig_vlan(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-vlan - based on the path /openconfig-vlan. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: This module defines configuration and state variables for VLANs,
in addition to VLAN parameters associated with interfaces
"""
_pyangbind_elements = {}
|