| repo_name (stringlengths 5-92) | path (stringlengths 4-221) | copies (stringclasses, 19 values) | size (stringlengths 4-6) | content (stringlengths 766-896k) | license (stringclasses, 15 values) | hash (int64, -9,223,277,421,539,062,000 to 9,223,102,107B) | line_mean (float64, 6.51-99.9) | line_max (int64, 32-997) | alpha_frac (float64, 0.25-0.96) | autogenerated (bool, 1 class) | ratio (float64, 1.5-13.6) | config_test (bool, 2 classes) | has_no_keywords (bool, 2 classes) | few_assignments (bool, 1 class) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
j2ali/FlightScraper
|
Scraper.py
|
1
|
1950
|
from bs4 import BeautifulSoup
import helper
from datetime import datetime
import click
import time
import calendar
#Example values
#START_DATE = datetime(2014, 05, 15)
#END_DATE = datetime(2015, 05, 15)
#DAY_DELTA = 7
#TIMEOUT_SECONDS = 30
#Example Command
#python Scraper.py 2014/05/25 2015/05/15 4 0 YYZ POS
@click.command()
@click.argument('start_date')
@click.argument('end_date')
@click.argument('day_delta')
@click.argument('time_out')
@click.argument('origin_airport')
@click.argument('destination_airport')
def find_flights(start_date, end_date, day_delta, time_out, origin_airport, destination_airport):
start_date = datetime.strptime(start_date, "%Y/%m/%d")
end_date = datetime.strptime(end_date, "%Y/%m/%d")
day_delta = int(day_delta)
time_out = int(time_out)
flight_dates = helper.generate_dates(start_date, end_date, day_delta)
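#Note: helper.generate_dates is assumed here to yield (depart, return)
#date pairs between start_date and end_date; the loop below unpacks
#each pair.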
#There is a new output file for each run.
#Use something like time.ctime(int("1284101485")) to get the date back
filename = calendar.timegm(datetime.utcnow().utctimetuple())
file = open('DataOut/output_'+str(filename)+'.txt', "a")
for flight_date in flight_dates:
(depart_date, return_date) = flight_date
response = helper.hit_the_site(depart_date,
return_date,
origin_airport,
destination_airport)
soup = BeautifulSoup(response)
data = helper.parse_data(soup)
if len(data) == 0:
file.writelines('No data received; might have encountered a captcha')
file.close()
break
for a in data:
print a
file.writelines(a.encode('utf-8'))
# Trying to avoid the captcha here, but it looks like even with a timeout
# over 30 seconds I can only get about 10 hits before it is turned on
time.sleep(time_out)
file.close()
if __name__ == '__main__':
find_flights()
|
bsd-3-clause
| -3,644,755,502,807,923,000
| 31.516667
| 97
| 0.625128
| false
| 3.597786
| false
| false
| false
|
sagarjauhari/BCIpy
|
process_series_files.py
|
1
|
3765
|
#!/usr/bin/env python
# Copyright 2013, 2014 Justis Grant Peters and Sagar Jauhari
# This file is part of BCIpy.
#
# BCIpy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# BCIpy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with BCIpy. If not, see <http://www.gnu.org/licenses/>.
import sys, os
from os.path import isfile, join
import re
import dateutil.tz
import pandas as pd
import numpy as np
from datetime import datetime
# Create dict of machine data
def create_dict_machine_data(raw_dir):
onlyfiles_raw = [ f for f in os.listdir(raw_dir) if isfile(join(raw_dir,f)) ]
pat_raw = re.compile("[0-9]*\.[a-z0-9]*\.rawwave\.csv")
temp_dat_raw = [f.split('.')[0:2] for f in onlyfiles_raw if pat_raw.match(f)]
mach_dict = {i[1]: i[0] for i in temp_dat_raw}
return mach_dict
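# A hypothetical example of the dict built above: files named
# '20130101.abc123.rawwave.csv' and '20130102.def456.rawwave.csv'
# produce {'abc123': '20130101', 'def456': '20130102'}, i.e. an
# id -> date-string mapping.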
def create_raw_incremental(in_file, out_file, time_t, tzinfo=dateutil.tz.tzlocal()):
"Create raw file with incremental miliseconds"
raw = pd.read_csv(in_file, skipinitialspace=True, index_col=False) # avoid index to keep it from sorting
day = time_t[0:4]+"-"+time_t[4:6]+"-"+time_t[6:8]
#print day #debug
# Incoming data has 512Hz samples with timestamps at resolution of one
# second. For each second, convert the first timestamp to epoch time and
# blank out the others so that we can do linear interpolation.
# TODO estimate microseconds on first and last second, to avoid timestretch
# TODO analyze clock skew, since some seconds have more or less samples
# TODO consider a pandas.DatetimeIndex with just a start time and frequency
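# A small illustration with hypothetical values: repeated per-second
# timestamps such as ['10:30:00', '10:30:00', '10:30:01', ...] become
# [1357036200.0, NaN, 1357036201.0, ...] after this loop, and the NaNs
# are then filled in by the linear interpolation during reindexing below.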
prev_time = None
for i,row in raw.iterrows():
timestamp = row['%Time']
if timestamp==prev_time:
raw.set_value(i, '%Time', np.NaN)
else:
timestring = day + ' ' + timestamp + '.0'
dt = datetime.strptime(timestring, '%Y-%m-%d %H:%M:%S.%f')\
.replace(tzinfo=tzinfo) # set specified tz before conversion
# time since UTC 1970-01-01 00:00:00, in seconds
dt = float(dt.strftime('%s.%f'))
raw.set_value(i, '%Time', dt)
prev_time = timestamp
timestring = day + ' ' + prev_time + '.0'
dt = datetime.strptime(timestring, '%Y-%m-%d %H:%M:%S.%f')\
.replace(tzinfo=tzinfo) # set specified tz before conversion
# time since UTC 1970-01-01 00:00:00, in seconds
dt = float(dt.strftime('%s.%f'))
raw.set_value(i, '%Time', dt+1)
# reindex with interpolated timestamps
raw.index = pd.DatetimeIndex(
pd.to_datetime(raw['%Time']\
.convert_objects(convert_numeric=True)\
.interpolate(), unit='s')
).tz_localize('UTC').tz_convert(tzinfo) # convert back to original tz
raw.to_csv(out_file, index=True, cols=['Value'])
return raw
def process_all_in_dir(indir, outdir):
if not os.path.exists(outdir):
os.makedirs(outdir)
mach_dict = create_dict_machine_data(indir)
for i in mach_dict:
file_in = join(indir, mach_dict[i]+"."+i+".rawwave.csv")
print "processing file %s" % file_in
file_out =join(outdir, mach_dict[i]+"."+i+".rawwave_microsec.csv")
create_raw_incremental(file_in,file_out, mach_dict[i])
if __name__ == '__main__':
indir,outdir=sys.argv[1:3]
process_all_in_dir(indir,outdir)
|
gpl-3.0
| 3,405,553,341,204,929,000
| 40.833333
| 108
| 0.655511
| false
| 3.388839
| false
| false
| false
|
IngoBongo/rpg_texual_Rumbo_a
|
Rumbo_A_V0.0.1/rumbo.py
|
1
|
1682
|
# -*- coding: utf-8 -*-
# syntax for Python 2.7
from random import randint
import jugador
from jugador import Jugador
from criaturas import Enemigo
comand = jugador.Comandos
def main():
jug = Jugador()
jug.nombre = raw_input("¿Cuál es tu nombre viajer@? : ")
print "escribe: 'ayuda' para ver una lista de acciones.\n"
print "%s se adentra en una oscura cueva, en busca de aventuras."% jug.nombre
while (jug.salud > 0):
linea = raw_input("> ")
arg = linea.split()
if len(arg) > 0:
comando_valido = False
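# Prefix matching: e.g. typing 'ayu' would match the 'ayuda' command
# (assuming 'ayuda' is a key in jugador.Comandos), since each command
# name is truncated to the input's length before comparison.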
for c in comand.keys():
if arg[0] == c[: len(arg[0])]:
comand[c](jug)
comando_valido = True
break
if not comando_valido:
print "%s no entiende tu sugerencia.\n(escribe 'ayuda' para obtener una lista de obciones.)"% jug.nombre
if __name__ == '__main__':
main()
"""
{Rumbo A... aims to be a textual adventure game}
Copyright (C) {2017} {by Igor Iglesia Gonzalez}
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>
"""
|
gpl-3.0
| 850,313,411,132,253,200
| 28.491228
| 120
| 0.650803
| false
| 3.389113
| false
| false
| false
|
johnmgregoire/NanoCalorimetry
|
accalinitcode_Sn1.py
|
1
|
2899
|
import numpy, h5py, pylab
from PnSC_h5io import *
from matplotlib.ticker import FuncFormatter
def myexpformat(x, pos):
for ndigs in range(5):
lab=(('%.'+'%d' %ndigs+'e') %x).replace('e+0','e').replace('e+','e').replace('e0','').replace('e-0','e-')
if eval(lab)==x:
return lab
return lab
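# For example (hypothetical inputs, traced by hand through the logic
# above): myexpformat(0.002, None) -> '2e-3', and
# myexpformat(1500.0, None) -> '1.5e3'.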
ExpTickLabels=FuncFormatter(myexpformat)
p='C:/Users/JohnnyG/Documents/PythonCode/Vlassak/NanoCalorimetry/20110714_SnACcal.h5'
#f=h5py.File(p,mode='r')
seg=3
exp='Sn_1kHz'
skip=200
skipe=100
f, hpp=experimenthppaths(p, exp)
daqHz=f[hpp[0]].attrs['daqHz']
f.close()
hpp=['/Calorimetry/Sn_1kHz/measurement/HeatProgram/cell29_17.5dc17ac_269ms_1kHz_1_of_1', '/Calorimetry/Sn_1kHz/measurement/HeatProgram/cell7_17.5dc17ac_269ms_1kHz_1_of_1', '/Calorimetry/Sn_1kHz/measurement/HeatProgram/cell7_17.5dc17ac_269ms_1kHzagain_1_of_1']
labs=['1kHz, 10Ohm Res','slow ramp, scan1', 'slow ramp, scan2']
targetf=1.e3
#labs=[hp.rpartition('/')[2] for hp in hpp]
nplots=4
pylab.figure(figsize=(20, 8))
for i, (hp, title) in enumerate(zip(hpp, labs)):
hpsdl=CreateHeatProgSegDictList(p, exp, hp.rpartition('/')[2])
sampv=hpsdl[seg]['samplevoltage'][0][skip:-1*skipe]
diffv=hpsdl[seg]['samplehighpassacvoltage'][0][skip:-1*skipe]
t=hpsdl[seg]['cycletime'][0][skip:-1*skipe]
pylab.subplot(len(hpp), nplots, nplots*i+1)
sy=sampv
pylab.plot((t*1000.)[:4000], sy[:4000], 'g.', markersize=1)
pylab.gca().yaxis.set_major_formatter(ExpTickLabels)
#pylab.ylim(-620, 620)
pylab.title(title)
pylab.ylabel('sample channel V')
pylab.subplot(len(hpp), nplots, nplots*i+2)
y=diffv
pylab.plot((t*1000.)[:4000], y[:4000], 'r.', markersize=1)
pylab.gca().yaxis.set_major_formatter(ExpTickLabels)
#pylab.ylim(-620, 620)
pylab.title(title)
pylab.ylabel('filtered channel, V')
pylab.subplot(len(hpp), nplots, nplots*i+3)
fft=numpy.fft.fft(y)
freq=numpy.fft.fftfreq(len(y))*daqHz
pylab.loglog(freq[:len(freq)//2], numpy.abs(fft[:len(freq)//2]))
pylab.ylabel('filtered channel fft mag.')
pylab.subplot(len(hpp), nplots, nplots*i+4)
pylab.loglog(freq[:len(freq)//2], numpy.abs(fft[:len(freq)//2]))
pylab.xlim(.9*targetf, 4*targetf)
pylab.xticks([targetf, 2.*targetf, 3.*targetf])
pylab.ylabel('filtered channel fft mag.')
pylab.subplot(len(hpp), nplots, nplots*i+1)
pylab.xlabel('time (ms)')
pylab.subplot(len(hpp), nplots, nplots*i+2)
pylab.xlabel('time (ms)')
pylab.subplot(len(hpp), nplots, nplots*i+3)
pylab.xlabel('freq (Hz)')
pylab.subplot(len(hpp), nplots, nplots*i+4)
pylab.xlabel('freq (Hz)')
pylab.suptitle('response for 10mAdc+9mAac into 10$\Omega$')
pylab.subplots_adjust(left=.07, right=.97, wspace=.35, hspace=.25)
if True:
pylab.savefig(os.path.join('C:/Users/JohnnyG/Documents/HarvardWork/ACcal/20110714_Sn_analysis', '_'.join(('FFT', exp)))+'.png')
pylab.show()
|
bsd-3-clause
| -1,861,564,002,955,912,200
| 34.353659
| 259
| 0.673681
| false
| 2.540754
| false
| false
| false
|
nicko7i/vcnc
|
api-python/velstor/vclc/__main__.py
|
1
|
3489
|
#!python3.5
# For command aliases prior to 3.2 - https://bugs.python.org/issue25299
#
# https://pythonconquerstheuniverse.wordpress.com/2011/08/29/lambda_tutorial/
from __future__ import print_function
import sys
import re
import json
import requests
import errno
from velstor.restapi import Session
from functools import partial
from velstor.vclc.vclc_parser import vclc_parser
from velstor.vclc.handler import Handler
from velstor.vclc.handler import error_response
from velstor.vclc.vClcException import vClcException
print_error = partial(print, file=sys.stderr)
#
# Yeah, yeah, globals are bad form...
#
quiet = False
def main(args=None):
"""The main routine."""
if args is None:
args = sys.argv[1:]
with Session() as session:
handler = Handler(session)
parser = vclc_parser(handler)
#
try:
global quiet
results = parser.parse_args(args, handler)
quiet = results.quiet
return results.action()
except requests.exceptions.RequestException as e:
#
# Requests raised an exception. Probably couldn't reach the vCNC
# server. There is no HTTP code for this error, so we adopt 504,
# which is similar.
#
# Yes, it would have been cooler to have done this with a single
# RE.
#
details = str(e)
match_host = re.search("host='(\S+)'", details)
match_port = re.search("port=(\d+)", details)
match_error = re.search('NewConnectionError', details)
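# For reference, a requests connection failure typically stringifies
# roughly like (hypothetical example):
#   "HTTPConnectionPool(host='vcnc', port=6130): Max retries exceeded
#    with url: /... (Caused by NewConnectionError(...))"
# which is what the three searches above pick apart.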
suffix = '.'
#
# If the server happens to match the vCNC server's default value,
# then add the additional suggestion to check configuration.
#
if match_host and match_port and match_error:
host = match_host.group(1)
port = match_port.group(1)
if host == 'vcnc' and port == "6130":
suffix = ''.join([
' Did you mean to set a command line switch',
' or environment variable?'])
return error_response('Could not reach vCNC server at '
+ match_host.group(1)
+ ':'
+ match_port.group(1)
+ suffix,
http_status=504,
error_sym='EHOSTDOWN')
else:
#
# We don't really know what happened. Just dump the raw data
# as the message.
#
return error_response(details)
#
#
except vClcException:
#
# Calling 'vclc' with no arguments isn't trapped as an error by
# argparse.
#
m = parser.format_usage()
m = re.sub('\n[ ]+', ' ', m)
return error_response(m, http_status=400, error_sym='EINVAL')
except SystemExit:
raise
except KeyboardInterrupt:
sys.exit(errno.EINVAL)
except BaseException:
raise
if __name__ == "__main__":
(exit_code, response) = main()
if not quiet:
print(json.dumps(response, sort_keys=True, indent=2))
sys.exit(127 if (exit_code > 127) else exit_code)
|
apache-2.0
| -6,190,060,355,032,620,000
| 33.205882
| 78
| 0.524792
| false
| 4.302096
| false
| false
| false
|
FernanOrtega/DAT210x
|
Module6/assignment1.py
|
1
|
5367
|
import matplotlib as mpl
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
import time
#
# INFO: Your Parameters.
# You can adjust them after completing the lab
C = 1
kernel = 'linear'
gamma = 'auto'
iterations = 5000 # TODO: Change to 200000 once you get to Question#2
#
# INFO: You can set this to false if you want to
# draw the full square matrix
FAST_DRAW = True
def drawPlots(model, X_train, X_test, y_train, y_test, wintitle='Figure 1'):
# INFO: A convenience function for you
# You can use this to break any higher-dimensional space down
# And view cross sections of it.
# If this line throws an error, use plt.style.use('ggplot') instead
mpl.style.use('ggplot') # Look Pretty
padding = 3
resolution = 0.5
max_2d_score = 0
y_colors = ['#ff0000', '#00ff00', '#0000ff']
my_cmap = mpl.colors.ListedColormap(['#ffaaaa', '#aaffaa', '#aaaaff'])
colors = [y_colors[i] for i in y_train]
num_columns = len(X_train.columns)
fig = plt.figure()
fig.canvas.set_window_title(wintitle)
cnt = 0
for col in range(num_columns):
for row in range(num_columns):
# Easy out
if FAST_DRAW and col > row:
cnt += 1
continue
ax = plt.subplot(num_columns, num_columns, cnt + 1)
plt.xticks(())
plt.yticks(())
# Intersection:
if col == row:
plt.text(0.5, 0.5, X_train.columns[row], verticalalignment='center', horizontalalignment='center', fontsize=12)
cnt += 1
continue
# Only select two features to display, then train the model
X_train_bag = X_train.ix[:, [row,col]]
X_test_bag = X_test.ix[:, [row,col]]
model.fit(X_train_bag, y_train)
# Create a mesh to plot in
x_min, x_max = X_train_bag.ix[:, 0].min() - padding, X_train_bag.ix[:, 0].max() + padding
y_min, y_max = X_train_bag.ix[:, 1].min() - padding, X_train_bag.ix[:, 1].max() + padding
xx, yy = np.meshgrid(np.arange(x_min, x_max, resolution),
np.arange(y_min, y_max, resolution))
# Plot Boundaries
plt.xlim(xx.min(), xx.max())
plt.ylim(yy.min(), yy.max())
# Prepare the contour
Z = model.predict(np.c_[xx.ravel(), yy.ravel()])
Z = Z.reshape(xx.shape)
plt.contourf(xx, yy, Z, cmap=my_cmap, alpha=0.8)
plt.scatter(X_train_bag.ix[:, 0], X_train_bag.ix[:, 1], c=colors, alpha=0.5)
score = round(model.score(X_test_bag, y_test) * 100, 3)
plt.text(0.5, 0, "Score: {0}".format(score), transform = ax.transAxes, horizontalalignment='center', fontsize=8)
max_2d_score = score if score > max_2d_score else max_2d_score
cnt += 1
print "Max 2D Score: ", max_2d_score
fig.set_tight_layout(True)
def benchmark(model, X_train, X_test, y_train, y_test, wintitle='Figure 1'):
print '\n\n' + wintitle + ' Results'
s = time.time()
for i in range(iterations):
#
# TODO: train the classifier on the training data / labels:
#
model.fit(X_train, y_train)
print "{0} Iterations Training Time: ".format(iterations), time.time() - s
s = time.time()
for i in range(iterations):
#
# TODO: score the classifier on the testing data / labels:
#
# .. your code here ..
score = model.score(X_test, y_test)
print "{0} Iterations Scoring Time: ".format(iterations), time.time() - s
print "High-Dimensionality Score: ", round((score*100), 3)
#
# TODO: Load up the wheat dataset into dataframe 'X'
# Verify you did it properly.
# Indices shouldn't be doubled, and there shouldn't be weird headers...
#
X = pd.read_csv('Datasets/wheat.data', index_col=0)
# INFO: An easy way to show which rows have nans in them
# print X[pd.isnull(X).any(axis=1)]
#
# TODO: Go ahead and drop any row with a nan
#
X = X.dropna(axis=0)
# print X[pd.isnull(X).any(axis=1)]
#
# INFO: In the future, you might try setting the nan values to the
# mean value of that column; the mean should only be calculated for
# the specific class rather than across all classes, now that you
# have the labels. A sketch of this follows below.
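# A minimal, commented-out sketch of that idea (illustrative only; the
# NaN rows were already dropped above):
#
# class_means = X.groupby('wheat_type').transform('mean')
# X = X.fillna(class_means)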
#
# TODO: Copy the labels out of the dset into variable 'y' then Remove
# them from X. Encode the labels, using the .map() trick we showed
# you in Module 5 -- canadian:0, kama:1, and rosa:2
#
print X['wheat_type'].unique()
y = X['wheat_type'].map({'kama':0, 'canadian':1, 'rosa':2})
X = X.drop(labels=['wheat_type'], axis=1)
#
# TODO: Split your data into test / train sets
# Your test size can be 30% with random_state 7.
# Use variable names: X_train, X_test, y_train, y_test
#
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=7)
#
# TODO: Create an SVC classifier named svc
# Use a linear kernel, and set the C value to C
#
from sklearn.svm import SVC
svc = SVC(kernel=kernel, C=C, gamma=gamma)
#
# TODO: Create an KNeighbors classifier named knn
# Set the neighbor count to 5
#
from sklearn.neighbors import KNeighborsClassifier
knn = KNeighborsClassifier(n_neighbors=5)
benchmark(knn, X_train, X_test, y_train, y_test, 'KNeighbors')
drawPlots(knn, X_train, X_test, y_train, y_test, 'KNeighbors')
benchmark(svc, X_train, X_test, y_train, y_test, 'SVC')
drawPlots(svc, X_train, X_test, y_train, y_test, 'SVC')
plt.show()
#
# BONUS: After submitting your answers, toy around with
# gamma, kernel, and C.
|
mit
| -277,696,860,954,413,630
| 26.106061
| 119
| 0.646916
| false
| 3.035633
| true
| false
| false
|
juliancantillo/royal-films
|
royalfilms/cinemas/migrations/0003_auto_20160305_0344.py
|
1
|
1291
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-03-05 03:44
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import uuid
class Migration(migrations.Migration):
dependencies = [
('cinemas', '0002_auto_20160303_0631'),
]
operations = [
migrations.CreateModel(
name='Show',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('uuid', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, unique=True)),
('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created at')),
('updated_at', models.DateTimeField(auto_now=True, verbose_name='Updated at')),
('time', models.TimeField()),
],
options={
'abstract': False,
},
),
migrations.RemoveField(
model_name='function',
name='time',
),
migrations.AddField(
model_name='show',
name='function',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='cinemas.Function'),
),
]
|
mit
| 6,041,118,326,002,203,000
| 32.102564
| 114
| 0.565453
| false
| 4.164516
| false
| false
| false
|
christophercrouzet/hienoi
|
demos/orbit.py
|
1
|
3478
|
#!/usr/bin/env python
"""Particles orbiting around the origin.
Features:
- user attributes: particles are initialized within a radius from the
origin and are, at each simulation step, updated to orbit around the origin.
- NumPy: operations are done directly on the particle data for increased
performance.
"""
import math
import os
import sys
import numpy
_HERE = os.path.abspath(os.path.dirname(__file__))
sys.path.insert(0, os.path.abspath(os.path.join(_HERE, os.pardir)))
import hienoi.application
from hienoi import Vector2f, Vector4f
# Mass of the central object around which particles are orbiting.
_CENTRAL_MASS = 50.0
# Number of particles.
_COUNT = 1000
# Minimum radius of the disc used to distribute the particles.
_MIN_RADIUS = 2.0
# Maximum radius of the disc used to distribute the particles.
_MAX_RADIUS = 30.0
# Mass of each particle.
_MASS = 2.0
# Mass variance for each particle.
_MASS_VARIANCE = 1.0
# Size of a particle, relative to its mass.
_SIZE = 0.2
# Squared distance to the origin where particles are drawn in the 'far color'.
_FAR_SQUARED_DISTANCE = 500.0
# Color to use for far particles.
_FAR_COLOR = Vector4f(0.0, 1.0, 1.0, 1.0)
# Color to use for near particles.
_NEAR_COLOR = Vector4f(1.0, 0.0, 0.0, 1.0)
def initialize_particle_simulation(sim):
"""Callback to initialize the particle simulation state.
Parameters
----------
sim : hienoi.dynamics.ParticleSimulation
Particle simulation.
"""
numpy.random.seed(_COUNT + 611)
# Add a few particles at random positions within a given radius and with
# initial velocities suitable for elliptical orbiting.
particles = sim.add_particles(_COUNT)
data = particles.data
r = numpy.random.uniform(low=_MIN_RADIUS, high=_MAX_RADIUS, size=_COUNT)
t = numpy.random.uniform(high=2.0 * numpy.pi, size=_COUNT)
data['position'][:, 0] = r * numpy.cos(t)
data['position'][:, 1] = r * numpy.sin(t)
data['mass'] = numpy.random.uniform(low=_MASS - _MASS_VARIANCE,
high=_MASS + _MASS_VARIANCE,
size=_COUNT)
speeds = numpy.sqrt(data['mass'] / r)
data['velocity'][:, 0] = data['position'][:, 1] * speeds
data['velocity'][:, 1] = -data['position'][:, 0] * speeds
data['size'] = data['mass'] * _SIZE / _MASS
def update_particle_simulation(sim):
"""Callback to update the particle simulation state.
Parameters
----------
sim : hienoi.dynamics.ParticleSimulation
Particle simulation.
"""
data = sim.particles.data
squared_distances = numpy.sum(data['position'][numpy.newaxis, :] ** 2,
axis=-1)
squared_distances = squared_distances.reshape(-1, 1)
data['force'] -= (data['position']
* _CENTRAL_MASS
* data['mass'][:, numpy.newaxis]
/ squared_distances)
data['color'] = (_FAR_COLOR - _NEAR_COLOR)
data['color'] *= squared_distances / _FAR_SQUARED_DISTANCE
data['color'] += _NEAR_COLOR
def run():
"""Run the application."""
return hienoi.application.run(
gui = {
'window_title': 'orbit',
'show_grid': False,
},
particle_simulation={
'initialize_callback': initialize_particle_simulation,
'postsolve_callback': update_particle_simulation,
})
if __name__ == '__main__':
sys.exit(run())
|
mit
| -1,000,051,599,808,256,300
| 28.726496
| 78
| 0.625359
| false
| 3.567179
| false
| false
| false
|
Ratfink/asciiplayback
|
src/asciiplaybackgtk.py
|
1
|
2940
|
#!/usr/bin/env python
import os.path
import sys
from gi.repository import Gtk, Gio, Gdk, GObject
from asciiplayback import *
from asciimation import *
from gtkasciiplayer import *
class ASCIIPlaybackGtk(Gtk.Window):
def __init__(self):
Gtk.Window.__init__(self, title="ASCIIPlayback")
self.set_default_size(0, 0)
if len(sys.argv) > 1:
self.filename = sys.argv[1]
self.player = ASCIIPlayback(ASCIImation(filename=self.filename))
else:
self.filename = ""
blank_asciimation = ASCIImation(font_family='monospace', size=[19, 3])
blank_asciimation.frames.append(Frame(text='\n No file loaded! \n'))
self.player = ASCIIPlayback(asciimation=blank_asciimation)
self.hb = Gtk.HeaderBar()
self.update_headerbar()
button = Gtk.Button(image=Gtk.Image.new_from_gicon(Gio.ThemedIcon(
name="document-open-symbolic"),
Gtk.IconSize.BUTTON))
button.connect("clicked", self.do_open)
self.hb.pack_start(button)
box = Gtk.Box(orientation=Gtk.Orientation.VERTICAL)
self.video = GtkASCIIPlayer(self.player)
box.pack_start(self.video, True, True, 0)
ab = Gtk.ActionBar()
self.controls = GtkASCIIControls(self.player)
ab.set_center_widget(self.controls)
box.pack_end(ab, False, False, 0)
self.add(box)
def do_open(self, button):
dialog = Gtk.FileChooserDialog("Open", self,
Gtk.FileChooserAction.OPEN,
(Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL,
Gtk.STOCK_OPEN, Gtk.ResponseType.OK))
self.add_filters(dialog)
response = dialog.run()
if response == Gtk.ResponseType.OK:
self.filename = dialog.get_filename()
self.player = ASCIIPlayback(ASCIImation(filename=self.filename))
self.video.player = self.player
self.controls.player = self.player
self.update_headerbar()
elif response == Gtk.ResponseType.CANCEL:
pass
dialog.destroy()
def add_filters(self, dialog):
filter_json = Gtk.FileFilter()
filter_json.set_name("JSON files")
filter_json.add_mime_type("application/json")
dialog.add_filter(filter_json)
filter_any = Gtk.FileFilter()
filter_any.set_name("All files")
filter_any.add_pattern("*")
dialog.add_filter(filter_any)
def update_headerbar(self):
self.hb.props.show_close_button = True
self.hb.props.title = "ASCIIPlayback"
self.hb.props.subtitle = os.path.basename(self.filename)
self.hb.props.has_subtitle = True
self.set_titlebar(self.hb)
def run():
win = ASCIIPlaybackGtk()
win.connect("delete-event", Gtk.main_quit)
win.show_all()
Gtk.main()
if __name__ == '__main__':
run()
|
mit
| -1,430,004,682,715,594,200
| 30.956522
| 82
| 0.608163
| false
| 3.611794
| false
| false
| false
|
iworm/taobao-image-splitter
|
taobao-image-splitter.py
|
1
|
7554
|
#!/usr/bin/python3
# coding=utf-8
"""
The MIT License (MIT)
Copyright (c) 2015 iworm roger@iworm.net
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import glob
import os
import random
import sys
from PIL import Image
class TaobaoImageSplitter(object):
root_path = ''
watermark_img = ''
def __init__(self, root_path):
self.root_path = root_path
watermark_path = os.path.join(root_path, 'watermark.png')
self.watermark_img = self._get_watermark_image(watermark_path)
def _get_watermark_image(self, watermark_path):
watermark_img = Image.open(watermark_path)
watermark_img = watermark_img.convert('RGBA')
return watermark_img
def calc_resized_size(self, original_size, width):
original_width = original_size[0]
original_height = original_size[1]
ratio = original_width / original_height
new_height = (int)(width / ratio)
return (width, new_height)
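# e.g. (hypothetical values) calc_resized_size((1600, 1200), 800)
# returns (800, 600): the height is scaled by the same ratio as the
# width, preserving the aspect ratio.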
def calc_watermark_position(self, size):
width = int(size[0]*0.5)
height = int(size[1]*0.5)
return (width, height)
def resize_to_specified_width(self, imgs, min_width, watermark):
resized_imgs = []
for img in imgs:
new_size = self.calc_resized_size(img.size, min_width)
watermark_layer = Image.new('RGBA', new_size)
watermark_position = self.calc_watermark_position(new_size)
watermark_layer.paste(watermark, watermark_position)
resized = img.resize(new_size, Image.ANTIALIAS)
resized = Image.composite(watermark_layer, resized, watermark_layer)
resized_imgs = resized_imgs + [resized]
return resized_imgs
def get_all_images(self, path):
all_imgs = []
for filename in glob.glob(path):
all_imgs = all_imgs + [Image.open(filename)]
return all_imgs
def get_min_width(self, imgs):
min_width = 800
for img in imgs:
if(img.size[0] < min_width):
min_width = img.size[0]
return min_width
def get_total_height(self, imgs):
total_height = 0
for img in imgs:
total_height += img.size[1]
return total_height
def create_new_image_and_paste(self, size, imgs_to_paste):
final_img = Image.new('RGB', size)
current_height = 0
for img in imgs_to_paste:
final_img.paste(img, (0, current_height))
current_height += img.size[1]
return final_img
def get_random_height(self):
min_crop_height = 300
max_crop_height = 500
return random.randint(min_crop_height, max_crop_height)
def split_image(self, img):
cropped_height = 0
cropped_imgs = []
height = self.get_random_height()
while(cropped_height < img.size[1]):
if(img.size[1] - cropped_height - height < 300):
height = img.size[1] - cropped_height
cropped = img.crop(
(0, cropped_height, img.size[0], cropped_height + height))
cropped_imgs = cropped_imgs + [cropped]
cropped_height += height
height = self.get_random_height()
return cropped_imgs
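# A hypothetical run on a 1000px-tall image: with random heights of 450
# and then 350, the remainder after the second cut would be 200px (< 300),
# so the second slice is extended and the pieces come out as 450px + 550px.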
def delete_all_files(self, path):
for filename in os.listdir(path):
current_file = os.path.join(path, filename)
if os.path.isfile(current_file):
os.remove(current_file)
def get_main_output_path(self, path):
output_path = os.path.join(path, 'main')
if os.path.exists(output_path):
self.delete_all_files(output_path)
else:
os.makedirs(output_path)
return output_path
def get_mobile_output_path(self, path):
output_path = os.path.join(path, 'mobile')
if os.path.exists(output_path):
self.delete_all_files(output_path)
else:
os.makedirs(output_path)
return output_path
def get_splitted_output_path(self, path):
output_path = os.path.join(path, 'splitted')
if os.path.exists(output_path):
self.delete_all_files(output_path)
else:
os.makedirs(output_path)
return output_path
def save(self, cropped_imgs, output_path):
index = 0
for img in cropped_imgs:
index += 1
output_file_name = os.path.join(output_path, str(index) + '.jpg')
img.save(output_file_name)
def process(self):
for directory in os.listdir(self.root_path):
current_path = os.path.join(self.root_path, directory)
if not os.path.isdir(current_path):
continue
src_img_path = os.path.join(current_path, 'src')
all_imgs = self.get_all_images(os.path.join(src_img_path, '*.jpg'))
min_width = self.get_min_width(all_imgs)
resized_imgs = self.resize_to_specified_width(
all_imgs, min_width, self.watermark_img)
self.save(resized_imgs, self.get_mobile_output_path(current_path))
total_height = self.get_total_height(resized_imgs)
final_img = self.create_new_image_and_paste(
(min_width, total_height), resized_imgs)
splitted_img = self.split_image(final_img)
self.save(splitted_img, self.get_splitted_output_path(current_path))
main_src_img_path = os.path.join(current_path, 'main_src')
all_main_imgs = self.get_all_images(os.path.join(main_src_img_path, '*.jpg'))
min_main_width = self.get_min_width(all_main_imgs)
resized_main_imgs = self.resize_to_specified_width(
all_main_imgs, min_main_width, self.watermark_img)
self.save(resized_main_imgs, self.get_main_output_path(current_path))
def print_usage(script_name):
print('''Usage:
*nix System:
python3 {0} '/Users/username/path/'
Windows System:
python3 {0} 'd:\\images\\'
'''.format(script_name))
if __name__ == '__main__':
if len(sys.argv) != 2:
print_usage(sys.argv[0])
quit()
if not os.path.isdir(sys.argv[1]):
print_usage(sys.argv[0])
quit()
a = TaobaoImageSplitter(sys.argv[1])
a.process()
|
mit
| -2,058,913,852,656,968,000
| 29.344398
| 89
| 0.595181
| false
| 3.642237
| false
| false
| false
|
nemesiscodex/JukyOS-sugar
|
extensions/cpsection/power/view.py
|
1
|
4563
|
# Copyright (C) 2008, OLPC
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import gtk
from gettext import gettext as _
from sugar.graphics import style
from jarabe.controlpanel.sectionview import SectionView
from jarabe.controlpanel.inlinealert import InlineAlert
class Power(SectionView):
def __init__(self, model, alerts):
SectionView.__init__(self)
self._model = model
self.restart_alerts = alerts
self._automatic_pm_valid = True
self._automatic_pm_change_handler = None
self.set_border_width(style.DEFAULT_SPACING * 2)
self.set_spacing(style.DEFAULT_SPACING)
group = gtk.SizeGroup(gtk.SIZE_GROUP_HORIZONTAL)
self._automatic_pm_alert_box = gtk.HBox(spacing=style.DEFAULT_SPACING)
separator_pm = gtk.HSeparator()
self.pack_start(separator_pm, expand=False)
separator_pm.show()
label_pm = gtk.Label(_('Power management'))
label_pm.set_alignment(0, 0)
self.pack_start(label_pm, expand=False)
label_pm.show()
box_pm = gtk.VBox()
box_pm.set_border_width(style.DEFAULT_SPACING * 2)
box_pm.set_spacing(style.DEFAULT_SPACING)
box_automatic_pm = gtk.HBox(spacing=style.DEFAULT_SPACING)
label_automatic_pm = gtk.Label(
_('Automatic power management (increases battery life)'))
label_automatic_pm.set_alignment(0, 0.5)
self._automatic_button = gtk.CheckButton()
self._automatic_button.set_alignment(0, 0)
box_automatic_pm.pack_start(self._automatic_button, expand=False)
box_automatic_pm.pack_start(label_automatic_pm, expand=False)
self._automatic_button.show()
label_automatic_pm.show()
group.add_widget(label_automatic_pm)
box_pm.pack_start(box_automatic_pm, expand=False)
box_automatic_pm.show()
self._automatic_pm_alert = InlineAlert()
label_automatic_pm_error = gtk.Label()
group.add_widget(label_automatic_pm_error)
self._automatic_pm_alert_box.pack_start(label_automatic_pm_error,
expand=False)
label_automatic_pm_error.show()
self._automatic_pm_alert_box.pack_start(self._automatic_pm_alert,
expand=False)
box_pm.pack_end(self._automatic_pm_alert_box, expand=False)
self._automatic_pm_alert_box.show()
if 'automatic_pm' in self.restart_alerts:
self._automatic_pm_alert.props.msg = self.restart_msg
self._automatic_pm_alert.show()
self.pack_start(box_pm, expand=False)
box_pm.show()
self.setup()
def setup(self):
try:
automatic_state = self._model.get_automatic_pm()
except Exception, detail:
self._automatic_pm_alert.props.msg = detail
self._automatic_pm_alert.show()
else:
self._automatic_button.set_active(automatic_state)
self._automatic_pm_valid = True
self.needs_restart = False
self._automatic_pm_change_handler = self._automatic_button.connect( \
'toggled', self.__automatic_pm_toggled_cb)
def undo(self):
self._automatic_button.disconnect(self._automatic_pm_change_handler)
self._model.undo()
self._automatic_pm_alert.hide()
def _validate(self):
if self._automatic_pm_valid:
self.props.is_valid = True
else:
self.props.is_valid = False
def __automatic_pm_toggled_cb(self, widget, data=None):
state = widget.get_active()
try:
self._model.set_automatic_pm(state)
except Exception, detail:
print detail
self._automatic_pm_alert.props.msg = detail
else:
self._automatic_pm_valid = True
self._validate()
return False
|
gpl-2.0
| 4,088,517,647,482,016,300
| 36.401639
| 78
| 0.636862
| false
| 3.834454
| false
| false
| false
|
ESOedX/edx-platform
|
lms/djangoapps/discussion/django_comment_client/base/tests.py
|
1
|
90626
|
# pylint: skip-file
# -*- coding: utf-8 -*-
"""Tests for django comment client views."""
from __future__ import absolute_import
import json
import logging
from contextlib import contextmanager
import ddt
import mock
import six
from django.contrib.auth.models import User
from django.core.management import call_command
from django.test.client import RequestFactory
from django.urls import reverse
from eventtracking.processors.exceptions import EventEmissionExit
from mock import ANY, Mock, patch
from opaque_keys.edx.keys import CourseKey
from six import text_type
from six.moves import range
from common.test.utils import MockSignalHandlerMixin, disable_signal
from course_modes.models import CourseMode
from course_modes.tests.factories import CourseModeFactory
from lms.djangoapps.discussion.django_comment_client.base import views
from lms.djangoapps.discussion.django_comment_client.tests.group_id import (
CohortedTopicGroupIdTestMixin,
GroupIdAssertionMixin,
NonCohortedTopicGroupIdTestMixin
)
from lms.djangoapps.discussion.django_comment_client.tests.unicode import UnicodeTestMixin
from lms.djangoapps.discussion.django_comment_client.tests.utils import CohortedTestCase, ForumsEnableMixin
from lms.djangoapps.teams.tests.factories import CourseTeamFactory, CourseTeamMembershipFactory
from openedx.core.djangoapps.course_groups.cohorts import set_course_cohorted
from openedx.core.djangoapps.course_groups.tests.helpers import CohortFactory
from openedx.core.djangoapps.django_comment_common.comment_client import Thread
from openedx.core.djangoapps.django_comment_common.models import (
FORUM_ROLE_STUDENT,
CourseDiscussionSettings,
Role,
assign_role
)
from openedx.core.djangoapps.django_comment_common.utils import (
ThreadContext,
seed_permissions_roles,
set_course_discussion_settings
)
from openedx.core.djangoapps.waffle_utils.testutils import WAFFLE_TABLES
from student.roles import CourseStaffRole, UserBasedRole
from student.tests.factories import CourseAccessRoleFactory, CourseEnrollmentFactory, UserFactory
from track.middleware import TrackMiddleware
from track.views import segmentio
from track.views.tests.base import SEGMENTIO_TEST_USER_ID, SegmentIOTrackingTestCaseBase
from util.testing import UrlResetMixin
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase, SharedModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory, check_mongo_calls
from .event_transformers import ForumThreadViewedEventTransformer
log = logging.getLogger(__name__)
CS_PREFIX = "http://localhost:4567/api/v1"
QUERY_COUNT_TABLE_BLACKLIST = WAFFLE_TABLES
# pylint: disable=missing-docstring
class MockRequestSetupMixin(object):
def _create_response_mock(self, data):
return Mock(
text=json.dumps(data),
json=Mock(return_value=data),
status_code=200
)
def _set_mock_request_data(self, mock_request, data):
mock_request.return_value = self._create_response_mock(data)
@patch('openedx.core.djangoapps.django_comment_common.comment_client.utils.requests.request', autospec=True)
class CreateThreadGroupIdTestCase(
MockRequestSetupMixin,
CohortedTestCase,
CohortedTopicGroupIdTestMixin,
NonCohortedTopicGroupIdTestMixin
):
cs_endpoint = "/threads"
def call_view(self, mock_request, commentable_id, user, group_id, pass_group_id=True):
self._set_mock_request_data(mock_request, {})
request_data = {"body": "body", "title": "title", "thread_type": "discussion"}
if pass_group_id:
request_data["group_id"] = group_id
request = RequestFactory().post("dummy_url", request_data)
request.user = user
request.view_name = "create_thread"
return views.create_thread(
request,
course_id=six.text_type(self.course.id),
commentable_id=commentable_id
)
def test_group_info_in_response(self, mock_request):
response = self.call_view(
mock_request,
"cohorted_topic",
self.student,
None
)
self._assert_json_response_contains_group_info(response)
@patch('openedx.core.djangoapps.django_comment_common.comment_client.utils.requests.request', autospec=True)
@disable_signal(views, 'thread_edited')
@disable_signal(views, 'thread_voted')
@disable_signal(views, 'thread_deleted')
class ThreadActionGroupIdTestCase(
MockRequestSetupMixin,
CohortedTestCase,
GroupIdAssertionMixin
):
def call_view(
self,
view_name,
mock_request,
user=None,
post_params=None,
view_args=None
):
self._set_mock_request_data(
mock_request,
{
"user_id": str(self.student.id),
"group_id": self.student_cohort.id,
"closed": False,
"type": "thread",
"commentable_id": "non_team_dummy_id"
}
)
request = RequestFactory().post("dummy_url", post_params or {})
request.user = user or self.student
request.view_name = view_name
return getattr(views, view_name)(
request,
course_id=six.text_type(self.course.id),
thread_id="dummy",
**(view_args or {})
)
def test_update(self, mock_request):
response = self.call_view(
"update_thread",
mock_request,
post_params={"body": "body", "title": "title"}
)
self._assert_json_response_contains_group_info(response)
def test_delete(self, mock_request):
response = self.call_view("delete_thread", mock_request)
self._assert_json_response_contains_group_info(response)
def test_vote(self, mock_request):
response = self.call_view(
"vote_for_thread",
mock_request,
view_args={"value": "up"}
)
self._assert_json_response_contains_group_info(response)
response = self.call_view("undo_vote_for_thread", mock_request)
self._assert_json_response_contains_group_info(response)
def test_flag(self, mock_request):
response = self.call_view("flag_abuse_for_thread", mock_request)
self._assert_json_response_contains_group_info(response)
response = self.call_view("un_flag_abuse_for_thread", mock_request)
self._assert_json_response_contains_group_info(response)
def test_pin(self, mock_request):
response = self.call_view(
"pin_thread",
mock_request,
user=self.moderator
)
self._assert_json_response_contains_group_info(response)
response = self.call_view(
"un_pin_thread",
mock_request,
user=self.moderator
)
self._assert_json_response_contains_group_info(response)
def test_openclose(self, mock_request):
response = self.call_view(
"openclose_thread",
mock_request,
user=self.moderator
)
self._assert_json_response_contains_group_info(
response,
lambda d: d['content']
)
class ViewsTestCaseMixin(object):
def set_up_course(self, module_count=0):
"""
Creates a course, optionally with module_count discussion modules, and
a user with appropriate permissions.
"""
# create a course
self.course = CourseFactory.create(
org='MITx', course='999',
discussion_topics={"Some Topic": {"id": "some_topic"}},
display_name='Robot Super Course',
)
self.course_id = self.course.id
# add some discussion modules
for i in range(module_count):
ItemFactory.create(
parent_location=self.course.location,
category='discussion',
discussion_id='id_module_{}'.format(i),
discussion_category=u'Category {}'.format(i),
discussion_target=u'Discussion {}'.format(i)
)
# seed the forums permissions and roles
call_command('seed_permissions_roles', six.text_type(self.course_id))
# Patch the comment client user save method so it does not try
# to create a new cc user when creating a django user
with patch('student.models.cc.User.save'):
uname = 'student'
email = 'student@edx.org'
self.password = 'test'
# Create the user and make them active so we can log them in.
self.student = User.objects.create_user(uname, email, self.password)
self.student.is_active = True
self.student.save()
# Add a discussion moderator
self.moderator = UserFactory.create(password=self.password)
# Enroll the student in the course
CourseEnrollmentFactory(user=self.student,
course_id=self.course_id)
# Enroll the moderator and give them the appropriate roles
CourseEnrollmentFactory(user=self.moderator, course_id=self.course.id)
self.moderator.roles.add(Role.objects.get(name="Moderator", course_id=self.course.id))
assert self.client.login(username='student', password=self.password)
def _setup_mock_request(self, mock_request, include_depth=False):
"""
Ensure that mock_request returns the data necessary to make views
function correctly
"""
data = {
"user_id": str(self.student.id),
"closed": False,
"commentable_id": "non_team_dummy_id"
}
if include_depth:
data["depth"] = 0
self._set_mock_request_data(mock_request, data)
def create_thread_helper(self, mock_request, extra_request_data=None, extra_response_data=None):
"""
Issues a request to create a thread and verifies the result.
"""
self._set_mock_request_data(mock_request, {
"thread_type": "discussion",
"title": "Hello",
"body": "this is a post",
"course_id": "MITx/999/Robot_Super_Course",
"anonymous": False,
"anonymous_to_peers": False,
"commentable_id": "i4x-MITx-999-course-Robot_Super_Course",
"created_at": "2013-05-10T18:53:43Z",
"updated_at": "2013-05-10T18:53:43Z",
"at_position_list": [],
"closed": False,
"id": "518d4237b023791dca00000d",
"user_id": "1",
"username": "robot",
"votes": {
"count": 0,
"up_count": 0,
"down_count": 0,
"point": 0
},
"abuse_flaggers": [],
"type": "thread",
"group_id": None,
"pinned": False,
"endorsed": False,
"unread_comments_count": 0,
"read": False,
"comments_count": 0,
})
thread = {
"thread_type": "discussion",
"body": ["this is a post"],
"anonymous_to_peers": ["false"],
"auto_subscribe": ["false"],
"anonymous": ["false"],
"title": ["Hello"],
}
if extra_request_data:
thread.update(extra_request_data)
url = reverse('create_thread', kwargs={'commentable_id': 'i4x-MITx-999-course-Robot_Super_Course',
'course_id': six.text_type(self.course_id)})
response = self.client.post(url, data=thread)
assert mock_request.called
expected_data = {
'thread_type': 'discussion',
'body': u'this is a post',
'context': ThreadContext.COURSE,
'anonymous_to_peers': False, 'user_id': 1,
'title': u'Hello',
'commentable_id': u'i4x-MITx-999-course-Robot_Super_Course',
'anonymous': False,
'course_id': six.text_type(self.course_id),
}
if extra_response_data:
expected_data.update(extra_response_data)
mock_request.assert_called_with(
'post',
'{prefix}/i4x-MITx-999-course-Robot_Super_Course/threads'.format(prefix=CS_PREFIX),
data=expected_data,
params={'request_id': ANY},
headers=ANY,
timeout=5
)
assert response.status_code == 200
def update_thread_helper(self, mock_request):
"""
Issues a request to update a thread and verifies the result.
"""
self._setup_mock_request(mock_request)
# Mock out saving in order to test that content is correctly
# updated. Otherwise, the call to thread.save() receives the
# same mocked request data that the original call to retrieve
# the thread did, overwriting any changes.
with patch.object(Thread, 'save'):
response = self.client.post(
reverse("update_thread", kwargs={
"thread_id": "dummy",
"course_id": six.text_type(self.course_id)
}),
data={"body": "foo", "title": "foo", "commentable_id": "some_topic"}
)
self.assertEqual(response.status_code, 200)
data = json.loads(response.content.decode('utf-8'))
self.assertEqual(data['body'], 'foo')
self.assertEqual(data['title'], 'foo')
self.assertEqual(data['commentable_id'], 'some_topic')
@ddt.ddt
@patch('openedx.core.djangoapps.django_comment_common.comment_client.utils.requests.request', autospec=True)
@disable_signal(views, 'thread_created')
@disable_signal(views, 'thread_edited')
class ViewsQueryCountTestCase(
ForumsEnableMixin,
UrlResetMixin,
ModuleStoreTestCase,
MockRequestSetupMixin,
ViewsTestCaseMixin
):
CREATE_USER = False
ENABLED_CACHES = ['default', 'mongo_metadata_inheritance', 'loc_cache']
ENABLED_SIGNALS = ['course_published']
@patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
def setUp(self):
super(ViewsQueryCountTestCase, self).setUp()
def count_queries(func): # pylint: disable=no-self-argument
"""
Decorates test methods to count mongo and SQL calls for a
particular modulestore.
"""
def inner(self, default_store, module_count, mongo_calls, sql_queries, *args, **kwargs):
with modulestore().default_store(default_store):
self.set_up_course(module_count=module_count)
self.clear_caches()
with self.assertNumQueries(sql_queries, table_blacklist=QUERY_COUNT_TABLE_BLACKLIST):
with check_mongo_calls(mongo_calls):
func(self, *args, **kwargs)
return inner
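# Each @ddt.data tuple below supplies, in order, the (default_store,
# module_count, mongo_calls, sql_queries) arguments consumed by the
# count_queries wrapper above.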
@ddt.data(
(ModuleStoreEnum.Type.mongo, 3, 4, 41),
(ModuleStoreEnum.Type.split, 3, 13, 41),
)
@ddt.unpack
@count_queries
def test_create_thread(self, mock_request):
self.create_thread_helper(mock_request)
@ddt.data(
(ModuleStoreEnum.Type.mongo, 3, 3, 37),
(ModuleStoreEnum.Type.split, 3, 10, 37),
)
@ddt.unpack
@count_queries
def test_update_thread(self, mock_request):
self.update_thread_helper(mock_request)
@ddt.ddt
@patch('openedx.core.djangoapps.django_comment_common.comment_client.utils.requests.request', autospec=True)
class ViewsTestCase(
ForumsEnableMixin,
UrlResetMixin,
SharedModuleStoreTestCase,
MockRequestSetupMixin,
ViewsTestCaseMixin,
MockSignalHandlerMixin
):
@classmethod
def setUpClass(cls):
# pylint: disable=super-method-not-called
with super(ViewsTestCase, cls).setUpClassAndTestData():
cls.course = CourseFactory.create(
org='MITx', course='999',
discussion_topics={"Some Topic": {"id": "some_topic"}},
display_name='Robot Super Course',
)
@classmethod
def setUpTestData(cls):
super(ViewsTestCase, cls).setUpTestData()
cls.course_id = cls.course.id
# seed the forums permissions and roles
call_command('seed_permissions_roles', six.text_type(cls.course_id))
@patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
def setUp(self):
# Patching the ENABLE_DISCUSSION_SERVICE value affects the contents of urls.py,
# so we need to call super.setUp() which reloads urls.py (because
# of the UrlResetMixin)
super(ViewsTestCase, self).setUp()
# Patch the comment client user save method so it does not try
# to create a new cc user when creating a django user
with patch('student.models.cc.User.save'):
uname = 'student'
email = 'student@edx.org'
self.password = 'test'
# Create the user and make them active so we can log them in.
self.student = User.objects.create_user(uname, email, self.password)
self.student.is_active = True
self.student.save()
# Add a discussion moderator
self.moderator = UserFactory.create(password=self.password)
# Enroll the student in the course
CourseEnrollmentFactory(user=self.student,
course_id=self.course_id)
# Enroll the moderator and give them the appropriate roles
CourseEnrollmentFactory(user=self.moderator, course_id=self.course.id)
self.moderator.roles.add(Role.objects.get(name="Moderator", course_id=self.course.id))
assert self.client.login(username='student', password=self.password)
@contextmanager
def assert_discussion_signals(self, signal, user=None):
if user is None:
user = self.student
with self.assert_signal_sent(views, signal, sender=None, user=user, exclude_args=('post',)):
yield
def test_create_thread(self, mock_request):
with self.assert_discussion_signals('thread_created'):
self.create_thread_helper(mock_request)
def test_create_thread_standalone(self, mock_request):
team = CourseTeamFactory.create(
name="A Team",
course_id=self.course_id,
topic_id='topic_id',
discussion_topic_id="i4x-MITx-999-course-Robot_Super_Course"
)
# Add the student to the team so they can post to the commentable.
team.add_user(self.student)
# create_thread_helper verifies that extra data are passed through to the comments service
self.create_thread_helper(mock_request, extra_response_data={'context': ThreadContext.STANDALONE})
@ddt.data(
('follow_thread', 'thread_followed'),
('unfollow_thread', 'thread_unfollowed'),
)
@ddt.unpack
def test_follow_unfollow_thread_signals(self, view_name, signal, mock_request):
self.create_thread_helper(mock_request)
with self.assert_discussion_signals(signal):
response = self.client.post(
reverse(
view_name,
kwargs={"course_id": six.text_type(self.course_id), "thread_id": 'i4x-MITx-999-course-Robot_Super_Course'}
)
)
self.assertEqual(response.status_code, 200)
def test_delete_thread(self, mock_request):
self._set_mock_request_data(mock_request, {
"user_id": str(self.student.id),
"closed": False,
})
test_thread_id = "test_thread_id"
request = RequestFactory().post("dummy_url", {"id": test_thread_id})
request.user = self.student
request.view_name = "delete_thread"
with self.assert_discussion_signals('thread_deleted'):
response = views.delete_thread(
request,
course_id=six.text_type(self.course.id),
thread_id=test_thread_id
)
self.assertEqual(response.status_code, 200)
self.assertTrue(mock_request.called)
def test_delete_comment(self, mock_request):
self._set_mock_request_data(mock_request, {
"user_id": str(self.student.id),
"closed": False,
})
test_comment_id = "test_comment_id"
request = RequestFactory().post("dummy_url", {"id": test_comment_id})
request.user = self.student
request.view_name = "delete_comment"
with self.assert_discussion_signals('comment_deleted'):
response = views.delete_comment(
request,
course_id=six.text_type(self.course.id),
comment_id=test_comment_id
)
self.assertEqual(response.status_code, 200)
self.assertTrue(mock_request.called)
args = mock_request.call_args[0]
self.assertEqual(args[0], "delete")
self.assertTrue(args[1].endswith("/{}".format(test_comment_id)))
def _test_request_error(self, view_name, view_kwargs, data, mock_request):
"""
Submit a request against the given view with the given data and ensure
that the result is a 400 error and that no data was posted using
mock_request
"""
self._setup_mock_request(mock_request, include_depth=(view_name == "create_sub_comment"))
response = self.client.post(reverse(view_name, kwargs=view_kwargs), data=data)
self.assertEqual(response.status_code, 400)
for call in mock_request.call_args_list:
self.assertEqual(call[0][0].lower(), "get")
def test_create_thread_no_title(self, mock_request):
self._test_request_error(
"create_thread",
{"commentable_id": "dummy", "course_id": six.text_type(self.course_id)},
{"body": "foo"},
mock_request
)
def test_create_thread_empty_title(self, mock_request):
self._test_request_error(
"create_thread",
{"commentable_id": "dummy", "course_id": six.text_type(self.course_id)},
{"body": "foo", "title": " "},
mock_request
)
def test_create_thread_no_body(self, mock_request):
self._test_request_error(
"create_thread",
{"commentable_id": "dummy", "course_id": six.text_type(self.course_id)},
{"title": "foo"},
mock_request
)
def test_create_thread_empty_body(self, mock_request):
self._test_request_error(
"create_thread",
{"commentable_id": "dummy", "course_id": six.text_type(self.course_id)},
{"body": " ", "title": "foo"},
mock_request
)
def test_update_thread_no_title(self, mock_request):
self._test_request_error(
"update_thread",
{"thread_id": "dummy", "course_id": six.text_type(self.course_id)},
{"body": "foo"},
mock_request
)
def test_update_thread_empty_title(self, mock_request):
self._test_request_error(
"update_thread",
{"thread_id": "dummy", "course_id": six.text_type(self.course_id)},
{"body": "foo", "title": " "},
mock_request
)
def test_update_thread_no_body(self, mock_request):
self._test_request_error(
"update_thread",
{"thread_id": "dummy", "course_id": six.text_type(self.course_id)},
{"title": "foo"},
mock_request
)
def test_update_thread_empty_body(self, mock_request):
self._test_request_error(
"update_thread",
{"thread_id": "dummy", "course_id": six.text_type(self.course_id)},
{"body": " ", "title": "foo"},
mock_request
)
def test_update_thread_course_topic(self, mock_request):
with self.assert_discussion_signals('thread_edited'):
self.update_thread_helper(mock_request)
@patch(
'lms.djangoapps.discussion.django_comment_client.utils.get_discussion_categories_ids',
return_value=["test_commentable"],
)
def test_update_thread_wrong_commentable_id(self, mock_get_discussion_id_map, mock_request):
self._test_request_error(
"update_thread",
{"thread_id": "dummy", "course_id": six.text_type(self.course_id)},
{"body": "foo", "title": "foo", "commentable_id": "wrong_commentable"},
mock_request
)
def test_create_comment(self, mock_request):
self._setup_mock_request(mock_request)
with self.assert_discussion_signals('comment_created'):
response = self.client.post(
reverse(
"create_comment",
kwargs={"course_id": six.text_type(self.course_id), "thread_id": "dummy"}
),
data={"body": "body"}
)
self.assertEqual(response.status_code, 200)
def test_create_comment_no_body(self, mock_request):
self._test_request_error(
"create_comment",
{"thread_id": "dummy", "course_id": six.text_type(self.course_id)},
{},
mock_request
)
def test_create_comment_empty_body(self, mock_request):
self._test_request_error(
"create_comment",
{"thread_id": "dummy", "course_id": six.text_type(self.course_id)},
{"body": " "},
mock_request
)
def test_create_sub_comment_no_body(self, mock_request):
self._test_request_error(
"create_sub_comment",
{"comment_id": "dummy", "course_id": six.text_type(self.course_id)},
{},
mock_request
)
def test_create_sub_comment_empty_body(self, mock_request):
self._test_request_error(
"create_sub_comment",
{"comment_id": "dummy", "course_id": six.text_type(self.course_id)},
{"body": " "},
mock_request
)
def test_update_comment_no_body(self, mock_request):
self._test_request_error(
"update_comment",
{"comment_id": "dummy", "course_id": six.text_type(self.course_id)},
{},
mock_request
)
def test_update_comment_empty_body(self, mock_request):
self._test_request_error(
"update_comment",
{"comment_id": "dummy", "course_id": six.text_type(self.course_id)},
{"body": " "},
mock_request
)
def test_update_comment_basic(self, mock_request):
self._setup_mock_request(mock_request)
comment_id = "test_comment_id"
updated_body = "updated body"
with self.assert_discussion_signals('comment_edited'):
response = self.client.post(
reverse(
"update_comment",
kwargs={"course_id": six.text_type(self.course_id), "comment_id": comment_id}
),
data={"body": updated_body}
)
self.assertEqual(response.status_code, 200)
mock_request.assert_called_with(
"put",
"{prefix}/comments/{comment_id}".format(prefix=CS_PREFIX, comment_id=comment_id),
headers=ANY,
params=ANY,
timeout=ANY,
data={"body": updated_body}
)
def test_flag_thread_open(self, mock_request):
self.flag_thread(mock_request, False)
def test_flag_thread_close(self, mock_request):
self.flag_thread(mock_request, True)
def flag_thread(self, mock_request, is_closed):
self._set_mock_request_data(mock_request, {
"title": "Hello",
"body": "this is a post",
"course_id": "MITx/999/Robot_Super_Course",
"anonymous": False,
"anonymous_to_peers": False,
"commentable_id": "i4x-MITx-999-course-Robot_Super_Course",
"created_at": "2013-05-10T18:53:43Z",
"updated_at": "2013-05-10T18:53:43Z",
"at_position_list": [],
"closed": is_closed,
"id": "518d4237b023791dca00000d",
"user_id": "1", "username": "robot",
"votes": {
"count": 0,
"up_count": 0,
"down_count": 0,
"point": 0
},
"abuse_flaggers": [1],
"type": "thread",
"group_id": None,
"pinned": False,
"endorsed": False,
"unread_comments_count": 0,
"read": False,
"comments_count": 0,
})
url = reverse('flag_abuse_for_thread', kwargs={
'thread_id': '518d4237b023791dca00000d',
'course_id': six.text_type(self.course_id)
})
response = self.client.post(url)
assert mock_request.called
call_list = [
(
('get', '{prefix}/threads/518d4237b023791dca00000d'.format(prefix=CS_PREFIX)),
{
'data': None,
'params': {'mark_as_read': True, 'request_id': ANY, 'with_responses': False},
'headers': ANY,
'timeout': 5
}
),
(
('put', '{prefix}/threads/518d4237b023791dca00000d/abuse_flag'.format(prefix=CS_PREFIX)),
{
'data': {'user_id': '1'},
'params': {'request_id': ANY},
'headers': ANY,
'timeout': 5
}
),
(
('get', '{prefix}/threads/518d4237b023791dca00000d'.format(prefix=CS_PREFIX)),
{
'data': None,
'params': {'mark_as_read': True, 'request_id': ANY, 'with_responses': False},
'headers': ANY,
'timeout': 5
}
)
]
assert mock_request.call_args_list == call_list
assert response.status_code == 200
def test_un_flag_thread_open(self, mock_request):
self.un_flag_thread(mock_request, False)
def test_un_flag_thread_close(self, mock_request):
self.un_flag_thread(mock_request, True)
def un_flag_thread(self, mock_request, is_closed):
self._set_mock_request_data(mock_request, {
"title": "Hello",
"body": "this is a post",
"course_id": "MITx/999/Robot_Super_Course",
"anonymous": False,
"anonymous_to_peers": False,
"commentable_id": "i4x-MITx-999-course-Robot_Super_Course",
"created_at": "2013-05-10T18:53:43Z",
"updated_at": "2013-05-10T18:53:43Z",
"at_position_list": [],
"closed": is_closed,
"id": "518d4237b023791dca00000d",
"user_id": "1",
"username": "robot",
"votes": {
"count": 0,
"up_count": 0,
"down_count": 0,
"point": 0
},
"abuse_flaggers": [],
"type": "thread",
"group_id": None,
"pinned": False,
"endorsed": False,
"unread_comments_count": 0,
"read": False,
"comments_count": 0
})
url = reverse('un_flag_abuse_for_thread', kwargs={
'thread_id': '518d4237b023791dca00000d',
'course_id': six.text_type(self.course_id)
})
response = self.client.post(url)
assert mock_request.called
call_list = [
(
('get', '{prefix}/threads/518d4237b023791dca00000d'.format(prefix=CS_PREFIX)),
{
'data': None,
'params': {'mark_as_read': True, 'request_id': ANY, 'with_responses': False},
'headers': ANY,
'timeout': 5
}
),
(
('put', '{prefix}/threads/518d4237b023791dca00000d/abuse_unflag'.format(prefix=CS_PREFIX)),
{
'data': {'user_id': '1'},
'params': {'request_id': ANY},
'headers': ANY,
'timeout': 5
}
),
(
('get', '{prefix}/threads/518d4237b023791dca00000d'.format(prefix=CS_PREFIX)),
{
'data': None,
'params': {'mark_as_read': True, 'request_id': ANY, 'with_responses': False},
'headers': ANY,
'timeout': 5
}
)
]
assert mock_request.call_args_list == call_list
assert response.status_code == 200
def test_flag_comment_open(self, mock_request):
self.flag_comment(mock_request, False)
def test_flag_comment_close(self, mock_request):
self.flag_comment(mock_request, True)
def flag_comment(self, mock_request, is_closed):
self._set_mock_request_data(mock_request, {
"body": "this is a comment",
"course_id": "MITx/999/Robot_Super_Course",
"anonymous": False,
"anonymous_to_peers": False,
"commentable_id": "i4x-MITx-999-course-Robot_Super_Course",
"created_at": "2013-05-10T18:53:43Z",
"updated_at": "2013-05-10T18:53:43Z",
"at_position_list": [],
"closed": is_closed,
"id": "518d4237b023791dca00000d",
"user_id": "1",
"username": "robot",
"votes": {
"count": 0,
"up_count": 0,
"down_count": 0,
"point": 0
},
"abuse_flaggers": [1],
"type": "comment",
"endorsed": False
})
url = reverse('flag_abuse_for_comment', kwargs={
'comment_id': '518d4237b023791dca00000d',
'course_id': six.text_type(self.course_id)
})
response = self.client.post(url)
assert mock_request.called
call_list = [
(
('get', '{prefix}/comments/518d4237b023791dca00000d'.format(prefix=CS_PREFIX)),
{
'data': None,
'params': {'request_id': ANY},
'headers': ANY,
'timeout': 5
}
),
(
('put', '{prefix}/comments/518d4237b023791dca00000d/abuse_flag'.format(prefix=CS_PREFIX)),
{
'data': {'user_id': '1'},
'params': {'request_id': ANY},
'headers': ANY,
'timeout': 5
}
),
(
('get', '{prefix}/comments/518d4237b023791dca00000d'.format(prefix=CS_PREFIX)),
{
'data': None,
'params': {'request_id': ANY},
'headers': ANY,
'timeout': 5
}
)
]
assert mock_request.call_args_list == call_list
assert response.status_code == 200
def test_un_flag_comment_open(self, mock_request):
self.un_flag_comment(mock_request, False)
def test_un_flag_comment_close(self, mock_request):
self.un_flag_comment(mock_request, True)
def un_flag_comment(self, mock_request, is_closed):
self._set_mock_request_data(mock_request, {
"body": "this is a comment",
"course_id": "MITx/999/Robot_Super_Course",
"anonymous": False,
"anonymous_to_peers": False,
"commentable_id": "i4x-MITx-999-course-Robot_Super_Course",
"created_at": "2013-05-10T18:53:43Z",
"updated_at": "2013-05-10T18:53:43Z",
"at_position_list": [],
"closed": is_closed,
"id": "518d4237b023791dca00000d",
"user_id": "1",
"username": "robot",
"votes": {
"count": 0,
"up_count": 0,
"down_count": 0,
"point": 0
},
"abuse_flaggers": [],
"type": "comment",
"endorsed": False
})
url = reverse('un_flag_abuse_for_comment', kwargs={
'comment_id': '518d4237b023791dca00000d',
'course_id': six.text_type(self.course_id)
})
response = self.client.post(url)
assert mock_request.called
call_list = [
(
('get', '{prefix}/comments/518d4237b023791dca00000d'.format(prefix=CS_PREFIX)),
{
'data': None,
'params': {'request_id': ANY},
'headers': ANY,
'timeout': 5
}
),
(
('put', '{prefix}/comments/518d4237b023791dca00000d/abuse_unflag'.format(prefix=CS_PREFIX)),
{
'data': {'user_id': '1'},
'params': {'request_id': ANY},
'headers': ANY,
'timeout': 5
}
),
(
('get', '{prefix}/comments/518d4237b023791dca00000d'.format(prefix=CS_PREFIX)),
{
'data': None,
'params': {'request_id': ANY},
'headers': ANY,
'timeout': 5
}
)
]
assert mock_request.call_args_list == call_list
assert response.status_code == 200
@ddt.data(
('upvote_thread', 'thread_id', 'thread_voted'),
('upvote_comment', 'comment_id', 'comment_voted'),
('downvote_thread', 'thread_id', 'thread_voted'),
('downvote_comment', 'comment_id', 'comment_voted')
)
@ddt.unpack
def test_voting(self, view_name, item_id, signal, mock_request):
self._setup_mock_request(mock_request)
with self.assert_discussion_signals(signal):
response = self.client.post(
reverse(
view_name,
kwargs={item_id: 'dummy', 'course_id': six.text_type(self.course_id)}
)
)
self.assertEqual(response.status_code, 200)
def test_endorse_comment(self, mock_request):
self._setup_mock_request(mock_request)
self.client.login(username=self.moderator.username, password=self.password)
with self.assert_discussion_signals('comment_endorsed', user=self.moderator):
response = self.client.post(
reverse(
'endorse_comment',
kwargs={'comment_id': 'dummy', 'course_id': six.text_type(self.course_id)}
)
)
self.assertEqual(response.status_code, 200)
@patch("openedx.core.djangoapps.django_comment_common.comment_client.utils.requests.request", autospec=True)
@disable_signal(views, 'comment_endorsed')
class ViewPermissionsTestCase(ForumsEnableMixin, UrlResetMixin, SharedModuleStoreTestCase, MockRequestSetupMixin):
@classmethod
def setUpClass(cls):
# pylint: disable=super-method-not-called
with super(ViewPermissionsTestCase, cls).setUpClassAndTestData():
cls.course = CourseFactory.create()
@classmethod
def setUpTestData(cls):
super(ViewPermissionsTestCase, cls).setUpTestData()
seed_permissions_roles(cls.course.id)
cls.password = "test password"
cls.student = UserFactory.create(password=cls.password)
cls.moderator = UserFactory.create(password=cls.password)
CourseEnrollmentFactory(user=cls.student, course_id=cls.course.id)
CourseEnrollmentFactory(user=cls.moderator, course_id=cls.course.id)
cls.moderator.roles.add(Role.objects.get(name="Moderator", course_id=cls.course.id))
@patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
def setUp(self):
super(ViewPermissionsTestCase, self).setUp()
def test_pin_thread_as_student(self, mock_request):
self._set_mock_request_data(mock_request, {})
self.client.login(username=self.student.username, password=self.password)
response = self.client.post(
reverse("pin_thread", kwargs={"course_id": six.text_type(self.course.id), "thread_id": "dummy"})
)
self.assertEqual(response.status_code, 401)
def test_pin_thread_as_moderator(self, mock_request):
self._set_mock_request_data(mock_request, {})
self.client.login(username=self.moderator.username, password=self.password)
response = self.client.post(
reverse("pin_thread", kwargs={"course_id": six.text_type(self.course.id), "thread_id": "dummy"})
)
self.assertEqual(response.status_code, 200)
def test_un_pin_thread_as_student(self, mock_request):
self._set_mock_request_data(mock_request, {})
self.client.login(username=self.student.username, password=self.password)
response = self.client.post(
reverse("un_pin_thread", kwargs={"course_id": six.text_type(self.course.id), "thread_id": "dummy"})
)
self.assertEqual(response.status_code, 401)
def test_un_pin_thread_as_moderator(self, mock_request):
self._set_mock_request_data(mock_request, {})
self.client.login(username=self.moderator.username, password=self.password)
response = self.client.post(
reverse("un_pin_thread", kwargs={"course_id": six.text_type(self.course.id), "thread_id": "dummy"})
)
self.assertEqual(response.status_code, 200)
def _set_mock_request_thread_and_comment(self, mock_request, thread_data, comment_data):
def handle_request(*args, **kwargs):
url = args[1]
if "/threads/" in url:
return self._create_response_mock(thread_data)
elif "/comments/" in url:
return self._create_response_mock(comment_data)
else:
raise ArgumentError("Bad url to mock request")
mock_request.side_effect = handle_request
def test_endorse_response_as_staff(self, mock_request):
self._set_mock_request_thread_and_comment(
mock_request,
{"type": "thread", "thread_type": "question", "user_id": str(self.student.id)},
{"type": "comment", "thread_id": "dummy"}
)
self.client.login(username=self.moderator.username, password=self.password)
response = self.client.post(
reverse("endorse_comment", kwargs={"course_id": six.text_type(self.course.id), "comment_id": "dummy"})
)
self.assertEqual(response.status_code, 200)
def test_endorse_response_as_student(self, mock_request):
self._set_mock_request_thread_and_comment(
mock_request,
{"type": "thread", "thread_type": "question", "user_id": str(self.moderator.id)},
{"type": "comment", "thread_id": "dummy"}
)
self.client.login(username=self.student.username, password=self.password)
response = self.client.post(
reverse("endorse_comment", kwargs={"course_id": six.text_type(self.course.id), "comment_id": "dummy"})
)
self.assertEqual(response.status_code, 401)
def test_endorse_response_as_student_question_author(self, mock_request):
self._set_mock_request_thread_and_comment(
mock_request,
{"type": "thread", "thread_type": "question", "user_id": str(self.student.id)},
{"type": "comment", "thread_id": "dummy"}
)
self.client.login(username=self.student.username, password=self.password)
response = self.client.post(
reverse("endorse_comment", kwargs={"course_id": six.text_type(self.course.id), "comment_id": "dummy"})
)
self.assertEqual(response.status_code, 200)
class CreateThreadUnicodeTestCase(
ForumsEnableMixin,
SharedModuleStoreTestCase,
UnicodeTestMixin,
    MockRequestSetupMixin
):
@classmethod
def setUpClass(cls):
# pylint: disable=super-method-not-called
with super(CreateThreadUnicodeTestCase, cls).setUpClassAndTestData():
cls.course = CourseFactory.create()
@classmethod
def setUpTestData(cls):
super(CreateThreadUnicodeTestCase, cls).setUpTestData()
seed_permissions_roles(cls.course.id)
cls.student = UserFactory.create()
CourseEnrollmentFactory(user=cls.student, course_id=cls.course.id)
@patch('openedx.core.djangoapps.django_comment_common.comment_client.utils.requests.request', autospec=True)
    def _test_unicode_data(self, text, mock_request):
"""
Test to make sure unicode data in a thread doesn't break it.
"""
self._set_mock_request_data(mock_request, {})
request = RequestFactory().post("dummy_url", {"thread_type": "discussion", "body": text, "title": text})
request.user = self.student
request.view_name = "create_thread"
response = views.create_thread(
            # The commentable ID contains a username; the Unicode char below ensures it works fine
request, course_id=six.text_type(self.course.id), commentable_id=u"non_tåem_dummy_id"
)
self.assertEqual(response.status_code, 200)
self.assertTrue(mock_request.called)
self.assertEqual(mock_request.call_args[1]["data"]["body"], text)
self.assertEqual(mock_request.call_args[1]["data"]["title"], text)
@disable_signal(views, 'thread_edited')
class UpdateThreadUnicodeTestCase(
ForumsEnableMixin,
SharedModuleStoreTestCase,
UnicodeTestMixin,
MockRequestSetupMixin
):
@classmethod
def setUpClass(cls):
# pylint: disable=super-method-not-called
with super(UpdateThreadUnicodeTestCase, cls).setUpClassAndTestData():
cls.course = CourseFactory.create()
@classmethod
def setUpTestData(cls):
super(UpdateThreadUnicodeTestCase, cls).setUpTestData()
seed_permissions_roles(cls.course.id)
cls.student = UserFactory.create()
CourseEnrollmentFactory(user=cls.student, course_id=cls.course.id)
@patch(
'lms.djangoapps.discussion.django_comment_client.utils.get_discussion_categories_ids',
return_value=["test_commentable"],
)
@patch('openedx.core.djangoapps.django_comment_common.comment_client.utils.requests.request', autospec=True)
def _test_unicode_data(self, text, mock_request, mock_get_discussion_id_map):
self._set_mock_request_data(mock_request, {
"user_id": str(self.student.id),
"closed": False,
})
request = RequestFactory().post("dummy_url", {"body": text, "title": text, "thread_type": "question", "commentable_id": "test_commentable"})
request.user = self.student
request.view_name = "update_thread"
response = views.update_thread(request, course_id=six.text_type(self.course.id), thread_id="dummy_thread_id")
self.assertEqual(response.status_code, 200)
self.assertTrue(mock_request.called)
self.assertEqual(mock_request.call_args[1]["data"]["body"], text)
self.assertEqual(mock_request.call_args[1]["data"]["title"], text)
self.assertEqual(mock_request.call_args[1]["data"]["thread_type"], "question")
self.assertEqual(mock_request.call_args[1]["data"]["commentable_id"], "test_commentable")
@disable_signal(views, 'comment_created')
class CreateCommentUnicodeTestCase(
ForumsEnableMixin,
SharedModuleStoreTestCase,
UnicodeTestMixin,
MockRequestSetupMixin
):
@classmethod
def setUpClass(cls):
# pylint: disable=super-method-not-called
with super(CreateCommentUnicodeTestCase, cls).setUpClassAndTestData():
cls.course = CourseFactory.create()
@classmethod
def setUpTestData(cls):
super(CreateCommentUnicodeTestCase, cls).setUpTestData()
seed_permissions_roles(cls.course.id)
cls.student = UserFactory.create()
CourseEnrollmentFactory(user=cls.student, course_id=cls.course.id)
@patch('openedx.core.djangoapps.django_comment_common.comment_client.utils.requests.request', autospec=True)
def _test_unicode_data(self, text, mock_request):
commentable_id = "non_team_dummy_id"
self._set_mock_request_data(mock_request, {
"closed": False,
"commentable_id": commentable_id
})
        # We have to get clever here because of Thread's setters and getters:
        # mock.patch won't work with them, so we set and delete the class
        # attribute manually instead.
try:
Thread.commentable_id = commentable_id
request = RequestFactory().post("dummy_url", {"body": text})
request.user = self.student
request.view_name = "create_comment"
response = views.create_comment(
request, course_id=six.text_type(self.course.id), thread_id="dummy_thread_id"
)
self.assertEqual(response.status_code, 200)
self.assertTrue(mock_request.called)
self.assertEqual(mock_request.call_args[1]["data"]["body"], text)
finally:
del Thread.commentable_id
@disable_signal(views, 'comment_edited')
class UpdateCommentUnicodeTestCase(
ForumsEnableMixin,
SharedModuleStoreTestCase,
UnicodeTestMixin,
MockRequestSetupMixin
):
@classmethod
def setUpClass(cls):
# pylint: disable=super-method-not-called
with super(UpdateCommentUnicodeTestCase, cls).setUpClassAndTestData():
cls.course = CourseFactory.create()
@classmethod
def setUpTestData(cls):
super(UpdateCommentUnicodeTestCase, cls).setUpTestData()
seed_permissions_roles(cls.course.id)
cls.student = UserFactory.create()
CourseEnrollmentFactory(user=cls.student, course_id=cls.course.id)
@patch('openedx.core.djangoapps.django_comment_common.comment_client.utils.requests.request', autospec=True)
def _test_unicode_data(self, text, mock_request):
self._set_mock_request_data(mock_request, {
"user_id": str(self.student.id),
"closed": False,
})
request = RequestFactory().post("dummy_url", {"body": text})
request.user = self.student
request.view_name = "update_comment"
response = views.update_comment(request, course_id=six.text_type(self.course.id), comment_id="dummy_comment_id")
self.assertEqual(response.status_code, 200)
self.assertTrue(mock_request.called)
self.assertEqual(mock_request.call_args[1]["data"]["body"], text)
@disable_signal(views, 'comment_created')
class CreateSubCommentUnicodeTestCase(
ForumsEnableMixin,
SharedModuleStoreTestCase,
UnicodeTestMixin,
MockRequestSetupMixin
):
"""
Make sure comments under a response can handle unicode.
"""
@classmethod
def setUpClass(cls):
# pylint: disable=super-method-not-called
with super(CreateSubCommentUnicodeTestCase, cls).setUpClassAndTestData():
cls.course = CourseFactory.create()
@classmethod
def setUpTestData(cls):
super(CreateSubCommentUnicodeTestCase, cls).setUpTestData()
seed_permissions_roles(cls.course.id)
cls.student = UserFactory.create()
CourseEnrollmentFactory(user=cls.student, course_id=cls.course.id)
@patch('openedx.core.djangoapps.django_comment_common.comment_client.utils.requests.request', autospec=True)
def _test_unicode_data(self, text, mock_request):
"""
Create a comment with unicode in it.
"""
self._set_mock_request_data(mock_request, {
"closed": False,
"depth": 1,
"thread_id": "test_thread",
"commentable_id": "non_team_dummy_id"
})
request = RequestFactory().post("dummy_url", {"body": text})
request.user = self.student
request.view_name = "create_sub_comment"
Thread.commentable_id = "test_commentable"
try:
response = views.create_sub_comment(
request, course_id=six.text_type(self.course.id), comment_id="dummy_comment_id"
)
self.assertEqual(response.status_code, 200)
self.assertTrue(mock_request.called)
self.assertEqual(mock_request.call_args[1]["data"]["body"], text)
finally:
del Thread.commentable_id
@ddt.ddt
@patch("openedx.core.djangoapps.django_comment_common.comment_client.utils.requests.request", autospec=True)
@disable_signal(views, 'thread_voted')
@disable_signal(views, 'thread_edited')
@disable_signal(views, 'comment_created')
@disable_signal(views, 'comment_voted')
@disable_signal(views, 'comment_deleted')
class TeamsPermissionsTestCase(ForumsEnableMixin, UrlResetMixin, SharedModuleStoreTestCase, MockRequestSetupMixin):
# Most of the test points use the same ddt data.
# args: user, commentable_id, status_code
ddt_permissions_args = [
# Student in team can do operations on threads/comments within the team commentable.
('student_in_team', 'team_commentable_id', 200),
# Non-team commentables can be edited by any student.
('student_in_team', 'course_commentable_id', 200),
# Student not in team cannot do operations within the team commentable.
('student_not_in_team', 'team_commentable_id', 401),
# Non-team commentables can be edited by any student.
('student_not_in_team', 'course_commentable_id', 200),
        # Moderators can always operate on threads within a team, regardless of team membership.
('moderator', 'team_commentable_id', 200),
# Group moderators have regular student privileges for creating a thread and commenting
('group_moderator', 'course_commentable_id', 200)
]
def change_divided_discussion_settings(self, scheme):
"""
Change divided discussion settings for the current course.
If dividing by cohorts, create and assign users to a cohort.
"""
        enable_cohorts = scheme is CourseDiscussionSettings.COHORT
set_course_discussion_settings(
self.course.id,
enable_cohorts=enable_cohorts,
divided_discussions=[],
always_divide_inline_discussions=True,
division_scheme=scheme,
)
set_course_cohorted(self.course.id, enable_cohorts)
@classmethod
def setUpClass(cls):
# pylint: disable=super-method-not-called
with super(TeamsPermissionsTestCase, cls).setUpClassAndTestData():
teams_configuration = {
'topics': [{'id': "topic_id", 'name': 'Solar Power', 'description': 'Solar power is hot'}]
}
cls.course = CourseFactory.create(teams_configuration=teams_configuration)
@classmethod
def setUpTestData(cls):
super(TeamsPermissionsTestCase, cls).setUpTestData()
cls.course = CourseFactory.create()
cls.password = "test password"
seed_permissions_roles(cls.course.id)
# Create enrollment tracks
CourseModeFactory.create(
course_id=cls.course.id,
mode_slug=CourseMode.VERIFIED
)
CourseModeFactory.create(
course_id=cls.course.id,
mode_slug=CourseMode.AUDIT
)
# Create 6 users--
# student in team (in the team, audit)
# student not in team (not in the team, audit)
# cohorted (in the cohort, audit)
# verified (not in the cohort, verified)
# moderator (in the cohort, audit, moderator permissions)
# group moderator (in the cohort, verified, group moderator permissions)
def create_users_and_enroll(coursemode):
student = UserFactory.create(password=cls.password)
CourseEnrollmentFactory(
course_id=cls.course.id,
user=student,
mode=coursemode
)
return student
cls.student_in_team, cls.student_not_in_team, cls.moderator, cls.cohorted = (
[create_users_and_enroll(CourseMode.AUDIT) for _ in range(4)])
cls.verified, cls.group_moderator = [create_users_and_enroll(CourseMode.VERIFIED) for _ in range(2)]
# Give moderator and group moderator permissions
cls.moderator.roles.add(Role.objects.get(name="Moderator", course_id=cls.course.id))
assign_role(cls.course.id, cls.group_moderator, 'Group Moderator')
# Create a team
cls.team_commentable_id = "team_discussion_id"
cls.team = CourseTeamFactory.create(
name=u'The Only Team',
course_id=cls.course.id,
topic_id='topic_id',
discussion_topic_id=cls.team_commentable_id
)
CourseTeamMembershipFactory.create(team=cls.team, user=cls.student_in_team)
# Dummy commentable ID not linked to a team
cls.course_commentable_id = "course_level_commentable"
# Create cohort and add students to it
CohortFactory(
course_id=cls.course.id,
name='Test Cohort',
users=[cls.group_moderator, cls.cohorted]
)
@patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
def setUp(self):
super(TeamsPermissionsTestCase, self).setUp()
def _setup_mock(self, user, mock_request, data):
user = getattr(self, user)
self._set_mock_request_data(mock_request, data)
self.client.login(username=user.username, password=self.password)
@ddt.data(
# student_in_team will be able to update his own post, regardless of team membership
('student_in_team', 'student_in_team', 'team_commentable_id', 200, CourseDiscussionSettings.NONE),
('student_in_team', 'student_in_team', 'course_commentable_id', 200, CourseDiscussionSettings.NONE),
# students can only update their own posts
('student_in_team', 'moderator', 'team_commentable_id', 401, CourseDiscussionSettings.NONE),
# Even though student_not_in_team is not in the team, he can still modify posts he created while in the team.
('student_not_in_team', 'student_not_in_team', 'team_commentable_id', 200, CourseDiscussionSettings.NONE),
# Moderators can change their own posts and other people's posts.
('moderator', 'moderator', 'team_commentable_id', 200, CourseDiscussionSettings.NONE),
('moderator', 'student_in_team', 'team_commentable_id', 200, CourseDiscussionSettings.NONE),
        # Group moderators can do operations on commentables within their group if the course is divided
('group_moderator', 'verified', 'course_commentable_id', 200, CourseDiscussionSettings.ENROLLMENT_TRACK),
('group_moderator', 'cohorted', 'course_commentable_id', 200, CourseDiscussionSettings.COHORT),
# Group moderators cannot do operations on commentables outside of their group
('group_moderator', 'verified', 'course_commentable_id', 401, CourseDiscussionSettings.COHORT),
('group_moderator', 'cohorted', 'course_commentable_id', 401, CourseDiscussionSettings.ENROLLMENT_TRACK),
# Group moderators cannot do operations when the course is not divided
('group_moderator', 'verified', 'course_commentable_id', 401, CourseDiscussionSettings.NONE),
('group_moderator', 'cohorted', 'course_commentable_id', 401, CourseDiscussionSettings.NONE)
)
@ddt.unpack
def test_update_thread(self, user, thread_author, commentable_id, status_code, division_scheme, mock_request):
"""
Verify that update_thread is limited to thread authors and privileged users (team membership does not matter).
"""
self.change_divided_discussion_settings(division_scheme)
commentable_id = getattr(self, commentable_id)
        # thread_author is the user marked as the author of the thread being updated.
thread_author = getattr(self, thread_author)
self._setup_mock(
user, mock_request, # user is the person making the request.
{
"user_id": str(thread_author.id),
"closed": False, "commentable_id": commentable_id,
"context": "standalone",
"username": thread_author.username,
"course_id": six.text_type(self.course.id)
}
)
response = self.client.post(
reverse(
"update_thread",
kwargs={
"course_id": six.text_type(self.course.id),
"thread_id": "dummy"
}
),
data={"body": "foo", "title": "foo", "commentable_id": commentable_id}
)
self.assertEqual(response.status_code, status_code)
@ddt.data(
# Students can delete their own posts
('student_in_team', 'student_in_team', 'team_commentable_id', 200, CourseDiscussionSettings.NONE),
# Moderators can delete any post
('moderator', 'student_in_team', 'team_commentable_id', 200, CourseDiscussionSettings.NONE),
# Others cannot delete posts
('student_in_team', 'moderator', 'team_commentable_id', 401, CourseDiscussionSettings.NONE),
('student_not_in_team', 'student_in_team', 'team_commentable_id', 401, CourseDiscussionSettings.NONE),
        # Group moderators can do operations on commentables within their group if the course is divided
('group_moderator', 'verified', 'team_commentable_id', 200, CourseDiscussionSettings.ENROLLMENT_TRACK),
('group_moderator', 'cohorted', 'team_commentable_id', 200, CourseDiscussionSettings.COHORT),
# Group moderators cannot do operations on commentables outside of their group
('group_moderator', 'verified', 'team_commentable_id', 401, CourseDiscussionSettings.COHORT),
('group_moderator', 'cohorted', 'team_commentable_id', 401, CourseDiscussionSettings.ENROLLMENT_TRACK),
# Group moderators cannot do operations when the course is not divided
('group_moderator', 'verified', 'team_commentable_id', 401, CourseDiscussionSettings.NONE),
('group_moderator', 'cohorted', 'team_commentable_id', 401, CourseDiscussionSettings.NONE)
)
@ddt.unpack
def test_delete_comment(self, user, comment_author, commentable_id, status_code, division_scheme, mock_request):
commentable_id = getattr(self, commentable_id)
comment_author = getattr(self, comment_author)
self.change_divided_discussion_settings(division_scheme)
self._setup_mock(user, mock_request, {
"closed": False,
"commentable_id": commentable_id,
"user_id": str(comment_author.id),
"username": comment_author.username,
"course_id": six.text_type(self.course.id)
})
response = self.client.post(
reverse(
"delete_comment",
kwargs={
"course_id": six.text_type(self.course.id),
"comment_id": "dummy"
}
),
data={"body": "foo", "title": "foo"}
)
self.assertEqual(response.status_code, status_code)
@ddt.data(*ddt_permissions_args)
@ddt.unpack
def test_create_comment(self, user, commentable_id, status_code, mock_request):
"""
Verify that create_comment is limited to members of the team or users with 'edit_content' permission.
"""
commentable_id = getattr(self, commentable_id)
self._setup_mock(user, mock_request, {"closed": False, "commentable_id": commentable_id})
response = self.client.post(
reverse(
"create_comment",
kwargs={
"course_id": six.text_type(self.course.id),
"thread_id": "dummy"
}
),
data={"body": "foo", "title": "foo"}
)
self.assertEqual(response.status_code, status_code)
@ddt.data(*ddt_permissions_args)
@ddt.unpack
def test_create_sub_comment(self, user, commentable_id, status_code, mock_request):
"""
        Verify that create_sub_comment is limited to members of the team or users with 'edit_content' permission.
"""
commentable_id = getattr(self, commentable_id)
self._setup_mock(
user, mock_request,
{"closed": False, "commentable_id": commentable_id, "thread_id": "dummy_thread"},
)
response = self.client.post(
reverse(
"create_sub_comment",
kwargs={
"course_id": six.text_type(self.course.id),
"comment_id": "dummy_comment"
}
),
data={"body": "foo", "title": "foo"}
)
self.assertEqual(response.status_code, status_code)
@ddt.data(*ddt_permissions_args)
@ddt.unpack
def test_comment_actions(self, user, commentable_id, status_code, mock_request):
"""
Verify that voting and flagging of comments is limited to members of the team or users with
'edit_content' permission.
"""
commentable_id = getattr(self, commentable_id)
self._setup_mock(
user, mock_request,
{"closed": False, "commentable_id": commentable_id, "thread_id": "dummy_thread"},
)
for action in ["upvote_comment", "downvote_comment", "un_flag_abuse_for_comment", "flag_abuse_for_comment"]:
response = self.client.post(
reverse(
action,
kwargs={"course_id": six.text_type(self.course.id), "comment_id": "dummy_comment"}
)
)
self.assertEqual(response.status_code, status_code)
@ddt.data(*ddt_permissions_args)
@ddt.unpack
def test_threads_actions(self, user, commentable_id, status_code, mock_request):
"""
Verify that voting, flagging, and following of threads is limited to members of the team or users with
'edit_content' permission.
"""
commentable_id = getattr(self, commentable_id)
self._setup_mock(
user, mock_request,
{"closed": False, "commentable_id": commentable_id},
)
for action in ["upvote_thread", "downvote_thread", "un_flag_abuse_for_thread", "flag_abuse_for_thread",
"follow_thread", "unfollow_thread"]:
response = self.client.post(
reverse(
action,
kwargs={"course_id": six.text_type(self.course.id), "thread_id": "dummy_thread"}
)
)
self.assertEqual(response.status_code, status_code)
TEAM_COMMENTABLE_ID = 'test-team-discussion'
@disable_signal(views, 'comment_created')
@ddt.ddt
class ForumEventTestCase(ForumsEnableMixin, SharedModuleStoreTestCase, MockRequestSetupMixin):
"""
Forum actions are expected to launch analytics events. Test these here.
"""
@classmethod
def setUpClass(cls):
# pylint: disable=super-method-not-called
with super(ForumEventTestCase, cls).setUpClassAndTestData():
cls.course = CourseFactory.create()
@classmethod
def setUpTestData(cls):
super(ForumEventTestCase, cls).setUpTestData()
seed_permissions_roles(cls.course.id)
cls.student = UserFactory.create()
CourseEnrollmentFactory(user=cls.student, course_id=cls.course.id)
cls.student.roles.add(Role.objects.get(name="Student", course_id=cls.course.id))
CourseAccessRoleFactory(course_id=cls.course.id, user=cls.student, role='Wizard')
@patch('eventtracking.tracker.emit')
@patch('openedx.core.djangoapps.django_comment_common.comment_client.utils.requests.request', autospec=True)
def test_response_event(self, mock_request, mock_emit):
"""
Check to make sure an event is fired when a user responds to a thread.
"""
self._set_mock_request_data(mock_request, {
"closed": False,
"commentable_id": 'test_commentable_id',
'thread_id': 'test_thread_id',
})
request = RequestFactory().post("dummy_url", {"body": "Test comment", 'auto_subscribe': True})
request.user = self.student
request.view_name = "create_comment"
views.create_comment(request, course_id=six.text_type(self.course.id), thread_id='test_thread_id')
event_name, event = mock_emit.call_args[0]
self.assertEqual(event_name, 'edx.forum.response.created')
self.assertEqual(event['body'], "Test comment")
self.assertEqual(event['commentable_id'], 'test_commentable_id')
self.assertEqual(event['user_forums_roles'], ['Student'])
self.assertEqual(event['user_course_roles'], ['Wizard'])
self.assertEqual(event['discussion']['id'], 'test_thread_id')
self.assertEqual(event['options']['followed'], True)
@patch('eventtracking.tracker.emit')
@patch('openedx.core.djangoapps.django_comment_common.comment_client.utils.requests.request', autospec=True)
def test_comment_event(self, mock_request, mock_emit):
"""
Ensure an event is fired when someone comments on a response.
"""
self._set_mock_request_data(mock_request, {
"closed": False,
"depth": 1,
"thread_id": "test_thread_id",
"commentable_id": "test_commentable_id",
"parent_id": "test_response_id"
})
request = RequestFactory().post("dummy_url", {"body": "Another comment"})
request.user = self.student
request.view_name = "create_sub_comment"
views.create_sub_comment(request, course_id=six.text_type(self.course.id), comment_id="dummy_comment_id")
event_name, event = mock_emit.call_args[0]
self.assertEqual(event_name, "edx.forum.comment.created")
self.assertEqual(event['body'], 'Another comment')
self.assertEqual(event['discussion']['id'], 'test_thread_id')
self.assertEqual(event['response']['id'], 'test_response_id')
self.assertEqual(event['user_forums_roles'], ['Student'])
self.assertEqual(event['user_course_roles'], ['Wizard'])
self.assertEqual(event['options']['followed'], False)
@patch('eventtracking.tracker.emit')
@patch('openedx.core.djangoapps.django_comment_common.comment_client.utils.requests.request', autospec=True)
@ddt.data((
'create_thread',
'edx.forum.thread.created', {
'thread_type': 'discussion',
'body': 'Test text',
'title': 'Test',
'auto_subscribe': True
},
{'commentable_id': TEAM_COMMENTABLE_ID}
), (
'create_comment',
'edx.forum.response.created',
{'body': 'Test comment', 'auto_subscribe': True},
{'thread_id': 'test_thread_id'}
), (
'create_sub_comment',
'edx.forum.comment.created',
{'body': 'Another comment'},
{'comment_id': 'dummy_comment_id'}
))
@ddt.unpack
def test_team_events(self, view_name, event_name, view_data, view_kwargs, mock_request, mock_emit):
user = self.student
team = CourseTeamFactory.create(discussion_topic_id=TEAM_COMMENTABLE_ID)
CourseTeamMembershipFactory.create(team=team, user=user)
self._set_mock_request_data(mock_request, {
'closed': False,
'commentable_id': TEAM_COMMENTABLE_ID,
'thread_id': 'test_thread_id',
})
request = RequestFactory().post('dummy_url', view_data)
request.user = user
request.view_name = view_name
getattr(views, view_name)(request, course_id=six.text_type(self.course.id), **view_kwargs)
name, event = mock_emit.call_args[0]
self.assertEqual(name, event_name)
self.assertEqual(event['team_id'], team.team_id)
@ddt.data(
('vote_for_thread', 'thread_id', 'thread'),
('undo_vote_for_thread', 'thread_id', 'thread'),
('vote_for_comment', 'comment_id', 'response'),
('undo_vote_for_comment', 'comment_id', 'response'),
)
@ddt.unpack
@patch('eventtracking.tracker.emit')
@patch('openedx.core.djangoapps.django_comment_common.comment_client.utils.requests.request', autospec=True)
def test_thread_voted_event(self, view_name, obj_id_name, obj_type, mock_request, mock_emit):
undo = view_name.startswith('undo')
self._set_mock_request_data(mock_request, {
'closed': False,
'commentable_id': 'test_commentable_id',
'username': 'gumprecht',
})
request = RequestFactory().post('dummy_url', {})
request.user = self.student
request.view_name = view_name
view_function = getattr(views, view_name)
kwargs = dict(course_id=six.text_type(self.course.id))
kwargs[obj_id_name] = obj_id_name
if not undo:
kwargs.update(value='up')
view_function(request, **kwargs)
self.assertTrue(mock_emit.called)
event_name, event = mock_emit.call_args[0]
self.assertEqual(event_name, 'edx.forum.{}.voted'.format(obj_type))
self.assertEqual(event['target_username'], 'gumprecht')
self.assertEqual(event['undo_vote'], undo)
self.assertEqual(event['vote_value'], 'up')
class UsersEndpointTestCase(ForumsEnableMixin, SharedModuleStoreTestCase, MockRequestSetupMixin):
@classmethod
def setUpClass(cls):
# pylint: disable=super-method-not-called
with super(UsersEndpointTestCase, cls).setUpClassAndTestData():
cls.course = CourseFactory.create()
@classmethod
def setUpTestData(cls):
super(UsersEndpointTestCase, cls).setUpTestData()
seed_permissions_roles(cls.course.id)
cls.student = UserFactory.create()
cls.enrollment = CourseEnrollmentFactory(user=cls.student, course_id=cls.course.id)
cls.other_user = UserFactory.create(username="other")
CourseEnrollmentFactory(user=cls.other_user, course_id=cls.course.id)
def set_post_counts(self, mock_request, threads_count=1, comments_count=1):
"""
sets up a mock response from the comments service for getting post counts for our other_user
"""
self._set_mock_request_data(mock_request, {
"threads_count": threads_count,
"comments_count": comments_count,
})
def make_request(self, method='get', course_id=None, **kwargs):
course_id = course_id or self.course.id
request = getattr(RequestFactory(), method)("dummy_url", kwargs)
request.user = self.student
request.view_name = "users"
return views.users(request, course_id=text_type(course_id))
@patch('openedx.core.djangoapps.django_comment_common.comment_client.utils.requests.request', autospec=True)
def test_finds_exact_match(self, mock_request):
self.set_post_counts(mock_request)
response = self.make_request(username="other")
self.assertEqual(response.status_code, 200)
self.assertEqual(
json.loads(response.content.decode('utf-8'))["users"],
[{"id": self.other_user.id, "username": self.other_user.username}]
)
@patch('openedx.core.djangoapps.django_comment_common.comment_client.utils.requests.request', autospec=True)
def test_finds_no_match(self, mock_request):
self.set_post_counts(mock_request)
response = self.make_request(username="othor")
self.assertEqual(response.status_code, 200)
self.assertEqual(json.loads(response.content.decode('utf-8'))["users"], [])
def test_requires_GET(self):
response = self.make_request(method='post', username="other")
self.assertEqual(response.status_code, 405)
def test_requires_username_param(self):
response = self.make_request()
self.assertEqual(response.status_code, 400)
content = json.loads(response.content.decode('utf-8'))
self.assertIn("errors", content)
self.assertNotIn("users", content)
def test_course_does_not_exist(self):
course_id = CourseKey.from_string("does/not/exist")
response = self.make_request(course_id=course_id, username="other")
self.assertEqual(response.status_code, 404)
content = json.loads(response.content.decode('utf-8'))
self.assertIn("errors", content)
self.assertNotIn("users", content)
def test_requires_requestor_enrolled_in_course(self):
# unenroll self.student from the course.
self.enrollment.delete()
response = self.make_request(username="other")
self.assertEqual(response.status_code, 404)
content = json.loads(response.content.decode('utf-8'))
self.assertIn("errors", content)
self.assertNotIn("users", content)
@patch('openedx.core.djangoapps.django_comment_common.comment_client.utils.requests.request', autospec=True)
def test_requires_matched_user_has_forum_content(self, mock_request):
self.set_post_counts(mock_request, 0, 0)
response = self.make_request(username="other")
self.assertEqual(response.status_code, 200)
self.assertEqual(json.loads(response.content.decode('utf-8'))["users"], [])
@ddt.ddt
class SegmentIOForumThreadViewedEventTestCase(SegmentIOTrackingTestCaseBase):
def _raise_navigation_event(self, label, include_name):
middleware = TrackMiddleware()
kwargs = {'label': label}
if include_name:
kwargs['name'] = 'edx.bi.app.navigation.screen'
else:
kwargs['exclude_name'] = True
request = self.create_request(
data=self.create_segmentio_event_json(**kwargs),
content_type='application/json',
)
User.objects.create(pk=SEGMENTIO_TEST_USER_ID, username=str(mock.sentinel.username))
middleware.process_request(request)
try:
response = segmentio.segmentio_event(request)
self.assertEqual(response.status_code, 200)
finally:
middleware.process_response(request, None)
@ddt.data(True, False)
def test_thread_viewed(self, include_name):
"""
Tests that a SegmentIO thread viewed event is accepted and transformed.
Only tests that the transformation happens at all; does not
comprehensively test that it happens correctly.
ForumThreadViewedEventTransformerTestCase tests for correctness.
"""
self._raise_navigation_event('Forum: View Thread', include_name)
event = self.get_event()
self.assertEqual(event['name'], 'edx.forum.thread.viewed')
self.assertEqual(event['event_type'], event['name'])
@ddt.data(True, False)
def test_non_thread_viewed(self, include_name):
"""
Tests that other BI events are thrown out.
"""
self._raise_navigation_event('Forum: Create Thread', include_name)
self.assert_no_events_emitted()
def _get_transformed_event(input_event):
transformer = ForumThreadViewedEventTransformer(**input_event)
transformer.transform()
return transformer
def _create_event(
label='Forum: View Thread',
include_context=True,
inner_context=None,
username=None,
course_id=None,
**event_data
):
result = {'name': 'edx.bi.app.navigation.screen'}
if include_context:
result['context'] = {'label': label}
if course_id:
result['context']['course_id'] = str(course_id)
if username:
result['username'] = username
if event_data:
result['event'] = event_data
if inner_context:
if not event_data:
result['event'] = {}
result['event']['context'] = inner_context
return result
def _create_and_transform_event(**kwargs):
event = _create_event(**kwargs)
return event, _get_transformed_event(event)
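# Illustrative sketch (not part of the original tests): the helpers above build
# SegmentIO-style event dicts. For example, _create_event(course_id=some_key,
# thread_id='t1') would produce roughly:
#     {'name': 'edx.bi.app.navigation.screen',
#      'context': {'label': 'Forum: View Thread', 'course_id': 'some-course-id'},
#      'event': {'thread_id': 't1'}}
# and _create_and_transform_event returns that dict together with its transformed form.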
@ddt.ddt
class ForumThreadViewedEventTransformerTestCase(ForumsEnableMixin, UrlResetMixin, ModuleStoreTestCase):
"""
Test that the ForumThreadViewedEventTransformer transforms events correctly
and without raising exceptions.
Because the events passed through the transformer can come from external
sources (e.g., a mobile app), we carefully test a myriad of cases, including
those with incomplete and malformed events.
"""
CATEGORY_ID = 'i4x-edx-discussion-id'
CATEGORY_NAME = 'Discussion 1'
PARENT_CATEGORY_NAME = 'Chapter 1'
TEAM_CATEGORY_ID = 'i4x-edx-team-discussion-id'
TEAM_CATEGORY_NAME = 'Team Chat'
TEAM_PARENT_CATEGORY_NAME = PARENT_CATEGORY_NAME
DUMMY_CATEGORY_ID = 'i4x-edx-dummy-commentable-id'
DUMMY_THREAD_ID = 'dummy_thread_id'
@mock.patch.dict("student.models.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
def setUp(self):
super(ForumThreadViewedEventTransformerTestCase, self).setUp()
self.courses_by_store = {
ModuleStoreEnum.Type.mongo: CourseFactory.create(
org='TestX',
course='TR-101',
run='Event_Transform_Test',
default_store=ModuleStoreEnum.Type.mongo,
),
ModuleStoreEnum.Type.split: CourseFactory.create(
org='TestX',
course='TR-101S',
run='Event_Transform_Test_Split',
default_store=ModuleStoreEnum.Type.split,
),
}
        self.course = self.courses_by_store[ModuleStoreEnum.Type.mongo]
self.student = UserFactory.create()
self.staff = UserFactory.create(is_staff=True)
UserBasedRole(user=self.staff, role=CourseStaffRole.ROLE).add_course(self.course.id)
CourseEnrollmentFactory.create(user=self.student, course_id=self.course.id)
self.category = ItemFactory.create(
parent_location=self.course.location,
category='discussion',
discussion_id=self.CATEGORY_ID,
discussion_category=self.PARENT_CATEGORY_NAME,
discussion_target=self.CATEGORY_NAME,
)
self.team_category = ItemFactory.create(
parent_location=self.course.location,
category='discussion',
discussion_id=self.TEAM_CATEGORY_ID,
discussion_category=self.TEAM_PARENT_CATEGORY_NAME,
discussion_target=self.TEAM_CATEGORY_NAME,
)
self.team = CourseTeamFactory.create(
name='Team 1',
course_id=self.course.id,
topic_id='arbitrary-topic-id',
discussion_topic_id=self.team_category.discussion_id,
)
def test_missing_context(self):
event = _create_event(include_context=False)
with self.assertRaises(EventEmissionExit):
_get_transformed_event(event)
def test_no_data(self):
event, event_trans = _create_and_transform_event()
event['name'] = 'edx.forum.thread.viewed'
event['event_type'] = event['name']
event['event'] = {}
self.assertDictEqual(event_trans, event)
def test_inner_context(self):
_, event_trans = _create_and_transform_event(inner_context={})
self.assertNotIn('context', event_trans['event'])
def test_non_thread_view(self):
event = _create_event(
label='Forum: Create Thread',
course_id=self.course.id,
topic_id=self.DUMMY_CATEGORY_ID,
thread_id=self.DUMMY_THREAD_ID,
)
with self.assertRaises(EventEmissionExit):
_get_transformed_event(event)
def test_bad_field_types(self):
event, event_trans = _create_and_transform_event(
course_id={},
topic_id=3,
thread_id=object(),
action=3.14,
)
event['name'] = 'edx.forum.thread.viewed'
event['event_type'] = event['name']
self.assertDictEqual(event_trans, event)
def test_bad_course_id(self):
event, event_trans = _create_and_transform_event(course_id='non-existent-course-id')
event_data = event_trans['event']
self.assertNotIn('category_id', event_data)
self.assertNotIn('category_name', event_data)
self.assertNotIn('url', event_data)
self.assertNotIn('user_forums_roles', event_data)
self.assertNotIn('user_course_roles', event_data)
def test_bad_username(self):
event, event_trans = _create_and_transform_event(username='non-existent-username')
event_data = event_trans['event']
self.assertNotIn('category_id', event_data)
self.assertNotIn('category_name', event_data)
self.assertNotIn('user_forums_roles', event_data)
self.assertNotIn('user_course_roles', event_data)
def test_bad_url(self):
event, event_trans = _create_and_transform_event(
course_id=self.course.id,
topic_id='malformed/commentable/id',
thread_id='malformed/thread/id',
)
self.assertNotIn('url', event_trans['event'])
def test_renamed_fields(self):
AUTHOR = 'joe-the-plumber'
event, event_trans = _create_and_transform_event(
course_id=self.course.id,
topic_id=self.DUMMY_CATEGORY_ID,
thread_id=self.DUMMY_THREAD_ID,
author=AUTHOR,
)
self.assertEqual(event_trans['event']['commentable_id'], self.DUMMY_CATEGORY_ID)
self.assertEqual(event_trans['event']['id'], self.DUMMY_THREAD_ID)
self.assertEqual(event_trans['event']['target_username'], AUTHOR)
def test_titles(self):
# No title
_, event_1_trans = _create_and_transform_event()
self.assertNotIn('title', event_1_trans['event'])
self.assertNotIn('title_truncated', event_1_trans['event'])
# Short title
_, event_2_trans = _create_and_transform_event(
action='!',
)
self.assertIn('title', event_2_trans['event'])
self.assertIn('title_truncated', event_2_trans['event'])
self.assertFalse(event_2_trans['event']['title_truncated'])
# Long title
_, event_3_trans = _create_and_transform_event(
action=('covfefe' * 200),
)
self.assertIn('title', event_3_trans['event'])
self.assertIn('title_truncated', event_3_trans['event'])
self.assertTrue(event_3_trans['event']['title_truncated'])
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_urls(self, store):
course = self.courses_by_store[store]
commentable_id = self.DUMMY_CATEGORY_ID
thread_id = self.DUMMY_THREAD_ID
_, event_trans = _create_and_transform_event(
course_id=course.id,
topic_id=commentable_id,
thread_id=thread_id,
)
expected_path = '/courses/{0}/discussion/forum/{1}/threads/{2}'.format(
course.id, commentable_id, thread_id
)
self.assertTrue(event_trans['event'].get('url').endswith(expected_path))
def test_categories(self):
# Bad category
_, event_trans_1 = _create_and_transform_event(
username=self.student.username,
course_id=self.course.id,
topic_id='non-existent-category-id',
)
self.assertNotIn('category_id', event_trans_1['event'])
self.assertNotIn('category_name', event_trans_1['event'])
# Good category
_, event_trans_2 = _create_and_transform_event(
username=self.student.username,
course_id=self.course.id,
topic_id=self.category.discussion_id,
)
self.assertEqual(event_trans_2['event'].get('category_id'), self.category.discussion_id)
full_category_name = u'{0} / {1}'.format(self.category.discussion_category, self.category.discussion_target)
self.assertEqual(event_trans_2['event'].get('category_name'), full_category_name)
def test_roles(self):
# No user
_, event_trans_1 = _create_and_transform_event(
course_id=self.course.id,
)
self.assertNotIn('user_forums_roles', event_trans_1['event'])
self.assertNotIn('user_course_roles', event_trans_1['event'])
# Student user
_, event_trans_2 = _create_and_transform_event(
course_id=self.course.id,
username=self.student.username,
)
self.assertEqual(event_trans_2['event'].get('user_forums_roles'), [FORUM_ROLE_STUDENT])
self.assertEqual(event_trans_2['event'].get('user_course_roles'), [])
# Course staff user
_, event_trans_3 = _create_and_transform_event(
course_id=self.course.id,
username=self.staff.username,
)
self.assertEqual(event_trans_3['event'].get('user_forums_roles'), [])
self.assertEqual(event_trans_3['event'].get('user_course_roles'), [CourseStaffRole.ROLE])
def test_teams(self):
# No category
_, event_trans_1 = _create_and_transform_event(
course_id=self.course.id,
)
self.assertNotIn('team_id', event_trans_1)
# Non-team category
_, event_trans_2 = _create_and_transform_event(
course_id=self.course.id,
topic_id=self.CATEGORY_ID,
)
self.assertNotIn('team_id', event_trans_2)
# Team category
_, event_trans_3 = _create_and_transform_event(
course_id=self.course.id,
topic_id=self.TEAM_CATEGORY_ID,
)
self.assertEqual(event_trans_3['event'].get('team_id'), self.team.team_id)
|
agpl-3.0
| 6,711,102,984,654,132,000
| 38.870216
| 148
| 0.599713
| false
| 3.932523
| true
| false
| false
|
agdsn/pycroft
|
pycroft/model/address.py
|
1
|
3378
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015 The Pycroft Authors. See the AUTHORS file.
# This file is part of the Pycroft project and licensed under the terms of
# the Apache License, Version 2.0. See the LICENSE file for details.
from typing import List
from sqlalchemy import Column, String, UniqueConstraint
from pycroft.model import ddl
from pycroft.model.base import IntegerIdModel
DEFAULT_CITY = "Dresden"
DEFAULT_COUNTRY = "Germany"
class Address(IntegerIdModel):
"""A known address.
Addresses differ from most other entities such as users or rooms in the following ways:
- Their identity is provided by their value, i.e. if two addresses have equal values,
      they should be identical
- Their existence is justified solely by the reference of another object.
At no point in time should there be any unreferenced address records in the db.
- They should be immutable: This implies that editing e.g. the street of a user's address
should not change the street of the corresponding room's address.
This implies that addresses are *stateless*, i.e. have no life cycle.
Establishing these consistencies requires triggers.
"""
street = Column(String(), nullable=False)
number = Column(String(), nullable=False)
addition = Column(String(), nullable=False, server_default="")
    # Zip codes can sometimes contain things like dashes, so we use String() rather
    # than an integer type. We could impose a format via a CHECK constraint, but
    # that would be over-engineering.
zip_code = Column(String(), nullable=False)
city = Column(String(), nullable=False, server_default=DEFAULT_CITY)
state = Column(String(), nullable=False, server_default="")
country = Column(String(), nullable=False, server_default=DEFAULT_COUNTRY)
__table_args__ = (
UniqueConstraint('street', 'number', 'addition', 'zip_code', 'city', 'state', 'country'),
)
def __str__(self):
return f"{self:short}"
def __format__(self, spec="short"):
"""Return the address items separated by the format specifier"""
city = self.city.upper() if self.country and self.country != DEFAULT_COUNTRY else self.city
items: List[str] = [f"{self.street} {self.number} // {self.addition}" if self.addition
else f"{self.street} {self.number}", f"{self.zip_code} {city}"]
if self.state:
state = self.state.upper() if self.country and self.country != DEFAULT_COUNTRY else self.state
items.append(f"{state}")
if self.country and self.country != DEFAULT_COUNTRY:
items.append(f"{self.country.upper()}")
glue = ", " if spec == "short" else "\n" if spec == "long" else spec
return glue.join(items)
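    # Illustrative usage (a sketch; the values are made up and the server
    # defaults only apply once a row has actually been inserted):
    #     addr = Address(street="Musterstraße", number="1", addition="",
    #                    zip_code="01069", city="Dresden", state="", country="Germany")
    #     format(addr, "short")  # -> "Musterstraße 1, 01069 Dresden"
    #     format(addr, "long")   # -> "Musterstraße 1\n01069 Dresden"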
manager = ddl.DDLManager()
address_remove_orphans = ddl.Function(
'address_remove_orphans', [], 'trigger',
""" BEGIN
delete from address
where not exists (select 1 from room where room.address_id = address.id)
and not exists (select 1 from "user" where "user".address_id = address.id);
RETURN NULL;
END;""",
volatility='volatile', strict=True, language='plpgsql'
)
manager.add_function(Address.__table__, address_remove_orphans)
# User trigger for the respective backref added in `user.py`
# Room trigger for the respective backref added in `facilities.py`
manager.register()
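# Illustrative consequence (a sketch, assuming the triggers registered in
# user.py and facilities.py invoke address_remove_orphans after their rows
# change): deleting the last room or user referencing an address makes the
# function above delete that address row, so no unreferenced addresses
# accumulate in the database.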
|
apache-2.0
| -3,732,029,067,401,948,700
| 41.759494
| 106
| 0.68206
| false
| 3.923345
| false
| false
| false
|
Kruehlio/MUSEspec
|
utils/starlight.py
|
1
|
15375
|
# -*- coding: utf-8 -*-
""" Spectrum class for running starlight on spectra. Particularly for
MUSE cubes
"""
import matplotlib
matplotlib.use('Agg')
import os
import numpy as np
import scipy as sp
import shutil
import time
import platform
import matplotlib.pyplot as plt
import logging
from ..MUSEio.museio import asciiout, cubeout
logfmt = '%(levelname)s [%(asctime)s]: %(message)s'
datefmt = '%Y-%m-%d %H:%M:%S'
formatter = logging.Formatter(fmt=logfmt, datefmt=datefmt)
logger = logging.getLogger('__main__')
logging.root.setLevel(logging.DEBUG)
ch = logging.StreamHandler()  # console handler
ch.setFormatter(formatter)
logger.handlers = []
logger.addHandler(ch)
SL_BASE_ALL = os.path.join(os.path.dirname(__file__), "../etc/Base.BC03.S")
SL_BASE_FEW = os.path.join(os.path.dirname(__file__), "../etc/Base.BC03.N")
SL_BASE_BB = os.path.join(os.path.dirname(__file__), "../etc/Base.BC03.15lh")
SL_CONFIG = os.path.join(os.path.dirname(__file__), "../etc/MUSE_SLv01.config")
SL_MASK = os.path.join(os.path.dirname(__file__), "../etc/Masks.EmLines.SDSS.gm")
SL_BASES = os.path.join(os.path.dirname(__file__), "../etc/bases")
if platform.platform().startswith('Linux'):
SL_EXE = os.path.join(os.path.dirname(__file__), "../etc/starlight")
else:
SL_EXE = os.path.join(os.path.dirname(__file__), "../etc/starlight_mac")
class StarLight:
""" StarLight class for fitting """
def __init__(self, filen, verbose=0, minwl=None, maxwl=None,
run=1, bases='FEW', inst='MUSE', red='CAL'):
self.specfile = filen
        self.minwl = 3330 if minwl is None else minwl
        self.maxwl = 9400 if maxwl is None else maxwl
self.cwd = os.getcwd()
root, ext = os.path.splitext(filen)
self.output = os.path.join(root+'_sl_out'+ext)
self.sllog = root+'_sl_log'+ext
self.seed = np.random.randint(1E6, 9E6)
self.inst = inst
self.red = red
basewdir = os.path.join(self.cwd, 'bases')
if not os.path.isdir(basewdir):
os.makedirs(basewdir)
if bases == 'FEW':
shutil.copy(SL_BASE_FEW, self.cwd)
self.bases = SL_BASE_FEW
elif bases == 'ALL':
shutil.copy(SL_BASE_ALL, self.cwd)
self.bases = SL_BASE_ALL
elif bases == 'BB':
shutil.copy(SL_BASE_BB, self.cwd)
self.bases = SL_BASE_BB
shutil.copy(SL_CONFIG, self.cwd)
f = open(self.bases)
basescps = [g for g in f.readlines() if not g.startswith('#')]
f.close()
for basescp in basescps:
baseraw = os.path.join(SL_BASES, basescp.split()[0])
if os.path.isfile(baseraw):
shutil.copy(baseraw, basewdir)
if not os.path.isfile(SL_EXE):
            print('ERROR: STARLIGHT executable not found')
raise SystemExit
if run == 1:
self._makeGrid()
self._runGrid()
def _makeGrid(self, name='muse_grid.in'):
headkey = ['[Number of fits to run]',
'[base_dir]', '[obs_dir]', '[mask_dir]', '[out_dir]',
'[seed]', '[llow_SN]', '[lupp_SN]', '[Olsyn_ini]',
'[Olsyn_fin]', '[Odlsyn]', '[fscale_chi2]', '[FIT/FXK]',
'[IsErrSpecAvailable]', '[IsFlagSpecAvailable]']
speckey = ['spectrum', 'config', 'bases', 'masks', 'red', 'v0_start',
'vd_start', 'output']
header = {'[Number of fits to run]': '1',
'[base_dir]': self.cwd+'/bases/',
'[obs_dir]' :self.cwd+'/',
'[mask_dir]' : os.path.split(SL_MASK)[0]+'/',
'[out_dir]': self.cwd+'/',
'[seed]': self.seed,
'[llow_SN]': 5200,
'[lupp_SN]': 5400,
'[Olsyn_ini]': self.minwl,
'[Olsyn_fin]': self.maxwl,
'[Odlsyn]':1.0,
'[fscale_chi2]':1.0,
'[FIT/FXK]': 'FIT',
'[IsErrSpecAvailable]':'1',
'[IsFlagSpecAvailable]':'1'}
specline = {'spectrum': self.specfile,
'config': os.path.split(SL_CONFIG)[-1],
'bases': os.path.split(self.bases)[-1],
'masks': os.path.split(SL_MASK)[-1],
'red' : self.red,
'v0_start': 0,
'vd_start': 50,
'output': self.output}
f = open(name, 'w')
for head in headkey:
f.write('%s %s\n' %(header[head], head))
for spec in speckey:
f.write('%s ' %(specline[spec]))
f.write('\n')
self.grid = name
def _runGrid(self, cleanup=True):
t1 = time.time()
slarg = [SL_EXE, '<', self.grid, '>', self.sllog]
os.system(' '.join(slarg))
# Cleanup
        if cleanup:
shutil.rmtree('bases')
os.remove(os.path.join(self.cwd, os.path.split(self.bases)[-1]))
os.remove(os.path.join(self.cwd, os.path.split(SL_CONFIG)[-1]))
return time.time()-t1
def modOut(self, plot=0, minwl=3860, maxwl=4470,
rm=True):
starwl, starfit = np.array([]), np.array([])
        datawl, data, gas, stars = (np.array([]) for _ in range(4))
success, run, norm, v0, vd, av = 0, 0, 1, -1, -1, -1
try:
f = open(self.output)
output = f.readlines()
f.close()
            if rm:
os.remove(self.sllog)
slpath = os.path.join(self.cwd, 'sl_fits')
if not os.path.isdir(slpath):
os.makedirs(slpath)
slout = os.path.join(slpath, self.output)
if os.path.isfile(slout):
os.remove(slout)
shutil.move(self.output, os.path.join(self.cwd, 'sl_fits'))
run = 1
except IOError:
pass
if run == 1:
for out in output:
outsplit = out.split()
if outsplit[1:] == ['[fobs_norm', '(in', 'input', 'units)]']:
norm = float(outsplit[0])
success = 1
if outsplit[1:] == ['Run', 'aborted:(']:
break
if len(outsplit) == 4:
try:
outsplit = [float(a) for a in outsplit]
if float(outsplit[0]) >= self.minwl:
starfit = np.append(starfit, outsplit[2])
starwl = np.append(starwl, outsplit[0])
if outsplit[3] != -2:
data = np.append(data, outsplit[1])
gas = np.append(gas, outsplit[1]-outsplit[2] )
stars = np.append(stars, outsplit[2])
datawl = np.append(datawl, outsplit[0])
except ValueError:
pass
if len(outsplit) == 3:
if outsplit[1] == '[v0_min':
v0 = float(outsplit[0])
if outsplit[1] == '[vd_min':
vd = float(outsplit[0])
if outsplit[1] == '[AV_min':
av = float(outsplit[0])
if plot == 1:
sel0 = (datawl > minwl) * (datawl < maxwl)
sel1 = (datawl > 3860) * (datawl < 4630)
sel2 = (datawl > 4730) * (datawl < 5230)
sel3 = (datawl > 6420) * (datawl < 7020)
fig1 = plt.figure(figsize = (5,8.4))
fig1.subplots_adjust(bottom=0.10, top=0.99, left=0.15, right=0.98)
ax1 = fig1.add_subplot(3, 1, 1)
ax2 = fig1.add_subplot(3, 1, 2)
ax3 = fig1.add_subplot(3, 1, 3)
for ax in [ax1, ax2, ax3]:
ax.plot(datawl, 0*datawl, '--', color ='grey')
ax.plot(datawl, norm*gas, '-', color ='black')
ax.plot(datawl, norm*data, '-', color ='firebrick', lw=2)
ax.plot(starwl, norm*starfit, '-', color ='green')
ax.set_ylabel(r'$F_{\lambda}\,\rm{(10^{-17}\,erg\,s^{-1}\,cm^{-2}\, \AA^{-1})}$',
fontsize=16)
ax3.set_xlabel(r'Restframe wavelength $(\AA)$', fontsize=16)
ax1.set_xlim(3860, 4630)
ax3.set_xlim(6420, 6780)
ax2.set_xlim(4750, 5230)
ax1.set_ylim(norm*np.min(gas[sel1]), norm*np.max(data[sel1])*1.05)
ax2.set_ylim(norm*np.min(gas[sel2]), norm*np.max(data[sel2])*1.05)
ax3.set_ylim(norm*np.min(gas[sel3]), norm*np.max(data[sel3])*1.05)
fig1.savefig('%s_starlight.pdf' %(self.inst))
plt.close(fig1)
fig2 = plt.figure(figsize = (8,5))
fig2.subplots_adjust(bottom=0.14, top=0.99, left=0.12, right=0.98)
ax = fig2.add_subplot(1, 1, 1)
ax.plot(datawl, 0*datawl, '--', color ='grey')
ax.plot(datawl, norm*gas, '-', color ='black')
ax.plot(datawl, norm*data, '-', color ='firebrick', lw=2)
ax.plot(starwl, norm*starfit, '-', color ='green')
ax.set_ylabel(r'$F_{\lambda}\,\rm{(10^{-17}\,erg\,s^{-1}\,cm^{-2}\, \AA^{-1})}$',
fontsize=16)
ax.set_xlabel(r'Restframe wavelength $(\AA)$', fontsize=16)
ax.set_xlim(np.min(datawl[sel0]), np.max(datawl[sel0]))
ax.set_ylim(norm*np.min(gas[sel0]), norm*np.max(data[sel0])*1.05)
fig2.savefig('%s_starlight_all.pdf' %(self.inst))
plt.close(fig2)
return datawl, data, stars, norm, success, v0, vd, av
def runStar(s3d, ascii, starres = None, minwl=None, maxwl=None,
plot=0, verbose=1, rm=True, bases='ALL'):
""" Convinience function to run starlight on an ascii file returning its
spectral fit and bring it into original rest-frame wavelength scale again
Parameters
----------
ascii : str
Filename of spectrum in Format WL SPEC ERR FLAG
Returns
----------
data : np.array (array of zeros if starlight not sucessfull)
Original data (resampled twice, to check for accuracy)
star : np.array (array of zeros if starlight not sucessfull)
Starlight fit
success : int
Flag whether starlight was executed successully
"""
if verbose == 1:
logger.info('Starting starlight')
    if starres is None:
starres = '%s_star_res.txt' %(s3d.inst)
if os.path.isfile(starres):
os.remove(starres)
t1 = time.time()
sl = StarLight(filen=ascii, bases=bases, minwl=minwl, maxwl=maxwl)
datawl, data, stars, norm, success, v0, vd, av =\
sl.modOut(plot=plot, rm=rm, minwl=minwl, maxwl=maxwl)
zerospec = np.zeros(s3d.wave.shape)
if success == 1:
if verbose == 1:
logger.info('Running starlight took %.2f s' %(time.time() - t1))
s = sp.interpolate.InterpolatedUnivariateSpline(datawl*(1+s3d.z),
data*1E3*norm/(1+s3d.z))
t = sp.interpolate.InterpolatedUnivariateSpline(datawl*(1+s3d.z),
stars*1E3*norm/(1+s3d.z))
return s(s3d.wave), t(s3d.wave), success, v0, vd, av
else:
        if verbose == 1:
logger.info('Starlight failed in %.2f s' %(time.time() - t1))
return zerospec, zerospec, success, v0, vd, av
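# Illustrative usage of runStar (a sketch; `cube` stands for an s3d-like object
# exposing .wave, .z and .inst, and the ascii filename is an assumption):
#   data, stars, success, v0, vd, av = runStar(cube, 'spec_x10_y10.txt', plot=1)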
def subStars(s3d, x, y, size=0, verbose=1,
inst='MUSE', bases='ALL', starres=None):
""" Convinience function to subtract a starlight fit based on a single
spectrum from many spaxels
Parameters
----------
x : integer
x-Index of region center
y : integer
y-Index of region center
size : integer
Size of square around center (x,y +/- size)
"""
    if starres is None:
starres = '%s_x%i_y%i_star_res.txt' %(s3d.inst, x, y)
if os.path.isfile(starres):
os.remove(starres)
wl, spec, err = s3d.extrSpec(x=x, y=y, size=size, verbose=0)
ascii = asciiout(s3d=s3d, wl=wl, spec=spec, err=err, frame='rest',
resample = 1, name='%s_%s_%s' %(x, y, size), fmt='txt')
data, stars, success, v0, vd, av = runStar(s3d, ascii, bases=bases, verbose=0)
f = open(starres, 'a')
f.write('%i\t%i\t%.1f\t%.1f\t%.3f\n' %(x, y, v0, vd, av))
f.close()
os.remove(ascii)
miny, maxy = max(0, y-size), min(s3d.leny-1, y+size+1)
minx, maxx = max(0, x-size), min(s3d.lenx-1, x+size+1)
xindizes = np.arange(minx, maxx, 1)
yindizes = np.arange(miny, maxy, 1)
zerospec = np.zeros(s3d.wave.shape)
if success == 1:
# rs = data/spec
# logger.info('Resampling accuracy %.3f +/- %.3f' \
# %(np.nanmedian(rs), np.nanstd(rs[1:-1])))
for xindx in xindizes:
for yindx in yindizes:
wl, spec, err = s3d.extrSpec(x=xindx, y=yindx, verbose=verbose)
# Renormalize to actual spectrum
substars = np.nanmedian(spec/data)*stars
# Overwrite starcube with fitted values
s3d.starcube[:, yindx, xindx] = substars
else:
for xindx in xindizes:
for yindx in yindizes:
# No sucess
s3d.starcube[:, yindx, xindx] = zerospec
return
def subAllStars(s3d, dx=2, nc=None, x1=None, x2=None, y1=None, y2=None,
bases = 'FEW'):
"""
    Convenience function to subtract starlight fits on the full cube. Can work
    with subcubes defined by x1, x2, y1, y2. Resamples by a factor of 2*dx+1.
"""
logger.info("Starting starlight on full cube with %i cores" %s3d.ncores)
logger.info("This might take a bit")
t1 = time.time()
    if x1 is not None and x2 is not None:
logger.info("X-range: %i to %i" %(x1, x2))
xindizes = np.arange(x1, x2, 2*dx+1)
else:
xindizes = np.arange(dx, s3d.lenx, 2*dx+1)
    if y1 is not None and y2 is not None:
logger.info("Y-range: %i to %i" %(y1, y2))
yindizes = np.arange(y1, y2, 2*dx+1)
else:
yindizes = np.arange(dx, s3d.leny, 2*dx+1)
    starres = '%s_x%i_%i_y%i_%i_star_res.txt' \
        %(s3d.inst, xindizes[0], xindizes[-1], yindizes[0], yindizes[-1])
if os.path.isfile(starres):
os.remove(starres)
for xindx in xindizes:
for yindx in yindizes:
subStars(s3d, xindx, yindx, dx,
bases=bases, verbose=0, starres=starres)
cubeout(s3d, s3d.starcube, err=s3d.erro, name='star')
cubeout(s3d, s3d.data-s3d.starcube, err=s3d.erro, name='gas')
logger.info("This took %.2f h" %((time.time()-t1)/3600.))
|
mit
| 2,258,833,824,051,038,000
| 36.317961
| 101
| 0.499382
| false
| 3.252591
| true
| false
| false
|
asm-products/sim
|
sim/settings.py
|
1
|
3046
|
"""
Django settings for sim project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '#nf4&)bxyk9ybkd&$f=!#a&g9-+hexue%6^=!s9!m=8&u-!i1%'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = DEBUG
TEMPLATE_DIRS = [os.path.join(BASE_DIR, 'templates')]
ALLOWED_HOSTS = []
ADMINS = (
    ('Nicolas Joseph', 'nicolas@nicolasjoseph.com'),
)
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'djangular',
'rest_framework',
'public',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'sim.urls'
WSGI_APPLICATION = 'sim.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'simdb',
'USER': 'sim',
'PASSWORD': 'devsim',
'HOST': '192.168.33.10',
'PORT': '5432'
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Europe/Paris'
USE_I18N = True
USE_L10N = True
USE_TZ = True
APPEND_SLASH = True  # Appends a trailing slash to URLs that lack one
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATICFILES_DIRS = (
("css", os.path.join(BASE_DIR, "stylesheets")),
("js", os.path.join(BASE_DIR, "js")),
("img", os.path.join(BASE_DIR, "img")),
("bw", os.path.join(BASE_DIR, "bower_components")),
)
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, "generatedFiles")
REST_FRAMEWORK = {
# Use hyperlinked styles by default.
# Only used if the `serializer_class` attribute is not set on a view.
'DEFAULT_MODEL_SERIALIZER_CLASS':
'rest_framework.serializers.HyperlinkedModelSerializer',
# Use Django's standard `django.contrib.auth` permissions,
# or allow read-only access for unauthenticated users.
'DEFAULT_PERMISSION_CLASSES': [
'rest_framework.permissions.DjangoModelPermissionsOrAnonReadOnly'
]
}
|
bsd-2-clause
| 1,686,597,243,547,752,400
| 25.034188
| 73
| 0.69107
| false
| 3.216473
| false
| false
| false
|
vaal-/il2_stats
|
src/stats/online.py
|
1
|
2169
|
from copy import deepcopy
import logging
from mission_report import parse_mission_log_line
from mission_report.constants import COUNTRIES_COALITION_DEFAULT, COALITION_ALIAS
from stats.models import PlayerOnline, Profile
logger = logging.getLogger('online')
_countries = deepcopy(COUNTRIES_COALITION_DEFAULT)
def update_online(m_report_files, online_timestamp):
for file_path in m_report_files:
if file_path.stat().st_mtime > online_timestamp:
online_timestamp = file_path.stat().st_mtime
with file_path.open() as f:
for line in f:
                    # ignore "bad" lines that lack an AType field
if 'AType' not in line:
logger.warning('ignored bad string: [{}]'.format(line))
continue
try:
data = parse_mission_log_line.parse(line)
except parse_mission_log_line.UnexpectedATypeWarning:
logger.warning('unexpected atype: [{}]'.format(line))
continue
atype_id = data.pop('atype_id')
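                # Inferred from the branches below: atype 10 marks a player
                # joining (upsert), 21 a player leaving (delete), and 0 the
                # mission start carrying the country/coalition mapping.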
if atype_id == 10:
try:
profile = Profile.objects.get(uuid=data['account_id'])
except Profile.DoesNotExist:
profile = None
PlayerOnline.objects.update_or_create(uuid=data['account_id'], defaults={
'nickname': data['name'],
'coalition': _countries[data['country_id']],
'profile': profile,
})
elif atype_id == 21:
PlayerOnline.objects.filter(uuid=data['account_id']).delete()
elif atype_id == 0:
for country, coalition in data['countries'].items():
_countries[country] = COALITION_ALIAS[coalition]
return online_timestamp
def cleanup_online():
PlayerOnline.objects.all().delete()
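# Illustrative usage (a sketch; `report_files` is assumed to be an iterable of
# pathlib.Path objects pointing at mission report logs):
#   timestamp = 0.0
#   timestamp = update_online(report_files, timestamp)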
|
mit
| -3,861,800,965,770,720,000
| 38.45283
| 97
| 0.506996
| false
| 4.561702
| false
| false
| false
|
Samsung/ADBI
|
idk/cachereader/debuginfo.py
|
1
|
7463
|
import sqlite3
import os.path
from .cfa import CallFrameAddress
from .files import Files
from .framepointers import Framepointers
from .function import Functions
from .insnset import InsnSet
from .lines import Lines
from .location import Locations
from .types import Types
from .variables import Variables
from .symbols import Symbols
from cachebuilder import DebugInfo as DebugInfoWriter
class DebugInfo:
def __init__(self, path, conn):
self.conn = conn
self.path = path
self.cfa = CallFrameAddress(self)
self.files = Files(self)
self.framepointers = Framepointers(self)
self.functions = Functions(self)
self.insnset = InsnSet(self)
self.lines = Lines(self)
self.locations = Locations(self)
self.types = Types(self)
self.variables = Variables(self)
self.symbols = Symbols(self)
@classmethod
def loadcached(cls, path, dbpath=None):
'''Load a new cache for the given file.'''
dbpath = dbpath or path + '.ac'
def get_file_time(path):
'''Get the modification time of the given file.'''
try:
return os.path.getmtime(path)
except OSError:
return 0
if not os.path.isfile(path):
raise IOError('Binary file does not exist: %s.' % path)
if not os.path.isfile(dbpath):
raise ValueError('No cache file exists for %s.' % path)
if get_file_time(dbpath) < get_file_time(path):
raise ValueError('Cache older than binary.')
return cls(path, sqlite3.connect(dbpath))
@classmethod
def load(cls, path, dbpath=None, store=True):
'''Load or create a debug cache for the given file.'''
try:
return cls.loadcached(path, dbpath)
except ValueError:
with open(path, 'rb') as elf:
writer = DebugInfoWriter(elf)
if store:
writer.store(dbpath)
return cls(path, writer.cache)
def get_CFA_expression(self, addr):
return self.cfa[addr]
def close(self):
self.conn.close()
def query_db(self, query, *args):
'''Query the database and yield rows as tuples or single objects.'''
for e in self.conn.execute(query, tuple(args)):
if len(e) == 1:
yield e[0]
else:
yield e
    def query_db_one(self, query, *args):
        '''Query the database and return one matching row as tuple or single object.'''
        for e in self.conn.execute(query, tuple(args)):
            if len(e) == 1:
                return e[0]
            return e
        return None
def iter_traceable_lines(self, filename):
'''Yield line-address pairs of traceable lines in the given file.'''
query = '''select locations.line, lines.addr
from locations join lines
on locations.id == lines.loc
where file=(select id from files where path=?)'''
return self.query_db(query, filename)
def func2addr(self, filename, fn):
'''Get function entry address.'''
if filename:
filename = self.files.expand(filename)
query = '''select lo from functions join locations
on locations.id == functions.loc
where functions.name = ?
and locations.file = (select id from files where path = ?)'''
ret = self.query_db(query, fn, filename)
else:
ret = self.query_db('select lo from functions where name = ?', fn)
ret = set(ret)
        if len(ret) == 1:
            return ret.pop()
        elif ret:
            raise ValueError('ambiguous function name %s. Found at: %s' % (fn, ', '.join([hex(addr) for addr in ret])))
        else:
            raise ValueError('no such function: %s.' % fn)
def sym2addr(self, name, symbol_type=None):
'''Get symbol entry address'''
if symbol_type:
ret = self.query_db('select value from symbols where name = ? and type = ?', name, symbol_type)
else:
ret = self.query_db('select value from symbols where name = ?', name)
ret = set(ret)
        if len(ret) == 1:
            return ret.pop()
        elif ret:
            raise ValueError('multiple symbols with name %s. addresses: %s' % (name, ', '.join([hex(value) for value in ret])))
        else:
            raise ValueError('no such symbol: %s.' % name)
def line2addr(self, path, line):
path = self.files.expand(path)
query = '''select lines.addr from lines join locations
on lines.loc == locations.id
where locations.line = ?
and locations.file = (select id from files where path = ?)'''
ret = self.query_db(query, line, path)
ret = list(ret)
if len(ret) == 1:
return ret.pop()
elif ret:
raise ValueError('location ambiguous: %s:%i.' % (self.files.simplify(path), line))
else:
raise ValueError('location invalid or not traceable: %s:%i.' % (self.files.simplify(path), line))
def get_addr(self, spec, use_symbols=False):
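        # Accepted specs, as parsed below: '*ADDR' (literal address),
        # 'file:line', 'file:function', or 'function[+HEXOFFSET]'.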
spec = spec.strip()
if spec.startswith('*'):
return int(spec[1:], 0)
colon_idx = spec.rfind(':')
if colon_idx == -1:
offset = 0
offset_idx = spec.rfind('+')
if offset_idx > -1:
offset = int(spec[offset_idx + 1:], 16)
spec = spec[:offset_idx]
# function
func = spec.strip()
if use_symbols:
return self.sym2addr(func, 'STT_FUNC') + offset
else:
return self.func2addr(None, func) + offset
else:
filename = spec[:colon_idx]
linefunc = spec[colon_idx + 1:]
try:
line = int(linefunc)
except ValueError:
func = linefunc.strip()
return self.func2addr(filename, func)
return self.line2addr(filename, line)
def get_datatype(self, spec):
pass
def iter_vars(self, address):
return (x[0] for x in self.conn.execute('select name from addr2vars where lo <= ? < hi', (address,)))
def iter_locals(self, address):
idx = self.addr2func_id(address)
if not idx:
return
return (x[0] for x in self.conn.execute('select distinct name from vars2func join vars on vars2func.var = vars.id where func = ?', (idx,)))
def addr2sym_id(self, address):
return self.conn.execute('select id from symbols where value <= ? and ? < value + size', (address, address,))
def addr2func(self, address):
ret = self.conn.execute('select func from addr2func where lo <= ? < hi', (address,))
if ret:
return ret[0]
def addr2func_id(self, address):
ret = self.conn.execute('select id from addr2func where lo <= ? < hi', (address,)).fetchone()
if ret:
return ret[0]
def get_func_range(self, address):
ret = self.conn.execute('select lo, hi from addr2func where lo <= ? < hi', (address,)).fetchone()
return ret
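# Illustrative usage (a sketch; the binary path and line spec are assumptions):
#   di = DebugInfo.load('/path/to/binary')
#   addr = di.get_addr('main.c:42')
#   di.close()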
|
apache-2.0
| -4,911,396,537,872,181,000
| 32.022124
| 147
| 0.554201
| false
| 4.114112
| false
| false
| false
|
knowledgecommonsdc/kcdc3
|
kcdc3/apps/pinata/migrations/0008_auto__add_field_page_template__chg_field_page_status.py
|
1
|
5497
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Page.template'
db.add_column('pinata_page', 'template',
self.gf('django.db.models.fields.CharField')(default='basic.html', max_length=48),
keep_default=False)
# Changing field 'Page.status'
db.alter_column('pinata_page', 'status', self.gf('django.db.models.fields.CharField')(max_length=9))
def backwards(self, orm):
# Deleting field 'Page.template'
db.delete_column('pinata_page', 'template')
# Changing field 'Page.status'
db.alter_column('pinata_page', 'status', self.gf('django.db.models.fields.CharField')(max_length=48))
models = {
'pinata.notice': {
'Meta': {'ordering': "['sort_order', 'title']", 'object_name': 'Notice'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'live': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'main_text': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'sort_order': ('django.db.models.fields.IntegerField', [], {'default': '50', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'pinata.page': {
'Meta': {'ordering': "['path']", 'object_name': 'Page'},
'featured': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'main_text': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['pinata.Page']", 'null': 'True', 'blank': 'True'}),
'path': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}),
'short_title': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'sidebar_text': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'sort_order': ('django.db.models.fields.IntegerField', [], {'default': '50', 'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'PUBLISHED'", 'max_length': '9'}),
'teaser': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'template': ('django.db.models.fields.CharField', [], {'default': "'basic.html'", 'max_length': '48'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'pinata.pressclipping': {
'Meta': {'ordering': "['date']", 'object_name': 'PressClipping'},
'date': ('django.db.models.fields.DateField', [], {'blank': 'True'}),
'destination_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'excerpt': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'main_text': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'publication': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'PUBLISHED'", 'max_length': '9'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'pinata.slide': {
'Meta': {'ordering': "['sort_order', 'title']", 'object_name': 'Slide'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'live': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'main_text': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'sort_order': ('django.db.models.fields.IntegerField', [], {'default': '50', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'pinata.sponsor': {
'Meta': {'ordering': "['group', 'sort_order', 'title']", 'object_name': 'Sponsor'},
'destination_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'group': ('django.db.models.fields.CharField', [], {'default': "'B'", 'max_length': '3'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'main_text': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'sort_order': ('django.db.models.fields.IntegerField', [], {'default': '50', 'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'PUBLISHED'", 'max_length': '9'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'})
}
}
complete_apps = ['pinata']
|
mit
| 2,237,524,497,699,989,200
| 62.930233
| 136
| 0.541022
| false
| 3.609324
| false
| false
| false
|
ni/nifpga-python
|
nifpga/status.py
|
1
|
14615
|
"""
A set of status exception classes to be used when an NiFpga
function returns either a warning or error status.
Use check_status() to raise an appropriate exception if necessary.
Error and Warning exception class names are auto-generated from the
strings in 'codeToString' in this file.
For example, handle a fatal error like this:
>>> @check_status('frob', ['foo', 'bar', 'baz'])
... def frob(foo, bar, baz):
... return -61141
...
>>> try:
... frob(0, 1, 2)
... except FpgaBusyError as e:
... print(e) # doctest: +NORMALIZE_WHITESPACE
Error: FpgaBusy (-61141) when calling 'frob' with arguments:
foo: 0x0
bar: 0x1
baz: 0x2
Or handle a warning like this:
>>> @check_status('frob', ['foo', 'bar', 'baz'])
... def frob(foo, bar, baz):
... return 61003
...
>>> with warnings.catch_warnings(record=True) as w:
... frob(0, 1, 2)
... print(w[0].message) # doctest: +NORMALIZE_WHITESPACE
Warning: FpgaAlreadyRunning (61003) when calling 'frob' with arguments:
foo: 0x0
bar: 0x1
baz: 0x2
Copyright (c) 2017 National Instruments
"""
import functools
import warnings
def _raise_or_warn_if_nonzero_status(status, function_name, argument_names, *args):
"""
Helper for the 'check_status' decorator.
Raises the proper ErrorStatus subclass or warns the proper WarnStatus
subclass if status is not 0 (success).
function_name: the name of the function, e.g. "NiFpga_ConfigureFifo"
Used to make the exception message more useful.
argument_names: list of names of the arguments to the function
e.g. ["session", "fifo"]
args: the arguments that were passed to the function
'argument_names' and 'args' are used to make the exception message
more useful, and to find the arguments after catching an exception if
the function fails (e.g. 'e.get_args()["session"]').
"""
if status == 0:
return
if status in codes_to_exception_classes:
if status < 0:
raise codes_to_exception_classes[status](function_name, argument_names, *args)
else:
warning = codes_to_exception_classes[status](function_name, argument_names, *args)
warnings.warn(warning)
else:
if status < 0:
raise UnknownError(status, function_name, argument_names, *args)
else:
warnings.warn(UnknownWarning(status, function_name, argument_names, *args))
def check_status(function_name, argument_names):
"""
Decorator (that takes arguments) to call a function and raise
an appropriate subclass of Status if the
returned status is not zero.
Also validates that the number of parameters passed to the
function is correct.
function_name: the name of the function, e.g. "NiFpga_ConfigureFifo"
Used to make the exception message more useful.
argument_names: list of names of the arguments to the function
e.g. ["session", "fifo"]
Used to make the exception message more useful, and to find the
arguments after catching an exception if the function fails
(e.g. 'e.get_args()["session"]').
"""
def decorator(function):
@functools.wraps(function)
def internal(*args):
if hasattr(function, "argtypes") and len(args) != len(function.argtypes):
raise TypeError("%s takes exactly %u arguments (%u given)"
% (function_name, len(function.argtypes), len(args)))
status = function(*args)
_raise_or_warn_if_nonzero_status(status, function_name, argument_names, args)
return internal
return decorator
class Status(BaseException):
def __init__(self, code, code_string, function_name, argument_names,
function_args):
""" Base exception class for when an NiFpga function returns a non-zero
status.
Args:
code (int): e.g. -52000
code_string (str) : e.g. 'MemoryFull'
function_name (string): the function that returned the error or
warning status. e.g. 'NiFpga_ConfigureFifo'
argument_names (list): a list of the names of the arguments to the
function. e.g. ["session", "fifo", "requested depth"]
function_args (tuple) : a tuple of the arguments passed to the
function. The order of argument_names should correspond to the
order of function_args. e.g. '(session, fifo, depth)'
"""
self._code = code
self._code_string = code_string
self._function_name = function_name
self._named_args = []
for i, arg in enumerate(function_args):
self._named_args.append(
{
"name": argument_names[i],
"value": arg
})
# this is also necessary to properly reconstruct the object when
# passing it between processes
super(Status, self).__init__(self._code,
self._code_string,
self._function_name,
self._named_args)
def get_code(self):
return self._code
def get_code_string(self):
return self._code_string
def get_function_name(self):
""" Returns a string for the functions name, """
return self._function_name
def get_args(self):
"""
Returns a dictionary of argument names to argument values of
the function that caused the exception to be raised.
Returns:
arg_dict (dictionary): Converts ctypes args to their actual values
instead of the ctypes instance. e.g.
.. code-block:: python
{
"session":0x10000L,
"fifo" : 0x0,
...}
"""
arg_dict = {}
for arg in self._named_args:
# ctypes types all have a member named 'value'.
value = arg["value"].value if hasattr(arg["value"], "value") else arg["value"]
arg_dict[arg["name"]] = value
return arg_dict
def _stringify_arg(self, arg):
"""
Converts a function argument to a readable string for debugging.
Stringify ctypes values, instead of the ctypes instance itself.
Adds single quotes around strings (so it's obvious they are strings).
Stringify numbers as hex to make it easier to decode
bit packed sessions, attributes, etc.
"""
# ctypes types all have a member named 'value'.
if hasattr(arg, "value"):
return self._stringify_arg(arg.value)
if isinstance(arg, str):
return "'%s'" % arg
try:
return hex(arg)
except TypeError:
return str(arg)
def __str__(self):
"""
Returns the function name, status code, and arguments used.
Example:
.. code-block:: python
Error: FifoTimeout (-50400) when calling 'Dummy Function Name' with
arguments:
session: 0xbeef
fifo: 0xf1f0L
data: 0xda7aL
number of elements: 0x100L
timeout ms: 0x200L
elements remaining: 0x300L
a bogus string argument: 'I am a string'
"""
arg_string = ""
for arg in self._named_args:
arg_string += "\n\t%s: %s" % (arg["name"], self._stringify_arg(arg["value"]))
return "%s: %s (%d) when calling '%s' with arguments:%s" \
% ("Error" if self._code < 0 else "Warning",
self._code_string,
self._code,
self._function_name,
arg_string)
class WarningStatus(Status, RuntimeWarning):
"""
Base warning class for when an NiFpga function returns a warning (> 0)
status.
Useful if trying to catch warning and error status exceptions separately
"""
def __init__(self, code, code_string, function_name, argument_names,
function_args):
super(WarningStatus, self).__init__(code, code_string, function_name,
argument_names, function_args)
class ErrorStatus(Status, RuntimeError):
"""
Base Error class for when an NiFpga function returns an error (< 0)
status.
Useful if trying to catch warning and error status exceptions separately
"""
def __init__(self, code, code_string, function_name, argument_names,
function_args):
super(ErrorStatus, self).__init__(code, code_string, function_name,
argument_names, function_args)
class UnknownWarning(WarningStatus):
def __init__(self, code, function_name, argument_names, function_args):
super(UnknownWarning, self).__init__(code=code,
code_string="Unknown code",
function_name=function_name,
argument_names=argument_names,
function_args=function_args)
class UnknownError(ErrorStatus):
def __init__(self, code, function_name, argument_names, function_args):
super(UnknownError, self).__init__(code=code,
code_string="Unknown code",
function_name=function_name,
argument_names=argument_names,
function_args=function_args)
# Define error codes and their names.
# Each code in this list will be codegened into two classes, e.g.:
# FifoTimeoutError (for code -50400)
# FifoTimeoutWarning (for code 50400)
error_codes = [
(-50400, "FifoTimeout"),
(-50405, "TransferAborted"),
(-52000, "MemoryFull"),
(-52003, "SoftwareFault"),
(-52005, "InvalidParameter"),
(-52006, "ResourceNotFound"),
(-52007, "OperationTimedOut"),
(-52008, "OSFault"),
(-52010, "ResourceNotInitialized"),
(-52012, "EndOfData"),
(-52013, "ObjectNameCollision"),
(-61003, "FpgaAlreadyRunning"),
(-61018, "DownloadError"),
(-61024, "DeviceTypeMismatch"),
(-61046, "CommunicationTimeout"),
(-61060, "IrqTimeout"),
(-61070, "CorruptBitfile"),
(-61072, "BadDepth"),
(-61073, "BadReadWriteCount"),
(-61083, "ClockLostLock"),
(-61141, "FpgaBusy"),
(-61200, "FpgaBusyFpgaInterfaceCApi"),
(-61201, "FpgaBusyScanInterface"),
(-61202, "FpgaBusyFpgaInterface"),
(-61203, "FpgaBusyInteractive"),
(-61204, "FpgaBusyEmulation"),
(-61211, "ResetCalledWithImplicitEnableRemoval"),
(-61212, "AbortCalledWithImplicitEnableRemoval"),
(-61213, "CloseAndResetCalledWithImplicitEnableRemoval"),
(-61214, "ImplicitEnableRemovalButNotYetRun"),
(-61215, "RunAfterStoppedCalledWithImplicitEnableRemoval"),
(-61216, "GatedClockHandshakingViolation"),
(-61217, "RegionsOutstandingForSession"),
(-61219, "ElementsNotPermissibleToBeAcquired"),
(-61252, "FpgaBusyConfiguration"),
(-61253, "CloseCalledWithResetNotSupported"),
(-61254, "RunAfterStoppedNotSupported"),
(-61499, "InternalError"),
(-63003, "TotalDmaFifoDepthExceeded"),
(-63033, "AccessDenied"),
(-63038, "HostVersionMismatch"),
(-63040, "RpcConnectionError"),
(-63041, "RpcServerError"),
(-63042, "NetworkFault"),
(-63043, "RpcSessionError"),
(-63044, "RpcServerMissing"),
(-63045, "FeatureNotSupportedOverRpc"),
(-63046, "UsingRemoteSessionForLocalTarget"),
(-63050, "TriggerReserved"),
(-63051, "TriggerNotReserved"),
(-63080, "BufferInvalidSize"),
(-63081, "BufferNotAllocated"),
(-63082, "FifoReserved"),
(-63083, "FifoElementsCurrentlyAcquired"),
(-63084, "MisalignedAccess"),
(-63085, "ControlOrIndicatorTooLarge"),
(-63086, "OperationNotSupportedWhileStarted"),
(-63087, "TypesDoNotMatch"),
(-63088, "OutOfFifoRegions"),
(-63101, "BitfileReadError"),
(-63106, "SignatureMismatch"),
(-63107, "IncompatibleBitfile"),
(-63150, "HardwareFault"),
(-63170, "PowerShutdown"),
(-63171, "ThermalShutdown"),
(-63180, "InvalidAliasName"),
(-63181, "AliasNotFound"),
(-63182, "InvalidDeviceAccess"),
(-63183, "InvalidPort"),
(-63184, "ChildDeviceNotInserted"),
(-63192, "InvalidResourceName"),
(-63193, "FeatureNotSupported"),
(-63194, "VersionMismatch"),
(-63195, "InvalidSession"),
(-63196, "InvalidAttribute"),
(-63198, "OutOfHandles"),
]
# create an exception class for each error code and add to dictionary
# ie FifoTimeoutWarning, FifoTimeoutError
codes_to_exception_classes = {}
_g = globals()
for code, code_string in error_codes:
# we need introduce a scope, otherwise code, and code_string
# will all reference the same value.
def add_classes(code, code_string):
classname = code_string + 'Error'
def __init__(self, function_name, argument_names, function_args):
ErrorStatus.__init__(self,
code=code,
code_string=code_string,
function_name=function_name,
argument_names=argument_names,
function_args=function_args)
error_class = type(classname, (ErrorStatus,),
{'__init__': __init__, 'CODE': code})
codes_to_exception_classes[code] = error_class
# copy the exception type into module globals
_g[error_class.__name__] = error_class
classname = code_string + 'Warning'
def __init__(self, function_name, argument_names, function_args):
WarningStatus.__init__(self,
code=-code,
code_string=code_string,
function_name=function_name,
argument_names=argument_names,
function_args=function_args)
warning_class = type(classname, (WarningStatus,),
{'__init__': __init__, 'CODE': -code})
codes_to_exception_classes[-code] = warning_class
# copy the warning type into module globals
_g[warning_class.__name__] = warning_class
add_classes(code, code_string)
|
mit
| 8,977,066,302,177,029,000
| 36.474359
| 94
| 0.584536
| false
| 4.137882
| false
| false
| false
|
chienlieu2017/it_management
|
odoo/addons/l10n_multilang/models/account.py
|
1
|
1875
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import fields, models
# In this file, we mostly add the tag translate=True on existing fields that we now want to be translated
class AccountAccountTag(models.Model):
_inherit = 'account.account.tag'
name = fields.Char(translate=True)
class AccountAccountTemplate(models.Model):
_inherit = 'account.account.template'
name = fields.Char(translate=True)
class AccountAccount(models.Model):
_inherit = 'account.account'
name = fields.Char(translate=True)
class AccountTax(models.Model):
_inherit = 'account.tax'
name = fields.Char(translate=True)
class AccountTaxTemplate(models.Model):
_inherit = 'account.tax.template'
name = fields.Char(translate=True)
class AccountChartTemplate(models.Model):
_inherit = 'account.chart.template'
_order = 'name'
name = fields.Char(translate=True)
spoken_languages = fields.Char(string='Spoken Languages', help="State here the languages for which the translations of templates could be loaded at the time of installation of this localization module and copied in the final object when generating them from templates. You must provide the language codes separated by ';'")
class AccountFiscalPosition(models.Model):
_inherit = 'account.fiscal.position'
name = fields.Char(translate=True)
note = fields.Text(translate=True)
class AccountFiscalPositionTemplate(models.Model):
_inherit = 'account.fiscal.position.template'
name = fields.Char(translate=True)
note = fields.Text(translate=True)
class AccountJournal(models.Model):
_inherit = 'account.journal'
name = fields.Char(translate=True)
class AccountAnalyticAccount(models.Model):
_inherit = 'account.analytic.account'
name = fields.Char(translate=True)
|
gpl-3.0
| 3,139,817,623,212,854,300
| 26.173913
| 327
| 0.7344
| false
| 3.914405
| false
| false
| false
|
google-research/google-research
|
goemotions/inspect_output_layer_weights.py
|
1
|
2106
|
# coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Save BERT output layer weights for inspection."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from absl import app
from absl import flags
import numpy as np
import tensorflow as tf
FLAGS = flags.FLAGS
flags.DEFINE_string("checkpoint_dir", None, "Model checkpoint directory.")
flags.DEFINE_string("tensor_names", "output_weights,new_output_weights",
"Comma separated list of tensor names to save.")
def save_tensor(reader, name):
tensor = reader.get_tensor(name)
np.save(os.path.join(FLAGS.checkpoint_dir, name + ".npy"), tensor)
def main(_):
checkpoint = tf.train.latest_checkpoint(FLAGS.checkpoint_dir)
reader = tf.train.NewCheckpointReader(checkpoint)
for name in FLAGS.tensor_names.split(","):
save_tensor(reader, name)
if __name__ == "__main__":
app.run(main)
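# Example invocation (illustrative; the checkpoint path is an assumption):
#   python inspect_output_layer_weights.py --checkpoint_dir=/tmp/model \
#       --tensor_names=output_weights,new_output_weights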
|
apache-2.0
| -7,049,970,231,258,936,000
| 31.4
| 74
| 0.740266
| false
| 3.871324
| false
| false
| false
|
djhshih/genomic
|
utils/genompy/genompy/randomize.py
|
1
|
1657
|
#!/usr/bin/env python3
import random
from . import cn
from . import gp
def overlap_genes_in_regions(regions, geneDb, geneSets, overlaps, genes=None):
'''Track overlap of genes in regions with genes in each gene set, in place'''
# genes and overlaps will be modified in place
# store all genes from all regions
if genes is None:
genes = set()
for region in regions:
region_genes = geneDb.genes(region)
for gene in region_genes:
genes.add(gene)
overlap_genes_in_set(genes, geneSets, overlaps)
def overlap_genes_in_set(genes, geneSets, overlaps):
'''Track overlap of genes with each gene set, in place'''
# overlaps will be modified in place
# determine overlap of genes with gene sets
for name, gs in geneSets.sets.items():
gene_set = gs.genes
# count overlap
overlap = 0
for gene in gene_set:
if gene in genes:
overlap += 1
overlaps[name].append(overlap)
def randomized_genes(genes, universe):
'''Return set of re-sampled genes from universe'''
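    # Note: sampling is with replacement; duplicates collapse in the set, so
    # the result may contain fewer than len(genes) genes.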
new_genes = set()
for i in range(len(genes)):
new_genes.add( random.choice(universe) )
return new_genes
def randomize_regions(regions, chrlens, rand_chrom=True):
'''Randomize regions in place'''
for region in regions:
randomize_region(region, chrlens, rand_chrom)
def randomize_region(region, chrlens, rand_chrom=True):
'''Randomize region in place'''
if rand_chrom:
# randomly choose a new chromosome
region.chromosome = random.choice([x for x in chrlens.keys()])
# randomize start position
size = region.size
max_end = chrlens[region.chromosome] - size + 1
region.start = random.randint(0, max_end)
region.end = region.start + size - 1
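# Illustrative usage (a sketch; `regions` is an iterable of region objects and
# `chrlens` maps chromosome name -> chromosome length, both assumptions):
#   randomize_regions(regions, chrlens, rand_chrom=True)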
|
gpl-3.0
| -5,363,245,007,392,990,000
| 25.301587
| 78
| 0.715751
| false
| 3.16826
| false
| false
| false
|
arshbot/Slack-Analytics
|
scripts/help.py
|
1
|
1231
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# help script that details the list of commands
def help(response):
text = "PantherBot works by prefacing commands with \"!\"\n"
text += "Commands:\n"
text += "```!help\n"
text += "!coin\n"
text += "!helloworld\n"
text += "!version\n"
text += "!fortune\n"
text += "!flip <Optional:String>\n"
text += "!unflip <Optional:String>\n"
text += "!rage <Optional:String>\n"
text += "!catfact\n"
text += "!pugbomb <int>\n"
text += "!taskme\n"
text += "!poll <begin/start/end/results> [arguments followed by a `;`]"
text += "!talk <String>\n"
text += "\"Hey PantherBot\"```\n"
text += "Try saying `Hey PantherBot` or `!coin`"
motext = "Admins are able to use admin commands prefaced with \"$\"\n"
motext += "```$calendar add ; <Title> ; <Date in format YYYY-MM-DD> ; <Start time in format HH:mm> ; <End time in format HH:mm> ; <Description> ; <Location>\n" # noqa: 501
motext += "$admin <reconnect/update>\n"
motext += "$log <true/false> <channels>```\n"
motext += "Got suggestions for PantherBot? Fill out our typeform to leave your ideas! https://goo.gl/rEb0B7" # noqa: 501
return [text, motext]
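# Illustrative usage (a sketch; the Slack client and channel are assumptions):
#   text, motext = help(None)
#   slack_client.api_call("chat.postMessage", channel="#general", text=text)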
|
mpl-2.0
| 8,409,772,346,828,189,000
| 40.033333
| 176
| 0.594639
| false
| 3.054591
| false
| false
| false
|
Ameriks/velo.lv
|
velo/team/views.py
|
1
|
20504
|
from django.contrib import messages
from django.http import Http404, HttpResponseRedirect
from django.utils import timezone
from django.views.generic import ListView, DetailView
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from difflib import get_close_matches
from django_tables2 import SingleTableView
from django.contrib.auth.mixins import LoginRequiredMixin
from extra_views import NamedFormsetsMixin, CreateWithInlinesView, UpdateWithInlinesView, InlineFormSet
import datetime
from velo.core.formsets import CustomBaseInlineFormSet
from velo.core.models import Competition
from velo.payment.utils import get_total
from velo.registration.models import Participant, Application
from velo.team.forms import MemberInlineForm, TeamForm
from velo.team.models import Team, Member, MemberApplication
from velo.team.tables import TeamTable, TeamMyTable
from velo.velo.mixins.forms import GetClassNameMixin
from velo.velo.mixins.views import SetCompetitionContextMixin, SingleTableViewWithRequest, RequestFormKwargsMixin, NeverCacheMixin
class TeamAppliedView(SetCompetitionContextMixin, ListView):
"""
    This class is used to display teams that have applied to the competition.
    This is an optimized view.
"""
model = Team
template_name = 'team/applied.html'
def get(self, *args, **kwargs):
self.set_competition(kwargs.get('pk'))
self.set_distances(only_w_teams=True) # Based on self.competition
self.set_distance(self.request.GET.get('distance', None))
return super(TeamAppliedView, self).get(*args, **kwargs)
def get_queryset(self):
queryset = super(TeamAppliedView, self).get_queryset()
queryset = queryset.filter(distance=self.distance, member__memberapplication__competition=self.competition, status__gte=0)
search = self.request.GET.get('search', None)
if search:
queryset = queryset.filter(title__icontains=search)
queryset = queryset.order_by('-is_featured', 'title',
'member__memberapplication__kind', 'member__memberapplication__participant__primary_number__number',)
queryset = queryset.values_list('id', 'title', 'is_featured',
'member__first_name', 'member__last_name', 'member__birthday',
'member__memberapplication__kind',
'member__memberapplication__participant__primary_number__number',
'member__memberapplication__participant_id',
)
return queryset
class TeamListView(SingleTableViewWithRequest):
model = Team
table_class = TeamTable
def get(self, *args, **kwargs):
self.set_competition(kwargs.get('pk'))
self.set_distances(only_w_teams=True) # Based on self.competition
self.set_distance(self.request.GET.get('distance', None))
return super(TeamListView, self).get(*args, **kwargs)
def get_context_data(self, **kwargs):
context = super(ListView, self).get_context_data(**kwargs)
table = self.get_table(request=self.request, request_kwargs=self.kwargs)
context[self.get_context_table_name(table)] = table
context.update({'competition': self.competition})
context.update({'distances': self.distances})
context.update({'distance_active': self.distance})
context.update({'banners': self.get_banners()})
return context
def get_queryset(self):
queryset = super(TeamListView, self).get_queryset()
queryset = queryset.filter(distance=self.distance, distance__competition_id__in=self.competition.get_ids(), status__gte=0)
if self.request.GET.get("search", None):
queryset = queryset.filter(title__icontains=self.request.GET.get("search", None))
return queryset
class TeamView(DetailView):
model = Team
pk_url_kwarg = 'pk2'
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context.update({'competition': Competition.objects.get(id=self.kwargs.get('pk'))})
context.update({'members': self.object.member_set.filter(status=Member.STATUS_ACTIVE).order_by('last_name')})
for member in context["members"]:
if Participant.objects.filter(slug=member.slug, is_shown_public=False).count():
member.first_name = member.last_name = _("Anonymized")
setattr(member, "not_public", True)
return context
class TeamMemberProfileView(DetailView):
model = Member
pk_url_kwarg = 'pk3'
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context.update({'competition': Competition.objects.get(id=self.kwargs.get('pk'))})
context.update({'members': self.object.team.member_set.filter(status=Member.STATUS_ACTIVE).order_by('last_name')})
if Participant.objects.filter(slug=context["member"].slug, is_shown_public=False).count():
context["member"].first_name = context["member"].last_name = _("Anonymized")
setattr(context["member"], "not_public", True)
for member in context["members"]:
if Participant.objects.filter(slug=member.slug, is_shown_public=False).count():
member.first_name = member.last_name = _("Anonymized")
setattr(member, "not_public", True)
return context
class MyTeamList(NeverCacheMixin, LoginRequiredMixin, SingleTableView):
model = Team
table_class = TeamMyTable
template_name = 'team/team_list_my.html'
paginate_by = 100
def get_queryset(self):
queryset = super().get_queryset()
queryset = queryset.filter(owner=self.request.user).select_related('distance', 'distance__competition', 'distance__competition__parent')
return queryset
class MemberInline(GetClassNameMixin, InlineFormSet):
can_order = False
model = Member
formset_class = CustomBaseInlineFormSet
form_class = MemberInlineForm
competition = None
fields = MemberInlineForm.Meta.fields
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if self.object:
self.competition = self.object.distance.competition
@property
def can_delete(self):
delete_date_obj = datetime.date.today()
if self.competition and self.competition.params:
delete_date = self.competition.params_dict.get('team_member_delete_final', None)
if delete_date:
delete_date_obj = datetime.datetime.strptime(delete_date, '%Y-%m-%d').date()
if datetime.date.today() <= delete_date_obj:
print('CAN DELETE')
return True
else:
print('CANNOT DELETE')
return False
@property
def extra(self):
if self.object and self.object.member_set.count() > 0:
return 0
else:
return 1
def get_formset_kwargs(self):
kwargs = super(MemberInline, self).get_formset_kwargs()
kwargs.update({'empty_form_class': self.form_class})
kwargs.update({'required': 1})
kwargs.update({'can_add_new': True})
kwargs.update({'max_num': self.competition.params_dict.get('team_member_count', 1000) if self.competition else 1000})
# Quick fix for women teams - there can only be 2 members in women teams.
if self.object and self.object.is_w:
kwargs.update({'max_num': 2})
kwargs.update({'queryset': Member.objects.filter(status=Member.STATUS_ACTIVE) })
return kwargs
def get_extra_form_kwargs(self):
kwargs = super(MemberInline, self).get_extra_form_kwargs()
kwargs.update({'request': self.request})
kwargs.update({'request_kwargs': self.kwargs})
return kwargs
class TeamCreateView(NeverCacheMixin, LoginRequiredMixin, RequestFormKwargsMixin, NamedFormsetsMixin, CreateWithInlinesView):
template_name = 'team/team_form.html'
inlines = [MemberInline, ]
inlines_names = ['member']
model = Team
form_class = TeamForm
def get_success_url(self):
return reverse('account:team_list')
class TeamUpdateView(NeverCacheMixin, LoginRequiredMixin, RequestFormKwargsMixin, NamedFormsetsMixin, UpdateWithInlinesView):
template_name = 'team/team_form.html'
inlines = [MemberInline, ]
inlines_names = ['member']
model = Team
form_class = TeamForm
pk_url_kwarg = 'pk2'
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
if not self.request.user.has_perm('team.change_member'):
competition = self.object.distance.competition
if competition.get_root().id == 1:
next_competition = self.object.distance.competition.children.filter(
competition_date__gt=timezone.now())[:1]
context.update({'next_competition': next_competition[0] if next_competition else None})
elif competition.competition_date and competition.competition_date > datetime.date.today():
context.update({'next_competition': competition})
return context
def get_success_url(self):
return reverse('account:team_list')
def get_queryset(self):
queryset = super(TeamUpdateView, self).get_queryset()
if not self.request.user.is_superuser:
queryset = queryset.filter(owner=self.request.user)
return queryset
def post(self, request, *args, **kwargs):
ret = super().post(request, *args, **kwargs)
if request.POST.get('submit_pay', None):
next_competition = None
competition = self.object.distance.competition
if competition.get_root().id == 1:
next_competition = self.object.distance.competition.children.filter(competition_date__gt=timezone.now())[:1]
elif competition.competition_date and competition.competition_date > datetime.date.today():
next_competition = [competition, ]
if next_competition:
next_competition = next_competition[0]
application = Application.objects.create(competition=next_competition, email=request.user.email)
for member in self.object.member_set.filter(status=Member.STATUS_ACTIVE):
price = None
total = get_total(next_competition, self.object.distance_id, member.birthday.year)
if total:
price = total.get('price_obj', None)
application.participant_set.create(first_name=member.first_name,
last_name=member.last_name,
country=member.country,
birthday=member.birthday,
ssn=member.ssn,
gender=member.gender,
competition=next_competition,
distance=self.object.distance,
team_name=self.object.title,
price=price
)
return HttpResponseRedirect(reverse('application', kwargs={'slug': application.code}))
return ret
class TeamApplyList(NeverCacheMixin, LoginRequiredMixin, RequestFormKwargsMixin, NamedFormsetsMixin, DetailView):
model = Team
template_name = 'team/team_apply_list.html'
pk_url_kwarg = 'pk2'
def get_queryset(self):
queryset = super(TeamApplyList, self).get_queryset()
queryset = queryset.filter(owner=self.request.user)
return queryset
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
competition = self.object.distance.competition
child_competitions = competition.get_children()
if child_competitions:
competitions = child_competitions
else:
competitions = (competition, )
final_competitions = []
for competition in competitions:
members = MemberApplication.objects.filter(competition=competition, member__team=self.object).order_by('kind')
final_competitions.append((competition, members))
if competition.competition_date > datetime.date.today():
break
context.update({'competitions': final_competitions})
return context
def post(self, request, *args, **kwargs):
self.object = self.get_object()
context = self.get_context_data(object=self.object)
pay_members = request.POST.getlist('pay_member')
if pay_members:
member_ids = {}
for pay_member in pay_members:
competition_id, member_id = pay_member.split('__')
if not competition_id in member_ids:
member_ids.update({competition_id: []})
member_ids.get(competition_id).append(member_id)
key = list(member_ids.keys())[0]
competition = Competition.objects.get(id=key)
if request.POST.get('kind') == 'all_season' and competition.parent.complex_payment_enddate > timezone.now():
competition = competition.parent
application = Application.objects.create(competition=competition, email=request.user.email)
for member_id in member_ids.get(key):
member = self.object.member_set.get(id=member_id)
price = None
total = get_total(competition, self.object.distance_id, member.birthday.year)
if total:
price = total.get('price_obj', None)
application.participant_set.create(first_name=member.first_name,
last_name=member.last_name,
country=member.country,
birthday=member.birthday,
gender=member.gender,
ssn=member.ssn,
competition=competition,
distance=self.object.distance,
team_name=self.object.title,
price=price,
phone_number=member.phone_number,
)
return HttpResponseRedirect(reverse('application', kwargs={'slug': application.code}))
else:
return self.get(request, *args, **kwargs)
class TeamApply(NeverCacheMixin, LoginRequiredMixin, RequestFormKwargsMixin, NamedFormsetsMixin, DetailView):
model = Team
template_name = 'team/team_apply.html'
pk_url_kwarg = 'pk2'
def get_queryset(self):
queryset = super(TeamApply, self).get_queryset()
if not self.request.user.has_perm('registration.add_number'):
queryset = queryset.filter(owner=self.request.user)
return queryset
def get_context_data(self, **kwargs):
context = super(TeamApply, self).get_context_data(**kwargs)
competition = Competition.objects.get(id=self.kwargs.get('competition_pk'))
team_competition = self.object.distance.competition
child_competitions = team_competition.get_children()
if child_competitions:
competitions = child_competitions
else:
competitions = (team_competition, )
if competition not in competitions:
raise Http404
members = Member.objects.filter(team=self.object, status=Member.STATUS_ACTIVE).extra(select={
'kind': 'Select team_memberapplication.kind from team_memberapplication where team_memberapplication.member_id = team_member.id and team_memberapplication.competition_id=%s'
}, select_params=(competition.id, ))
context.update({'members': members, 'competition': competition, 'team_competition': team_competition})
return context
def match_applied_to_participant(self, application):
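        # Match a team member to a registration: prefer a participating (paid)
        # entry, then an unpaid one, then a fuzzy slug match via difflib's
        # get_close_matches (cutoff 0.5).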
distance = application.member.team.distance
application.participant = None
application.participant_unpaid = None
application.participant_potential = None
participant = Participant.objects.filter(competition_id__in=application.competition.get_ids(), slug=application.member.slug, is_participating=True, distance=distance)
if participant:
application.participant = participant[0]
else:
participant = Participant.objects.filter(competition_id__in=application.competition.get_ids(), slug=application.member.slug, distance=distance)
if participant:
application.participant_unpaid = participant[0]
else:
slugs = [obj.slug for obj in Participant.objects.filter(competition_id__in=application.competition.get_ids(), distance=distance, is_participating=True)]
matches = get_close_matches(application.member.slug, slugs, 1, 0.5)
if matches:
participants = Participant.objects.filter(competition=application.competition, slug=matches[0], distance=distance).order_by('-id')
if participants:
application.participant_potential = participants[0]
application.save()
def post(self, request, *args, **kwargs):
self.object = self.get_object()
context = self.get_context_data(object=self.object)
team_competition = context.get('team_competition')
competition = context.get('competition')
riders = []
reserve = []
nothing = []
for member in context.get('members'):
data = int(request.POST.get('member_%i' % member.id))
if data == MemberApplication.KIND_PARTICIPANT:
riders.append(member.id)
elif data == MemberApplication.KIND_RESERVE:
reserve.append(member.id)
else:
nothing.append(member.id)
max_team_riders = team_competition.params_dict.get('max_team_riders', 1000)
max_team_reserve = team_competition.params_dict.get('max_team_reserve', 1000)
if len(riders) > max_team_riders:
messages.error(request, _('Too many team members marked as participants. MAX-%i') % max_team_riders)
elif len(reserve) > max_team_reserve:
messages.error(request, _('Too many team members marked as reserve. MAX-%i') % max_team_reserve)
else:
for rider in riders:
application, created = MemberApplication.objects.get_or_create(member_id=rider, competition=competition, defaults={'kind': MemberApplication.KIND_PARTICIPANT})
if not created:
application.kind = MemberApplication.KIND_PARTICIPANT
application.save()
self.match_applied_to_participant(application)
for rider in reserve:
application, created = MemberApplication.objects.get_or_create(member_id=rider, competition=competition, defaults={'kind': MemberApplication.KIND_RESERVE})
if not created:
application.kind = MemberApplication.KIND_RESERVE
application.save()
self.match_applied_to_participant(application)
MemberApplication.objects.filter(competition=competition).filter(member_id__in=nothing).delete()
            messages.info(request, _('Successfully saved.'))
if 'pk' in self.kwargs:
return HttpResponseRedirect(reverse('manager:team_apply_list', kwargs={'pk2': self.object.id, 'pk': self.kwargs.get('pk')}))
else:
return HttpResponseRedirect(reverse('account:team_apply_list', kwargs={'pk2': self.object.id}))
return self.render_to_response(context)
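# A minimal standalone sketch (not part of the original views) of the fuzzy
# matching used in TeamApply.match_applied_to_participant above; the slugs
# below are hypothetical examples.
if __name__ == "__main__":
    from difflib import get_close_matches

    slugs = ['janis-berzins', 'anna-kalnina', 'peteris-ozols']
    # One close match with a similarity cutoff of 0.5, mirroring the call
    # get_close_matches(application.member.slug, slugs, 1, 0.5) above.
    print(get_close_matches('janis-berzinsh', slugs, 1, 0.5))
    # -> ['janis-berzins']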
|
gpl-3.0
| -7,865,938,040,519,920,000
| 43.573913
| 185
| 0.615587
| false
| 4.191333
| false
| false
| false
|
scikit-optimize/scikit-optimize
|
skopt/tests/test_parallel_cl.py
|
1
|
5399
|
"""This script contains set of functions that test parallel optimization with
skopt, where constant liar parallelization strategy is used.
"""
from numpy.testing import assert_equal
from numpy.testing import assert_raises
from skopt.space import Real
from skopt import Optimizer
from skopt.benchmarks import branin
import skopt.learning as sol
from scipy.spatial.distance import pdist
import pytest
# list of all strategies for parallelization
supported_strategies = ["cl_min", "cl_mean", "cl_max"]
# test one acq function that incorporates the runtime, and one that does not
supported_acq_functions = ["EI", "EIps"]
# Extract available surrogates, so that new ones are used automatically
available_surrogates = [
getattr(sol, name) for name in sol.__all__
if "GradientBoostingQuantileRegressor" not in name
]  # excluding GradientBoostingQuantileRegressor; an issue will be opened later
n_steps = 5 # number of steps to test the algorithms with
n_points = 4 # number of points to evaluate at a single step
# n_steps x n_points > n_random_restarts should hold
@pytest.mark.parametrize("strategy", supported_strategies)
@pytest.mark.parametrize("surrogate", available_surrogates)
@pytest.mark.parametrize("acq_func", supported_acq_functions)
def test_constant_liar_runs(strategy, surrogate, acq_func):
"""
Tests whether the optimizer runs properly during the random
initialization phase and beyond
Parameters
----------
* `strategy` [string]:
Name of the strategy to use during optimization.
* `surrogate` [scikit-optimize surrogate class]:
A class of the scikit-optimize surrogate used in Optimizer.
"""
optimizer = Optimizer(
base_estimator=surrogate(),
dimensions=[Real(-5.0, 10.0), Real(0.0, 15.0)],
acq_func=acq_func,
acq_optimizer='sampling',
random_state=0
)
# test arguments check
assert_raises(ValueError, optimizer.ask, {"strategy": "cl_maen"})
assert_raises(ValueError, optimizer.ask, {"n_points": "0"})
assert_raises(ValueError, optimizer.ask, {"n_points": 0})
for i in range(n_steps):
x = optimizer.ask(n_points=n_points, strategy=strategy)
# check if actually n_points was generated
assert_equal(len(x), n_points)
if "ps" in acq_func:
optimizer.tell(x, [[branin(v), 1.1] for v in x])
else:
optimizer.tell(x, [branin(v) for v in x])
@pytest.mark.parametrize("strategy", supported_strategies)
@pytest.mark.parametrize("surrogate", available_surrogates)
def test_all_points_different(strategy, surrogate):
"""
Tests whether the parallel optimizer always generates
different points to evaluate.
Parameters
----------
* `strategy` [string]:
Name of the strategy to use during optimization.
* `surrogate` [scikit-optimize surrogate class]:
A class of the scikit-optimize surrogate used in Optimizer.
"""
optimizer = Optimizer(
base_estimator=surrogate(),
dimensions=[Real(-5.0, 10.0), Real(0.0, 15.0)],
acq_optimizer='sampling',
random_state=1
)
    tolerance = 1e-3  # distance below which two points are considered identical
for i in range(n_steps):
x = optimizer.ask(n_points, strategy)
optimizer.tell(x, [branin(v) for v in x])
distances = pdist(x)
assert all(distances > tolerance)
@pytest.mark.parametrize("strategy", supported_strategies)
@pytest.mark.parametrize("surrogate", available_surrogates)
def test_same_set_of_points_ask(strategy, surrogate):
"""
For n_points not None, tests whether two consecutive calls to ask
return the same sets of points.
Parameters
----------
* `strategy` [string]:
Name of the strategy to use during optimization.
* `surrogate` [scikit-optimize surrogate class]:
A class of the scikit-optimize surrogate used in Optimizer.
"""
optimizer = Optimizer(
base_estimator=surrogate(),
dimensions=[Real(-5.0, 10.0), Real(0.0, 15.0)],
acq_optimizer='sampling',
random_state=2
)
for i in range(n_steps):
xa = optimizer.ask(n_points, strategy)
xb = optimizer.ask(n_points, strategy)
optimizer.tell(xa, [branin(v) for v in xa])
assert_equal(xa, xb) # check if the sets of points generated are equal
@pytest.mark.parametrize("strategy", supported_strategies)
@pytest.mark.parametrize("surrogate", available_surrogates)
def test_reproducible_runs(strategy, surrogate):
# two runs of the optimizer should yield exactly the same results
optimizer = Optimizer(
base_estimator=surrogate(random_state=1),
dimensions=[Real(-5.0, 10.0), Real(0.0, 15.0)],
acq_optimizer='sampling',
random_state=1
)
points = []
for i in range(n_steps):
x = optimizer.ask(n_points, strategy)
points.append(x)
optimizer.tell(x, [branin(v) for v in x])
    # the x's should be exactly as they are in `points`
optimizer = Optimizer(
base_estimator=surrogate(random_state=1),
dimensions=[Real(-5.0, 10.0), Real(0.0, 15.0)],
acq_optimizer='sampling',
random_state=1
)
for i in range(n_steps):
x = optimizer.ask(n_points, strategy)
assert points[i] == x
optimizer.tell(x, [branin(v) for v in x])
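# A minimal sketch (not part of the test suite) of how the constant liar
# strategy supports parallel evaluation: ask() proposes a batch of distinct
# points, the objective is evaluated concurrently, and tell() feeds the
# results back. Only names already defined above plus the standard library
# are assumed.
if __name__ == "__main__":
    from concurrent.futures import ThreadPoolExecutor

    opt = Optimizer(
        base_estimator=available_surrogates[0](),
        dimensions=[Real(-5.0, 10.0), Real(0.0, 15.0)],
        acq_optimizer='sampling',
        random_state=3
    )
    with ThreadPoolExecutor(max_workers=n_points) as executor:
        for _ in range(n_steps):
            x = opt.ask(n_points=n_points, strategy="cl_min")
            y = list(executor.map(branin, x))  # concurrent evaluations
            opt.tell(x, y)
    print(min(opt.yi))  # best branin value observed so far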
|
bsd-3-clause
| -7,413,750,497,891,699,000
| 31.920732
| 79
| 0.665308
| false
| 3.687842
| true
| false
| false
|
cmallwitz/Sunflower
|
application/plugins/file_list/trash_list.py
|
1
|
1931
|
from gi.repository import Gtk
from file_list import FileList
from gio_provider import TrashProvider
from operation import DeleteOperation
class TrashList(FileList):
"""Trash file list plugin
    Generic operations related to trash management are provided by this
    class. By extending FileList, standard features such as drag and drop
    are supported.
"""
def __init__(self, parent, notebook, options):
FileList.__init__(self, parent, notebook, options)
def _create_buttons(self):
"""Create titlebar buttons."""
options = self._parent.options
# empty trash button
self._empty_button = Gtk.Button.new_from_icon_name('edittrash', Gtk.IconSize.MENU)
self._empty_button.set_focus_on_click(False)
self._empty_button.set_tooltip_text(_('Empty trash'))
self._empty_button.connect('clicked', self.empty_trash)
self._title_bar.add_control(self._empty_button)
def empty_trash(self, widget=None, data=None):
"""Empty trash can."""
# ask user to confirm
dialog = Gtk.MessageDialog(
self._parent,
Gtk.DialogFlags.DESTROY_WITH_PARENT,
Gtk.MessageType.QUESTION,
Gtk.ButtonsType.YES_NO,
_(
"All items in the Trash will be permanently deleted. "
"Are you sure?"
)
)
dialog.set_default_response(Gtk.ResponseType.YES)
result = dialog.run()
dialog.destroy()
# remove all items in trash
if result == Gtk.ResponseType.YES:
provider = self.get_provider()
# create delete operation
operation = DeleteOperation(
self._parent,
provider
)
operation.set_force_delete(True)
operation.set_selection(provider.list_dir(provider.get_root_path(None)))
# perform removal
operation.start()
def change_path(self, path=None, selected=None):
"""Change file list path."""
if path is not None and not path.startswith('trash://'):
path = 'trash:///'
FileList.change_path(self, path, selected)
|
gpl-3.0
| -785,780,227,369,612,700
| 26.985507
| 84
| 0.68928
| false
| 3.423759
| false
| false
| false
|
avelino/bottle-auth
|
bottle_auth/core/auth.py
|
1
|
54118
|
#!/usr/bin/env python
#
# Copyright 2009 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Implementations of various third-party authentication schemes.
All the classes in this file are class Mixins designed to be used with
web.py RequestHandler classes. The primary methods for each service are
authenticate_redirect(), authorize_redirect(), and get_authenticated_user().
The former should be called to redirect the user to, e.g., the OpenID
authentication page on the third party service, and the latter should
be called upon return to get the user data from the data returned by
the third party service.
They all take slightly different arguments due to the fact all these
services implement authentication and authorization slightly differently.
See the individual service classes below for complete documentation.
Example usage for Google OpenID::
class GoogleHandler(tornado.web.RequestHandler, tornado.auth.GoogleMixin):
@tornado.web.asynchronous
def get(self):
if self.get_argument("openid.mode", None):
self.get_authenticated_user(self.async_callback(self._on_auth))
return
self.authenticate_redirect()
def _on_auth(self, user):
if not user:
raise tornado.web.HTTPError(500, "Google auth failed")
# Save the user with, e.g., set_secure_cookie()
"""
import base64
import binascii
import cgi
import hashlib
import hmac
import logging
import time
import urllib
import urlparse
import uuid
import pprint
from bottle_auth.core import httpclient
from bottle_auth.core import escape
from bottle_auth.core.escape import _unicode
from bottle_auth.core.httputil import url_concat, bytes_type, b
import webob
import functools
import re
log = logging.getLogger('bottleauth.auth')
class HTTPError(Exception):
def __init__(self, code, description):
self.code = code
self.description = description
class HTTPRedirect(Exception):
def __init__(self, url):
self.url = url
class WebobRequestWrapper(object):
def __init__(self, inst):
self.inst = inst
def full_url(self):
return self.inst.url
@property
def uri(self):
return self.inst.url
@property
def host(self):
return self.inst.host
@property
def params(self):
return self.inst.params
@property
def arguments(self):
return self.inst.GET.dict_of_lists()
class WebobResponseWrapper(object):
def __init__(self, inst):
self.inst = inst
def set_cookie(self, name, value):
self.inst.set_cookie(name, value)
def get_cookie(self, name, default=None):
return self.inst.cookies.get(name, default)
def delete_cookie(self, name):
self.inst.delete_cookie(name)
class GenericAuth(object):
"""Generic base class to emulate a tornado.Request
using the current WSGI environ.
"""
def __init__(self, request, settings=None, cookie_monster=None):
self.settings = settings or {}
if not isinstance(request, webob.Request):
request = webob.Request(request)
self.request = WebobRequestWrapper(request)
if isinstance(cookie_monster, webob.Response):
self.cookie_monster = WebobResponseWrapper(cookie_monster)
else:
self.cookie_monster = cookie_monster
def redirect(self, url):
raise HTTPRedirect(url)
def require_setting(self, name, feature="this feature"):
"""Raises an exception if the given app setting is not defined."""
if name not in self.settings:
raise Exception("You must define the '%s' setting in your "
"application to use %s" % (name, feature))
_ARG_DEFAULT = []
def get_argument(self, name, default=_ARG_DEFAULT, strip=True):
"""Returns the value of the argument with the given name.
If default is not provided, the argument is considered to be
required, and we throw an HTTP 400 exception if it is missing.
If the argument appears in the url more than once, we return the
last value.
The returned value is always unicode.
"""
args = self.get_arguments(name, strip=strip)
if not args:
if default is self._ARG_DEFAULT:
raise HTTPError(400, "Missing argument %s" % name)
return default
return args[-1]
def get_arguments(self, name, strip=True):
"""Returns a list of the arguments with the given name.
If the argument is not present, returns an empty list.
The returned values are always unicode.
"""
values = []
for v in self.request.params.getall(name):
v = self.decode_argument(v, name=name)
if isinstance(v, unicode):
# Get rid of any weird control chars (unless decoding gave
# us bytes, in which case leave it alone)
v = re.sub(r"[\x00-\x08\x0e-\x1f]", " ", v)
if strip:
v = v.strip()
values.append(v)
return values
def decode_argument(self, value, name=None):
"""Decodes an argument from the request.
The argument has been percent-decoded and is now a byte string.
By default, this method decodes the argument as utf-8 and returns
a unicode string, but this may be overridden in subclasses.
This method is used as a filter for both get_argument() and for
values extracted from the url and passed to get()/post()/etc.
The name of the argument is provided if known, but may be None
(e.g. for unnamed groups in the url regex).
"""
return _unicode(value)
def async_callback(self, callback, *args, **kwargs):
"""Obsolete - catches exceptions from the wrapped function.
This function is unnecessary since Tornado 1.1.
"""
if callback is None:
return None
if args or kwargs:
callback = functools.partial(callback, *args, **kwargs)
#FIXME what about the exception wrapper?
return callback
def get_cookie(self, name, default=None):
"""Gets the value of the cookie with the given name, else default."""
assert self.cookie_monster, 'Cookie Monster not set'
return self.cookie_monster.get_cookie(name, default)
def set_cookie(self, name, value, domain=None, expires=None, path="/",
expires_days=None, **kwargs):
"""Sets the given cookie name/value with the given options.
Additional keyword arguments are set on the Cookie.Morsel
directly.
See http://docs.python.org/library/cookie.html#morsel-objects
for available attributes.
"""
assert self.cookie_monster, 'Cookie Monster not set'
#, domain=domain, path=path)
self.cookie_monster.set_cookie(name, value)
def clear_cookie(self, name, path="/", domain=None):
"""Deletes the cookie with the given name."""
assert self.cookie_monster, 'Cookie Monster not set'
#, path=path, domain=domain)
self.cookie_monster.delete_cookie(name)
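# For illustration (not part of the original module), GenericAuth can be
# driven directly from a webob request/response pair; the setting name below
# is hypothetical:
#
#     import webob
#     req = webob.Request.blank('/callback?next=/')
#     resp = webob.Response()
#     auth = GenericAuth(req, settings={'some_consumer_key': 'abc'},
#                        cookie_monster=resp)
#     auth.get_argument('next')         # -> u'/'
#     auth.set_cookie('_oauth_request_token', 'value')  # stored on resp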
class OpenIdMixin(GenericAuth):
"""Abstract implementation of OpenID and Attribute Exchange.
See GoogleMixin below for example implementations.
"""
def authenticate_redirect(
self, callback_uri=None, ax_attrs=["name", "email", "language",
"username"]):
"""Returns the authentication URL for this service.
After authentication, the service will redirect back to the given
callback URI.
We request the given attributes for the authenticated user by
default (name, email, language, and username). If you don't need
all those attributes for your app, you can request fewer with
the ax_attrs keyword argument.
"""
callback_uri = callback_uri or self.request.uri
args = self._openid_args(callback_uri, ax_attrs=ax_attrs)
self.redirect(self._OPENID_ENDPOINT + "?" + urllib.urlencode(args))
def get_authenticated_user(self, callback):
"""Fetches the authenticated user data upon redirect.
This method should be called by the handler that receives the
redirect from the authenticate_redirect() or authorize_redirect()
methods.
"""
# Verify the OpenID response via direct request to the OP
# Recommendation @hmarrao, ref #3
args = dict((k, unicode(v[-1]).encode('utf-8')) for k, v in self.request.arguments.iteritems())
args["openid.mode"] = u"check_authentication"
url = self._OPENID_ENDPOINT
http = httpclient.AsyncHTTPClient()
log.debug("OpenID requesting {0} at uri {1}".format(args, url))
http.fetch(url, self.async_callback(
self._on_authentication_verified, callback),
method="POST", body=urllib.urlencode(args))
def _openid_args(self, callback_uri, ax_attrs=[], oauth_scope=None):
url = urlparse.urljoin(self.request.full_url(), callback_uri)
args = {
"openid.ns": "http://specs.openid.net/auth/2.0",
"openid.claimed_id": "http://specs.openid.net/auth/2.0/"
"identifier_select",
"openid.identity": "http://specs.openid.net/auth/2.0/"
"identifier_select",
"openid.return_to": url,
"openid.realm": urlparse.urljoin(url, '/'),
"openid.mode": "checkid_setup",
}
if ax_attrs:
args.update({
"openid.ns.ax": "http://openid.net/srv/ax/1.0",
"openid.ax.mode": "fetch_request",
})
ax_attrs = set(ax_attrs)
required = []
if "name" in ax_attrs:
ax_attrs -= set(["name", "firstname", "fullname", "lastname"])
required += ["firstname", "fullname", "lastname"]
args.update({
"openid.ax.type.firstname":
"http://axschema.org/namePerson/first",
"openid.ax.type.fullname":
"http://axschema.org/namePerson",
"openid.ax.type.lastname":
"http://axschema.org/namePerson/last",
})
known_attrs = {
"email": "http://axschema.org/contact/email",
"language": "http://axschema.org/pref/language",
"username": "http://axschema.org/namePerson/friendly",
}
for name in ax_attrs:
args["openid.ax.type." + name] = known_attrs[name]
required.append(name)
args["openid.ax.required"] = ",".join(required)
if oauth_scope:
args.update({
"openid.ns.oauth":
"http://specs.openid.net/extensions/oauth/1.0",
"openid.oauth.consumer": self.request.host.split(":")[0],
"openid.oauth.scope": oauth_scope,
})
return args
def _on_authentication_verified(self, callback, response):
log.debug('Verifying token {0}'.format(pprint.pformat({
'status_code': response.status_code,
'headers': response.headers,
'error': response.error,
'body': response.body,
})))
if response.error or b("is_valid:true") not in response.body:
log.warning("Invalid OpenID response: %s", response.error or
response.body)
callback(None)
return
# Make sure we got back at least an email from attribute exchange
ax_ns = None
for name in self.request.arguments.iterkeys():
if name.startswith("openid.ns.") and \
self.get_argument(name) == u"http://openid.net/srv/ax/1.0":
ax_ns = name[10:]
break
def get_ax_arg(uri):
log.debug('Getting {0}'.format(uri))
if not ax_ns: return u""
prefix = "openid." + ax_ns + ".type."
ax_name = None
for name in self.request.arguments.iterkeys():
if self.get_argument(name) == uri and name.startswith(prefix):
part = name[len(prefix):]
ax_name = "openid." + ax_ns + ".value." + part
break
if not ax_name: return u""
return self.get_argument(ax_name, u"")
email = get_ax_arg("http://axschema.org/contact/email")
name = get_ax_arg("http://axschema.org/namePerson")
first_name = get_ax_arg("http://axschema.org/namePerson/first")
last_name = get_ax_arg("http://axschema.org/namePerson/last")
username = get_ax_arg("http://axschema.org/namePerson/friendly")
locale = get_ax_arg("http://axschema.org/pref/language").lower()
user = dict()
name_parts = []
if first_name:
user["first_name"] = first_name
name_parts.append(first_name)
if last_name:
user["last_name"] = last_name
name_parts.append(last_name)
if name:
user["name"] = name
elif name_parts:
user["name"] = u" ".join(name_parts)
elif email:
user["name"] = email.split("@")[0]
if email: user["email"] = email
if locale: user["locale"] = locale
if username: user["username"] = username
user['claimed_id'] = self.request.arguments.get('openid.claimed_id')[-1]
log.debug('Final step, got claimed_id {0}'.format(user['claimed_id']))
callback(user)
class OAuthMixin(GenericAuth):
"""Abstract implementation of OAuth.
See TwitterMixin and FriendFeedMixin below for example implementations.
"""
def authorize_redirect(self, callback_uri=None, extra_params=None):
"""Redirects the user to obtain OAuth authorization for this service.
Twitter and FriendFeed both require that you register a Callback
URL with your application. You should call this method to log the
user in, and then call get_authenticated_user() in the handler
you registered as your Callback URL to complete the authorization
process.
This method sets a cookie called _oauth_request_token which is
subsequently used (and cleared) in get_authenticated_user for
security purposes.
"""
if callback_uri and getattr(self, "_OAUTH_NO_CALLBACKS", False):
raise Exception("This service does not support oauth_callback")
http = httpclient.AsyncHTTPClient()
if getattr(self, "_OAUTH_VERSION", "1.0a") == "1.0a":
http.fetch(self._oauth_request_token_url(callback_uri=callback_uri,
extra_params=extra_params),
self.async_callback(
self._on_request_token,
self._OAUTH_AUTHORIZE_URL,
callback_uri))
else:
http.fetch(self._oauth_request_token_url(), self.async_callback(
self._on_request_token, self._OAUTH_AUTHORIZE_URL, callback_uri))
def get_authenticated_user(self, callback):
"""Gets the OAuth authorized user and access token on callback.
This method should be called from the handler for your registered
OAuth Callback URL to complete the registration process. We call
callback with the authenticated user, which in addition to standard
attributes like 'name' includes the 'access_key' attribute, which
        contains the OAuth access token you can use to make authorized requests
to this service on behalf of the user.
"""
request_key = self.get_argument("oauth_token")
oauth_verifier = self.get_argument("oauth_verifier", None)
request_cookie = self.get_cookie("_oauth_request_token")
if not request_cookie:
log.warning("Missing OAuth request token cookie")
callback(None)
return
self.clear_cookie("_oauth_request_token")
cookie_key, cookie_secret = [base64.b64decode(i) for i in request_cookie.split("|")]
if cookie_key != request_key:
log.warning("Request token does not match cookie")
callback(None)
return
token = dict(key=cookie_key, secret=cookie_secret)
if oauth_verifier:
token["verifier"] = oauth_verifier
http = httpclient.AsyncHTTPClient()
http.fetch(self._oauth_access_token_url(token), self.async_callback(
self._on_access_token, callback))
    def _oauth_request_token_url(self, callback_uri=None, extra_params=None):
consumer_token = self._oauth_consumer_token()
url = self._OAUTH_REQUEST_TOKEN_URL
args = dict(
oauth_consumer_key=consumer_token["key"],
oauth_signature_method="HMAC-SHA1",
oauth_timestamp=str(int(time.time())),
oauth_nonce=binascii.b2a_hex(uuid.uuid4().bytes),
oauth_version=getattr(self, "_OAUTH_VERSION", "1.0a"),
)
if getattr(self, "_OAUTH_VERSION", "1.0a") == "1.0a":
if callback_uri:
args["oauth_callback"] = urlparse.urljoin(
self.request.full_url(), callback_uri)
if extra_params: args.update(extra_params)
signature = _oauth10a_signature(consumer_token, "GET", url, args)
else:
signature = _oauth_signature(consumer_token, "GET", url, args)
args["oauth_signature"] = signature
return url + "?" + urllib.urlencode(args)
def _on_request_token(self, authorize_url, callback_uri, response):
if response.error:
raise Exception("Could not get request token")
request_token = _oauth_parse_response(response.body)
data = "|".join([base64.b64encode(request_token["key"]),
base64.b64encode(request_token["secret"])])
self.set_cookie("_oauth_request_token", data)
args = dict(oauth_token=request_token["key"])
if callback_uri:
args["oauth_callback"] = urlparse.urljoin(
self.request.full_url(), callback_uri)
self.redirect(authorize_url + "?" + urllib.urlencode(args))
def _oauth_access_token_url(self, request_token):
consumer_token = self._oauth_consumer_token()
url = self._OAUTH_ACCESS_TOKEN_URL
args = dict(
oauth_consumer_key=consumer_token["key"],
oauth_token=request_token["key"],
oauth_signature_method="HMAC-SHA1",
oauth_timestamp=str(int(time.time())),
oauth_nonce=binascii.b2a_hex(uuid.uuid4().bytes),
oauth_version=getattr(self, "_OAUTH_VERSION", "1.0a"),
)
if "verifier" in request_token:
args["oauth_verifier"]=request_token["verifier"]
if getattr(self, "_OAUTH_VERSION", "1.0a") == "1.0a":
signature = _oauth10a_signature(consumer_token, "GET", url, args,
request_token)
else:
signature = _oauth_signature(consumer_token, "GET", url, args,
request_token)
args["oauth_signature"] = signature
return url + "?" + urllib.urlencode(args)
def _on_access_token(self, callback, response):
if response.error:
log.warning("Could not fetch access token")
callback(None)
return
access_token = _oauth_parse_response(response.body)
        self._oauth_get_user(access_token, self.async_callback(
            self._on_oauth_get_user, access_token, callback))
def _oauth_get_user(self, access_token, callback):
raise NotImplementedError()
def _on_oauth_get_user(self, access_token, callback, user):
if not user:
callback(None)
return
user["access_token"] = access_token
callback(user)
def _oauth_request_parameters(self, url, access_token, parameters={},
method="GET"):
"""Returns the OAuth parameters as a dict for the given request.
parameters should include all POST arguments and query string arguments
that will be sent with the request.
"""
consumer_token = self._oauth_consumer_token()
base_args = dict(
oauth_consumer_key=consumer_token["key"],
oauth_token=access_token["key"],
oauth_signature_method="HMAC-SHA1",
oauth_timestamp=str(int(time.time())),
oauth_nonce=binascii.b2a_hex(uuid.uuid4().bytes),
oauth_version=getattr(self, "_OAUTH_VERSION", "1.0a"),
)
args = {}
args.update(base_args)
args.update(parameters)
if getattr(self, "_OAUTH_VERSION", "1.0a") == "1.0a":
signature = _oauth10a_signature(consumer_token, method, url, args,
access_token)
else:
signature = _oauth_signature(consumer_token, method, url, args,
access_token)
base_args["oauth_signature"] = signature
return base_args
class OAuth2Mixin(GenericAuth):
"""Abstract implementation of OAuth v 2."""
def authorize_redirect(self, redirect_uri=None, client_id=None,
client_secret=None, extra_params=None ):
"""Redirects the user to obtain OAuth authorization for this service.
Some providers require that you register a Callback
URL with your application. You should call this method to log the
user in, and then call get_authenticated_user() in the handler
you registered as your Callback URL to complete the authorization
process.
"""
args = {
"redirect_uri": redirect_uri,
"client_id": client_id
}
if extra_params: args.update(extra_params)
self.redirect(
url_concat(self._OAUTH_AUTHORIZE_URL, args))
    def _oauth_request_token_url(self, redirect_uri=None, client_id=None,
                                 client_secret=None, code=None,
                                 extra_params=None):
url = self._OAUTH_ACCESS_TOKEN_URL
args = dict(
redirect_uri=redirect_uri,
code=code,
client_id=client_id,
client_secret=client_secret,
)
if extra_params: args.update(extra_params)
return url_concat(url, args)
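# For reference (illustrative, not part of the module): url_concat() used
# above simply appends the urlencoded arguments to the URL, e.g.
#
#     url_concat('https://example.com/oauth/authorize',
#                {'client_id': 'abc', 'redirect_uri': 'https://app/cb'})
#     # -> 'https://example.com/oauth/authorize?client_id=abc&redirect_uri=...'
#     # (parameter order may vary with dict ordering)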
class TwitterMixin(OAuthMixin):
"""Twitter OAuth authentication.
To authenticate with Twitter, register your application with
Twitter at http://twitter.com/apps. Then copy your Consumer Key and
Consumer Secret to the application settings 'twitter_consumer_key' and
'twitter_consumer_secret'. Use this Mixin on the handler for the URL
you registered as your application's Callback URL.
When your application is set up, you can use this Mixin like this
to authenticate the user with Twitter and get access to their stream::
class TwitterHandler(tornado.web.RequestHandler,
tornado.auth.TwitterMixin):
@tornado.web.asynchronous
def get(self):
if self.get_argument("oauth_token", None):
self.get_authenticated_user(self.async_callback(self._on_auth))
return
self.authorize_redirect()
def _on_auth(self, user):
if not user:
raise tornado.web.HTTPError(500, "Twitter auth failed")
# Save the user using, e.g., set_secure_cookie()
The user object returned by get_authenticated_user() includes the
attributes 'username', 'name', and all of the custom Twitter user
    attributes described at
http://apiwiki.twitter.com/Twitter-REST-API-Method%3A-users%C2%A0show
in addition to 'access_token'. You should save the access token with
the user; it is required to make requests on behalf of the user later
with twitter_request().
"""
_OAUTH_REQUEST_TOKEN_URL = "http://api.twitter.com/oauth/request_token"
_OAUTH_ACCESS_TOKEN_URL = "http://api.twitter.com/oauth/access_token"
_OAUTH_AUTHORIZE_URL = "http://api.twitter.com/oauth/authorize"
_OAUTH_AUTHENTICATE_URL = "http://api.twitter.com/oauth/authenticate"
_OAUTH_NO_CALLBACKS = False
def authenticate_redirect(self):
"""Just like authorize_redirect(), but auto-redirects if authorized.
This is generally the right interface to use if you are using
Twitter for single-sign on.
"""
http = httpclient.AsyncHTTPClient()
http.fetch(self._oauth_request_token_url(), self.async_callback(
self._on_request_token, self._OAUTH_AUTHENTICATE_URL, None))
def twitter_request(self, path, callback, access_token=None,
post_args=None, **args):
"""Fetches the given API path, e.g., "/statuses/user_timeline/btaylor"
The path should not include the format (we automatically append
".json" and parse the JSON output).
If the request is a POST, post_args should be provided. Query
string arguments should be given as keyword arguments.
All the Twitter methods are documented at
http://apiwiki.twitter.com/Twitter-API-Documentation.
Many methods require an OAuth access token which you can obtain
through authorize_redirect() and get_authenticated_user(). The
user returned through that process includes an 'access_token'
attribute that can be used to make authenticated requests via
this method. Example usage::
class MainHandler(tornado.web.RequestHandler,
tornado.auth.TwitterMixin):
@tornado.web.authenticated
@tornado.web.asynchronous
def get(self):
self.twitter_request(
"/statuses/update",
post_args={"status": "Testing Tornado Web Server"},
access_token=user["access_token"],
callback=self.async_callback(self._on_post))
def _on_post(self, new_entry):
if not new_entry:
# Call failed; perhaps missing permission?
self.authorize_redirect()
return
self.finish("Posted a message!")
"""
# Add the OAuth resource request signature if we have credentials
url = "http://api.twitter.com/1" + path + ".json"
if access_token:
all_args = {}
all_args.update(args)
all_args.update(post_args or {})
consumer_token = self._oauth_consumer_token()
method = "POST" if post_args is not None else "GET"
oauth = self._oauth_request_parameters(
url, access_token, all_args, method=method)
args.update(oauth)
if args: url += "?" + urllib.urlencode(args)
callback = self.async_callback(self._on_twitter_request, callback)
http = httpclient.AsyncHTTPClient()
if post_args is not None:
http.fetch(url, method="POST", body=urllib.urlencode(post_args),
callback=callback)
else:
http.fetch(url, callback=callback)
def _on_twitter_request(self, callback, response):
if response.error:
log.warning("Error response %s fetching %s", response.error,
response.request.url)
callback(None)
return
callback(escape.json_decode(response.body))
def _oauth_consumer_token(self):
self.require_setting("twitter_consumer_key", "Twitter OAuth")
self.require_setting("twitter_consumer_secret", "Twitter OAuth")
return dict(
key=self.settings["twitter_consumer_key"],
secret=self.settings["twitter_consumer_secret"])
def _oauth_get_user(self, access_token, callback):
callback = self.async_callback(self._parse_user_response, callback)
self.twitter_request(
"/users/show/" + access_token["screen_name"],
access_token=access_token, callback=callback)
def _parse_user_response(self, callback, user):
if user:
user["username"] = user["screen_name"]
callback(user)
class FriendFeedMixin(OAuthMixin):
"""FriendFeed OAuth authentication.
To authenticate with FriendFeed, register your application with
FriendFeed at http://friendfeed.com/api/applications. Then
copy your Consumer Key and Consumer Secret to the application settings
'friendfeed_consumer_key' and 'friendfeed_consumer_secret'. Use
this Mixin on the handler for the URL you registered as your
application's Callback URL.
When your application is set up, you can use this Mixin like this
to authenticate the user with FriendFeed and get access to their feed::
class FriendFeedHandler(tornado.web.RequestHandler,
tornado.auth.FriendFeedMixin):
@tornado.web.asynchronous
def get(self):
if self.get_argument("oauth_token", None):
self.get_authenticated_user(self.async_callback(self._on_auth))
return
self.authorize_redirect()
def _on_auth(self, user):
if not user:
raise tornado.web.HTTPError(500, "FriendFeed auth failed")
# Save the user using, e.g., set_secure_cookie()
The user object returned by get_authenticated_user() includes the
attributes 'username', 'name', and 'description' in addition to
'access_token'. You should save the access token with the user;
it is required to make requests on behalf of the user later with
friendfeed_request().
"""
_OAUTH_VERSION = "1.0"
_OAUTH_REQUEST_TOKEN_URL = "https://friendfeed.com/account/oauth/request_token"
_OAUTH_ACCESS_TOKEN_URL = "https://friendfeed.com/account/oauth/access_token"
_OAUTH_AUTHORIZE_URL = "https://friendfeed.com/account/oauth/authorize"
_OAUTH_NO_CALLBACKS = True
_OAUTH_VERSION = "1.0"
def friendfeed_request(self, path, callback, access_token=None,
post_args=None, **args):
"""Fetches the given relative API path, e.g., "/bret/friends"
If the request is a POST, post_args should be provided. Query
string arguments should be given as keyword arguments.
All the FriendFeed methods are documented at
http://friendfeed.com/api/documentation.
Many methods require an OAuth access token which you can obtain
through authorize_redirect() and get_authenticated_user(). The
user returned through that process includes an 'access_token'
attribute that can be used to make authenticated requests via
this method. Example usage::
class MainHandler(tornado.web.RequestHandler,
tornado.auth.FriendFeedMixin):
@tornado.web.authenticated
@tornado.web.asynchronous
def get(self):
self.friendfeed_request(
"/entry",
post_args={"body": "Testing Tornado Web Server"},
access_token=self.current_user["access_token"],
callback=self.async_callback(self._on_post))
def _on_post(self, new_entry):
if not new_entry:
# Call failed; perhaps missing permission?
self.authorize_redirect()
return
self.finish("Posted a message!")
"""
# Add the OAuth resource request signature if we have credentials
url = "http://friendfeed-api.com/v2" + path
if access_token:
all_args = {}
all_args.update(args)
all_args.update(post_args or {})
consumer_token = self._oauth_consumer_token()
method = "POST" if post_args is not None else "GET"
oauth = self._oauth_request_parameters(
url, access_token, all_args, method=method)
args.update(oauth)
if args: url += "?" + urllib.urlencode(args)
callback = self.async_callback(self._on_friendfeed_request, callback)
http = httpclient.AsyncHTTPClient()
if post_args is not None:
http.fetch(url, method="POST", body=urllib.urlencode(post_args),
callback=callback)
else:
http.fetch(url, callback=callback)
def _on_friendfeed_request(self, callback, response):
if response.error:
log.warning("Error response %s fetching %s", response.error,
response.request.url)
callback(None)
return
callback(escape.json_decode(response.body))
def _oauth_consumer_token(self):
self.require_setting("friendfeed_consumer_key", "FriendFeed OAuth")
self.require_setting("friendfeed_consumer_secret", "FriendFeed OAuth")
return dict(
key=self.settings["friendfeed_consumer_key"],
secret=self.settings["friendfeed_consumer_secret"])
def _oauth_get_user(self, access_token, callback):
callback = self.async_callback(self._parse_user_response, callback)
self.friendfeed_request(
"/feedinfo/" + access_token["username"],
include="id,name,description", access_token=access_token,
callback=callback)
def _parse_user_response(self, callback, user):
if user:
user["username"] = user["id"]
callback(user)
class GoogleMixin(OpenIdMixin, OAuthMixin):
"""Google Open ID / OAuth authentication.
No application registration is necessary to use Google for authentication
or to access Google resources on behalf of a user. To authenticate with
Google, redirect with authenticate_redirect(). On return, parse the
response with get_authenticated_user(). We send a dict containing the
values for the user, including 'email', 'name', and 'locale'.
Example usage::
class GoogleHandler(tornado.web.RequestHandler, tornado.auth.GoogleMixin):
@tornado.web.asynchronous
def get(self):
if self.get_argument("openid.mode", None):
self.get_authenticated_user(self.async_callback(self._on_auth))
return
self.authenticate_redirect()
def _on_auth(self, user):
if not user:
raise tornado.web.HTTPError(500, "Google auth failed")
# Save the user with, e.g., set_secure_cookie()
"""
_OPENID_ENDPOINT = "https://www.google.com/accounts/o8/ud"
_OAUTH_ACCESS_TOKEN_URL = "https://www.google.com/accounts/OAuthGetAccessToken"
def authorize_redirect(self, oauth_scope, callback_uri=None,
ax_attrs=["name","email","language","username"]):
"""Authenticates and authorizes for the given Google resource.
Some of the available resources are:
* Gmail Contacts - http://www.google.com/m8/feeds/
* Calendar - http://www.google.com/calendar/feeds/
* Finance - http://finance.google.com/finance/feeds/
You can authorize multiple resources by separating the resource
URLs with a space.
"""
callback_uri = callback_uri or self.request.uri
args = self._openid_args(callback_uri, ax_attrs=ax_attrs,
oauth_scope=oauth_scope)
self.redirect(self._OPENID_ENDPOINT + "?" + urllib.urlencode(args))
def get_authenticated_user(self, callback):
"""Fetches the authenticated user data upon redirect."""
# Look to see if we are doing combined OpenID/OAuth
oauth_ns = ""
for name, values in self.request.arguments.iteritems():
if name.startswith("openid.ns.") and \
values[-1] == u"http://specs.openid.net/extensions/oauth/1.0":
oauth_ns = name[10:]
break
token = self.get_argument("openid." + oauth_ns + ".request_token", "")
if token:
http = httpclient.AsyncHTTPClient()
token = dict(key=token, secret="")
http.fetch(self._oauth_access_token_url(token),
self.async_callback(self._on_access_token, callback))
else:
OpenIdMixin.get_authenticated_user(self, callback)
def _oauth_consumer_token(self):
self.require_setting("google_consumer_key", "Google OAuth")
self.require_setting("google_consumer_secret", "Google OAuth")
return dict(
key=self.settings["google_consumer_key"],
secret=self.settings["google_consumer_secret"])
def _oauth_get_user(self, access_token, callback):
OpenIdMixin.get_authenticated_user(self, callback)
class FacebookMixin(GenericAuth):
"""Facebook Connect authentication.
New applications should consider using `FacebookGraphMixin` below instead
of this class.
To authenticate with Facebook, register your application with
Facebook at http://www.facebook.com/developers/apps.php. Then
copy your API Key and Application Secret to the application settings
'facebook_api_key' and 'facebook_secret'.
When your application is set up, you can use this Mixin like this
to authenticate the user with Facebook::
class FacebookHandler(tornado.web.RequestHandler,
tornado.auth.FacebookMixin):
@tornado.web.asynchronous
def get(self):
if self.get_argument("session", None):
self.get_authenticated_user(self.async_callback(self._on_auth))
return
self.authenticate_redirect()
def _on_auth(self, user):
if not user:
raise tornado.web.HTTPError(500, "Facebook auth failed")
# Save the user using, e.g., set_secure_cookie()
The user object returned by get_authenticated_user() includes the
attributes 'facebook_uid' and 'name' in addition to session attributes
like 'session_key'. You should save the session key with the user; it is
required to make requests on behalf of the user later with
facebook_request().
"""
def authenticate_redirect(self, callback_uri=None, cancel_uri=None,
extended_permissions=None):
"""Authenticates/installs this app for the current user."""
self.require_setting("facebook_api_key", "Facebook Connect")
callback_uri = callback_uri or self.request.uri
args = {
"api_key": self.settings["facebook_api_key"],
"v": "1.0",
"fbconnect": "true",
"display": "page",
"next": urlparse.urljoin(self.request.full_url(), callback_uri),
"return_session": "true",
}
if cancel_uri:
args["cancel_url"] = urlparse.urljoin(
self.request.full_url(), cancel_uri)
if extended_permissions:
if isinstance(extended_permissions, (unicode, bytes_type)):
extended_permissions = [extended_permissions]
args["req_perms"] = ",".join(extended_permissions)
self.redirect("http://www.facebook.com/login.php?" +
urllib.urlencode(args))
def authorize_redirect(self, extended_permissions, callback_uri=None,
cancel_uri=None):
"""Redirects to an authorization request for the given FB resource.
The available resource names are listed at
http://wiki.developers.facebook.com/index.php/Extended_permission.
The most common resource types include:
* publish_stream
* read_stream
* email
* sms
extended_permissions can be a single permission name or a list of
names. To get the session secret and session key, call
get_authenticated_user() just as you would with
authenticate_redirect().
"""
self.authenticate_redirect(callback_uri, cancel_uri,
extended_permissions)
def get_authenticated_user(self, callback):
"""Fetches the authenticated Facebook user.
The authenticated user includes the special Facebook attributes
'session_key' and 'facebook_uid' in addition to the standard
user attributes like 'name'.
"""
self.require_setting("facebook_api_key", "Facebook Connect")
session = escape.json_decode(self.get_argument("session"))
self.facebook_request(
method="facebook.users.getInfo",
callback=self.async_callback(
self._on_get_user_info, callback, session),
session_key=session["session_key"],
uids=session["uid"],
fields="uid,first_name,last_name,name,locale,pic_square," \
"profile_url,username")
def facebook_request(self, method, callback, **args):
"""Makes a Facebook API REST request.
We automatically include the Facebook API key and signature, but
        it is the caller's responsibility to include 'session_key' and any
other required arguments to the method.
The available Facebook methods are documented here:
http://wiki.developers.facebook.com/index.php/API
Here is an example for the stream.get() method::
class MainHandler(tornado.web.RequestHandler,
tornado.auth.FacebookMixin):
@tornado.web.authenticated
@tornado.web.asynchronous
def get(self):
self.facebook_request(
method="stream.get",
callback=self.async_callback(self._on_stream),
session_key=self.current_user["session_key"])
def _on_stream(self, stream):
if stream is None:
# Not authorized to read the stream yet?
self.redirect(self.authorize_redirect("read_stream"))
return
self.render("stream.html", stream=stream)
"""
self.require_setting("facebook_api_key", "Facebook Connect")
self.require_setting("facebook_secret", "Facebook Connect")
if not method.startswith("facebook."):
method = "facebook." + method
args["api_key"] = self.settings["facebook_api_key"]
args["v"] = "1.0"
args["method"] = method
args["call_id"] = str(long(time.time() * 1e6))
args["format"] = "json"
args["sig"] = self._signature(args)
url = "http://api.facebook.com/restserver.php?" + \
urllib.urlencode(args)
http = httpclient.AsyncHTTPClient()
http.fetch(url, callback=self.async_callback(
self._parse_response, callback))
def _on_get_user_info(self, callback, session, users):
if users is None:
callback(None)
return
callback({
"name": users[0]["name"],
"first_name": users[0]["first_name"],
"last_name": users[0]["last_name"],
"uid": users[0]["uid"],
"locale": users[0]["locale"],
"pic_square": users[0]["pic_square"],
"profile_url": users[0]["profile_url"],
"username": users[0].get("username"),
"session_key": session["session_key"],
"session_expires": session.get("expires"),
})
def _parse_response(self, callback, response):
if response.error:
log.warning("HTTP error from Facebook: %s", response.error)
callback(None)
return
try:
json = escape.json_decode(response.body)
except Exception:
log.warning("Invalid JSON from Facebook: %r", response.body)
callback(None)
return
if isinstance(json, dict) and json.get("error_code"):
log.warning("Facebook error: %d: %r", json["error_code"],
json.get("error_msg"))
callback(None)
return
callback(json)
def _signature(self, args):
parts = ["%s=%s" % (n, args[n]) for n in sorted(args.keys())]
body = "".join(parts) + self.settings["facebook_secret"]
if isinstance(body, unicode): body = body.encode("utf-8")
return hashlib.md5(body).hexdigest()
class FacebookGraphMixin(OAuth2Mixin):
"""Facebook authentication using the new Graph API and OAuth2."""
_OAUTH_ACCESS_TOKEN_URL = "https://graph.facebook.com/oauth/access_token?"
_OAUTH_AUTHORIZE_URL = "https://graph.facebook.com/oauth/authorize?"
_OAUTH_NO_CALLBACKS = False
def get_authenticated_user(self, redirect_uri, client_id, client_secret,
code, callback, fields=None):
"""Handles the login for the Facebook user, returning a user object.
Example usage::
class FacebookGraphLoginHandler(LoginHandler, tornado.auth.FacebookGraphMixin):
@tornado.web.asynchronous
def get(self):
if self.get_argument("code", False):
self.get_authenticated_user(
redirect_uri='/auth/facebookgraph/',
client_id=self.settings["facebook_api_key"],
client_secret=self.settings["facebook_secret"],
code=self.get_argument("code"),
callback=self.async_callback(
self._on_login))
return
self.authorize_redirect(redirect_uri='/auth/facebookgraph/',
client_id=self.settings["facebook_api_key"],
extra_params={"scope": "read_stream,offline_access"})
def _on_login(self, user):
log.error(user)
self.finish()
"""
http = httpclient.AsyncHTTPClient()
args = {
"redirect_uri": redirect_uri,
"code": code,
"client_id": client_id,
"client_secret": client_secret,
}
#fields = set(['id', 'name', 'first_name', 'last_name',
# 'locale', 'picture', 'link'])
#if extra_fields: fields.update(extra_fields)
if fields:
fields = fields.split(',')
http.fetch(self._oauth_request_token_url(**args),
self.async_callback(self._on_access_token, redirect_uri, client_id,
client_secret, callback, fields))
def _on_access_token(self, redirect_uri, client_id, client_secret,
callback, fields, response):
if response.error:
log.warning('Facebook auth error: %s' % str(response))
callback(None)
return
args = escape.parse_qs_bytes(escape.native_str(response.body))
session = {
"access_token": args["access_token"][-1],
"expires": args.get("expires")
}
if fields is not None:
self.facebook_request(
path="/me",
callback=self.async_callback(
self._on_get_user_info, callback, session, fields),
access_token=session["access_token"],
fields=",".join(fields)
)
else:
self.facebook_request(
path="/me",
callback=self.async_callback(
self._on_get_user_info, callback, session, fields),
access_token=session["access_token"],
)
def _on_get_user_info(self, callback, session, fields, user):
if user is None:
callback(None)
return
fieldmap = {}
if fields is None:
fieldmap.update(user)
else:
for field in fields:
fieldmap[field] = user.get(field)
fieldmap.update({"access_token": session["access_token"], "session_expires": session.get("expires")})
callback(fieldmap)
def facebook_request(self, path, callback, access_token=None,
post_args=None, **args):
"""Fetches the given relative API path, e.g., "/btaylor/picture"
If the request is a POST, post_args should be provided. Query
string arguments should be given as keyword arguments.
An introduction to the Facebook Graph API can be found at
http://developers.facebook.com/docs/api
Many methods require an OAuth access token which you can obtain
through authorize_redirect() and get_authenticated_user(). The
user returned through that process includes an 'access_token'
attribute that can be used to make authenticated requests via
this method. Example usage::
class MainHandler(tornado.web.RequestHandler,
tornado.auth.FacebookGraphMixin):
@tornado.web.authenticated
@tornado.web.asynchronous
def get(self):
self.facebook_request(
"/me/feed",
post_args={"message": "I am posting from my Tornado application!"},
access_token=self.current_user["access_token"],
callback=self.async_callback(self._on_post))
def _on_post(self, new_entry):
if not new_entry:
# Call failed; perhaps missing permission?
self.authorize_redirect()
return
self.finish("Posted a message!")
"""
url = "https://graph.facebook.com" + path
all_args = {}
if access_token:
all_args["access_token"] = access_token
all_args.update(args)
all_args.update(post_args or {})
if all_args: url += "?" + urllib.urlencode(all_args)
callback = self.async_callback(self._on_facebook_request, callback)
http = httpclient.AsyncHTTPClient()
if post_args is not None:
http.fetch(url, method="POST", body=urllib.urlencode(post_args),
callback=callback)
else:
http.fetch(url, callback=callback)
def _on_facebook_request(self, callback, response):
if response.error:
log.warning("Error response %s fetching %s", response.error,
response.request.url)
callback(None)
return
callback(escape.json_decode(response.body))
def _oauth_signature(consumer_token, method, url, parameters={}, token=None):
"""Calculates the HMAC-SHA1 OAuth signature for the given request.
See http://oauth.net/core/1.0/#signing_process
"""
parts = urlparse.urlparse(url)
scheme, netloc, path = parts[:3]
normalized_url = scheme.lower() + "://" + netloc.lower() + path
base_elems = []
base_elems.append(method.upper())
base_elems.append(normalized_url)
base_elems.append("&".join("%s=%s" % (k, _oauth_escape(str(v)))
for k, v in sorted(parameters.items())))
base_string = "&".join(_oauth_escape(e) for e in base_elems)
key_elems = [consumer_token["secret"]]
key_elems.append(token["secret"] if token else "")
key = "&".join(key_elems)
hash = hmac.new(key, base_string, hashlib.sha1)
return binascii.b2a_base64(hash.digest())[:-1]
def _oauth10a_signature(consumer_token, method, url, parameters={}, token=None):
"""Calculates the HMAC-SHA1 OAuth 1.0a signature for the given request.
See http://oauth.net/core/1.0a/#signing_process
"""
parts = urlparse.urlparse(url)
scheme, netloc, path = parts[:3]
normalized_url = scheme.lower() + "://" + netloc.lower() + path
base_elems = []
base_elems.append(method.upper())
base_elems.append(normalized_url)
base_elems.append("&".join("%s=%s" % (k, _oauth_escape(str(v)))
for k, v in sorted(parameters.items())))
base_string = "&".join(_oauth_escape(e) for e in base_elems)
key_elems = [urllib.quote(consumer_token["secret"], safe='~')]
key_elems.append(urllib.quote(token["secret"], safe='~') if token else "")
key = "&".join(key_elems)
hash = hmac.new(key, base_string, hashlib.sha1)
return binascii.b2a_base64(hash.digest())[:-1]
def _oauth_escape(val):
if isinstance(val, unicode):
val = val.encode("utf-8")
return urllib.quote(val, safe="~")
def _oauth_parse_response(body):
p = cgi.parse_qs(body, keep_blank_values=False)
token = dict(key=p["oauth_token"][0], secret=p["oauth_token_secret"][0])
# Add the extra parameters the Provider included to the token
special = ("oauth_token", "oauth_token_secret")
token.update((k, p[k][0]) for k in p if k not in special)
return token
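if __name__ == "__main__":
    # Standalone sanity checks (not part of the original module) showing the
    # behaviour of the helpers defined above.
    print _oauth_escape(u"a b~")
    # -> 'a%20b~'  (spaces percent-encoded, '~' left intact)
    print _oauth_parse_response("oauth_token=abc&oauth_token_secret=xyz&user_id=42")
    # -> {'key': 'abc', 'secret': 'xyz', 'user_id': '42'}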
|
mit
| -3,886,845,082,925,986,300
| 39.751506
| 109
| 0.595717
| false
| 4.298149
| false
| false
| false
|
renebentes/JoomlaPack
|
lib/extensions/plugin.py
|
1
|
2214
|
# coding: utf-8
import sublime
import os
import re
st_version = int(sublime.version())
if st_version > 3000:
from JoomlaPack.lib import *
from JoomlaPack.lib.extensions.base import Base
from JoomlaPack.lib.inflector import *
else:
from lib import *
from lib.extensions.base import Base
from lib.inflector import *
class Plugin(Base):
'''
    Implements Joomla's Plugin extension.
'''
def __init__(self, content=None, inflector=English):
Base.__init__(self, inflector)
self.prefix = 'plg_'
self.template_path = 'plugin'
if content is not None:
self.group, self.name = self.inflector.humanize(content,
prefix='plg_') \
.split(' ')
self.fullname = self.inflector.underscore(
self.inflector.variablize(self.prefix +
self.group + ' ' + self.name))
else:
self.fullname = self.inflector.underscore(
Project().get_project_name())
self.group, self.name = self.inflector.humanize(self.fullname,
prefix='plg_') \
.split(' ')
def rename(self):
for root, dirs, files in os.walk(self.path):
for filename in files:
newname = re.sub('{{name}}', self.name,
re.sub('{{group}}', self.group,
re.sub('{{locale}}',
Helper().language(), filename)))
if newname != filename:
os.rename(os.path.join(root, filename),
os.path.join(root, newname))
for root, dirs, files in os.walk(self.path):
for folder in dirs:
newname = folder.replace(
'{{locale}}', Helper().language())
if newname != folder:
os.rename(os.path.join(root, folder),
os.path.join(root, newname))
def __str__(self):
return "JoomlaPack: Joomla Plugin"
|
mit
| -5,881,219,763,017,948,000
| 33.061538
| 79
| 0.484192
| false
| 4.555556
| false
| false
| false
|
HenriquePaulo/projeto
|
backend/test/editar_produto_tests/editar_produto_new_tests.py
|
1
|
1576
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from base import GAETestCase
from datetime import datetime, date
from decimal import Decimal
from editar_produto_app.editar_produto_model import Editar_produto
from routes.editar_produtos.new import index, save
from tekton.gae.middleware.redirect import RedirectResponse
class IndexTests(GAETestCase):
def test_success(self):
template_response = index()
self.assert_can_render(template_response)
class SaveTests(GAETestCase):
def test_success(self):
self.assertIsNone(Editar_produto.query().get())
redirect_response = save(titulo='titulo_string', preco='preco_string', descricao='descricao_string', imagem='imagem_string', nome='nome_string')
self.assertIsInstance(redirect_response, RedirectResponse)
saved_editar_produto = Editar_produto.query().get()
self.assertIsNotNone(saved_editar_produto)
self.assertEquals('titulo_string', saved_editar_produto.titulo)
self.assertEquals('preco_string', saved_editar_produto.preco)
self.assertEquals('descricao_string', saved_editar_produto.descricao)
self.assertEquals('imagem_string', saved_editar_produto.imagem)
self.assertEquals('nome_string', saved_editar_produto.nome)
def test_error(self):
template_response = save()
errors = template_response.context['errors']
self.assertSetEqual(set(['titulo', 'preco', 'descricao', 'imagem', 'nome']), set(errors.keys()))
self.assert_can_render(template_response)
|
mit
| 3,332,256,648,107,032,600
| 45.352941
| 152
| 0.719543
| false
| 3.589977
| true
| false
| false
|
freeman-lab/dask
|
dask/array/random.py
|
1
|
11940
|
from __future__ import absolute_import, division, print_function
import numpy as np
from itertools import product
from .core import normalize_chunks, Array, names
def doc_wraps(func):
""" Copy docstring from one function to another """
def _(func2):
func2.__doc__ = func.__doc__.replace('>>>', '>>').replace('...', '..')
return func2
return _
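# Note (illustration, not in the original source): the '>>>'/'...' rewrite in
# doc_wraps keeps the copied numpy examples readable while preventing them
# from being collected as doctests, e.g. ">>> np.random.beta(1, 2)" becomes
# ">> np.random.beta(1, 2)".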
class RandomState(object):
"""
Mersenne Twister pseudo-random number generator
    This object contains state to deterministically generate pseudo-random
    numbers from a variety of probability distributions. It is identical to
``np.random.RandomState`` except that all functions also take a ``chunks=``
keyword argument.
Examples
--------
>>> import dask.array as da
>>> state = da.random.RandomState(1234) # a seed
>>> x = state.normal(10, 0.1, size=3, chunks=(2,))
>>> x.compute()
array([ 9.95487579, 10.02999135, 10.08498441])
See Also:
np.random.RandomState
"""
def __init__(self, seed=None):
self._numpy_state = np.random.RandomState(seed)
def _wrap(self, func, *args, **kwargs):
size = kwargs.pop('size')
chunks = kwargs.pop('chunks')
if not isinstance(size, (tuple, list)):
size = (size,)
chunks = normalize_chunks(chunks, size)
name = next(names)
# Get dtype
kw = kwargs.copy()
kw['size'] = (0,)
dtype = func(np.random.RandomState(), *args, **kw).dtype
# Build graph
keys = product([name], *[range(len(bd)) for bd in chunks])
sizes = product(*chunks)
vals = ((_apply_random,
func.__name__,
self._numpy_state.randint(np.iinfo(np.int32).max),
size, args, kwargs)
for size in sizes)
dsk = dict(zip(keys, vals))
return Array(dsk, name, chunks, dtype=dtype)
@doc_wraps(np.random.RandomState.beta)
def beta(self, a, b, size=None, chunks=None):
return self._wrap(np.random.RandomState.beta, a, b,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.binomial)
def binomial(self, n, p, size=None, chunks=None):
return self._wrap(np.random.RandomState.binomial, n, p,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.chisquare)
def chisquare(self, df, size=None, chunks=None):
return self._wrap(np.random.RandomState.chisquare, df,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.choice)
def choice(self, a, size=None, replace=True, p=None, chunks=None):
return self._wrap(np.random.RandomState.choice, a,
                          size=size, replace=replace, p=p, chunks=chunks)
# @doc_wraps(np.random.RandomState.dirichlet)
# def dirichlet(self, alpha, size=None, chunks=None):
@doc_wraps(np.random.RandomState.exponential)
def exponential(self, scale=1.0, size=None, chunks=None):
return self._wrap(np.random.RandomState.exponential, scale,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.f)
def f(self, dfnum, dfden, size=None, chunks=None):
return self._wrap(np.random.RandomState.f, dfnum, dfden,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.gamma)
    def gamma(self, shape, scale=1.0, size=None, chunks=None):
        return self._wrap(np.random.RandomState.gamma, shape, scale,
                          size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.geometric)
def geometric(self, p, size=None, chunks=None):
return self._wrap(np.random.RandomState.geometric, p,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.gumbel)
def gumbel(self, loc=0.0, scale=1.0, size=None, chunks=None):
return self._wrap(np.random.RandomState.gumbel, loc, scale,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.hypergeometric)
def hypergeometric(self, ngood, nbad, nsample, size=None, chunks=None):
return self._wrap(np.random.RandomState.hypergeometric,
ngood, nbad, nsample,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.laplace)
def laplace(self, loc=0.0, scale=1.0, size=None, chunks=None):
return self._wrap(np.random.RandomState.laplace, loc, scale,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.logistic)
def logistic(self, loc=0.0, scale=1.0, size=None, chunks=None):
return self._wrap(np.random.RandomState.logistic, loc, scale,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.lognormal)
def lognormal(self, mean=0.0, sigma=1.0, size=None, chunks=None):
return self._wrap(np.random.RandomState.lognormal, mean, sigma,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.logseries)
def logseries(self, p, size=None, chunks=None):
return self._wrap(np.random.RandomState.logseries, p,
size=size, chunks=chunks)
# multinomial
@doc_wraps(np.random.RandomState.negative_binomial)
def negative_binomial(self, n, p, size=None, chunks=None):
return self._wrap(np.random.RandomState.negative_binomial, n, p,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.noncentral_chisquare)
def noncentral_chisquare(self, df, nonc, size=None, chunks=None):
return self._wrap(np.random.RandomState.noncentral_chisquare, df, nonc,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.noncentral_f)
def noncentral_f(self, dfnum, dfden, nonc, size=None, chunks=None):
return self._wrap(np.random.RandomState.noncentral_f,
dfnum, dfden, nonc,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.normal)
def normal(self, loc=0.0, scale=1.0, size=None, chunks=None):
return self._wrap(np.random.RandomState.normal, loc, scale,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.pareto)
def pareto(self, a, size=None, chunks=None):
return self._wrap(np.random.RandomState.pareto, a,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.poisson)
def poisson(self, lam=1.0, size=None, chunks=None):
return self._wrap(np.random.RandomState.poisson, lam,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.power)
def power(self, a, size=None, chunks=None):
return self._wrap(np.random.RandomState.power, a,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.randint)
def randint(self, low, high=None, size=None, chunks=None):
return self._wrap(np.random.RandomState.randint, low, high,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.random_integers)
def random_integers(self, low, high=None, size=None, chunks=None):
return self._wrap(np.random.RandomState.random_integers, low, high,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.random_sample)
def random_sample(self, size=None, chunks=None):
return self._wrap(np.random.RandomState.random_sample,
size=size, chunks=chunks)
random = random_sample
@doc_wraps(np.random.RandomState.rayleigh)
def rayleigh(self, scale=1.0, size=None, chunks=None):
return self._wrap(np.random.RandomState.rayleigh, scale,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.standard_cauchy)
def standard_cauchy(self, size=None, chunks=None):
return self._wrap(np.random.RandomState.standard_cauchy,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.standard_exponential)
def standard_exponential(self, size=None, chunks=None):
return self._wrap(np.random.RandomState.standard_exponential,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.standard_gamma)
def standard_gamma(self, shape, size=None, chunks=None):
return self._wrap(np.random.RandomState.standard_gamma, shape,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.standard_normal)
def standard_normal(self, size=None, chunks=None):
return self._wrap(np.random.RandomState.standard_normal,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.standard_t)
def standard_t(self, df, size=None, chunks=None):
return self._wrap(np.random.RandomState.standard_t, df,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.tomaxint)
def tomaxint(self, size=None, chunks=None):
return self._wrap(np.random.RandomState.tomaxint,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.triangular)
def triangular(self, left, mode, right, size=None, chunks=None):
return self._wrap(np.random.RandomState.triangular, left, mode, right,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.uniform)
def uniform(self, low=0.0, high=1.0, size=None, chunks=None):
return self._wrap(np.random.RandomState.uniform, low, high,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.vonmises)
def vonmises(self, mu, kappa, size=None, chunks=None):
return self._wrap(np.random.RandomState.vonmises, mu, kappa,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.wald)
def wald(self, mean, scale, size=None, chunks=None):
return self._wrap(np.random.RandomState.wald, mean, scale,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.weibull)
def weibull(self, a, size=None, chunks=None):
return self._wrap(np.random.RandomState.weibull, a,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.zipf)
def zipf(self, a, size=None, chunks=None):
return self._wrap(np.random.RandomState.zipf, a,
size=size, chunks=chunks)
def _apply_random(func, seed, size, args, kwargs):
""" Apply RandomState method with seed
>>> _apply_random('normal', 123, 3, (10, 1.0), {})
array([ 8.9143694 , 10.99734545, 10.2829785 ])
"""
state = np.random.RandomState(seed)
func = getattr(state, func)
return func(*args, size=size, **kwargs)
_state = RandomState()
beta = _state.beta
binomial = _state.binomial
chisquare = _state.chisquare
exponential = _state.exponential
f = _state.f
gamma = _state.gamma
geometric = _state.geometric
gumbel = _state.gumbel
hypergeometric = _state.hypergeometric
laplace = _state.laplace
logistic = _state.logistic
lognormal = _state.lognormal
logseries = _state.logseries
negative_binomial = _state.negative_binomial
noncentral_chisquare = _state.noncentral_chisquare
noncentral_f = _state.noncentral_f
normal = _state.normal
pareto = _state.pareto
poisson = _state.poisson
power = _state.power
rayleigh = _state.rayleigh
random_sample = _state.random_sample
random = random_sample
triangular = _state.triangular
uniform = _state.uniform
vonmises = _state.vonmises
wald = _state.wald
weibull = _state.weibull
zipf = _state.zipf
"""
Standard distributions
"""
standard_cauchy = _state.standard_cauchy
standard_exponential = _state.standard_exponential
standard_gamma = _state.standard_gamma
standard_normal = _state.standard_normal
standard_t = _state.standard_t
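# --- Usage sketch (not part of the original module) ---
# A minimal example, assuming this module is importable as dask.array.random,
# showing the extra ``chunks=`` keyword that distinguishes these functions
# from their NumPy counterparts:
#
#     import dask.array as da
#     x = da.random.normal(10, 0.1, size=(1000,), chunks=(100,))
#     x.compute()  # evaluates ten independent 100-element chunks
#
# Each chunk receives its own integer seed drawn in ``_wrap`` above, so
# results are reproducible for a fixed RandomState seed while chunks stay
# statistically independent.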
|
bsd-3-clause
| 7,957,553,145,970,565,000
| 37.146965
| 79
| 0.63258
| false
| 3.597469
| false
| false
| false
|
nlgcoin/guldencoin-official
|
test/functional/mempool_spend_coinbase.py
|
2
|
2317
|
#!/usr/bin/env python3
# Copyright (c) 2014-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test spending coinbase transactions.
A spend of the coinbase transaction in block N can appear in block
N+100... so it is valid in the mempool when the best block
height is N+99.
This test makes sure coinbase spends that will be mature
in the next block are accepted into the memory pool,
but less mature coinbase spends are NOT.
"""
from test_framework.test_framework import GuldenTestFramework
from test_framework.blocktools import create_raw_transaction
from test_framework.util import assert_equal, assert_raises_rpc_error
class MempoolSpendCoinbaseTest(GuldenTestFramework):
def set_test_params(self):
self.num_nodes = 1
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
chain_height = self.nodes[0].getblockcount()
assert_equal(chain_height, 200)
node0_address = self.nodes[0].getnewaddress()
# Coinbase at height chain_height-100+1 ok in mempool, should
        # get mined. Coinbase at height chain_height-100+2 is
        # too immature to spend.
b = [self.nodes[0].getblockhash(n) for n in range(101, 103)]
coinbase_txids = [self.nodes[0].getblock(h)['tx'][0] for h in b]
spends_raw = [create_raw_transaction(self.nodes[0], txid, node0_address, amount=49.99) for txid in coinbase_txids]
spend_101_id = self.nodes[0].sendrawtransaction(spends_raw[0])
# coinbase at height 102 should be too immature to spend
assert_raises_rpc_error(-26,"bad-txns-premature-spend-of-coinbase", self.nodes[0].sendrawtransaction, spends_raw[1])
# mempool should have just spend_101:
assert_equal(self.nodes[0].getrawmempool(), [ spend_101_id ])
# mine a block, spend_101 should get confirmed
self.nodes[0].generate(1)
assert_equal(set(self.nodes[0].getrawmempool()), set())
# ... and now height 102 can be spent:
spend_102_id = self.nodes[0].sendrawtransaction(spends_raw[1])
assert_equal(self.nodes[0].getrawmempool(), [ spend_102_id ])
if __name__ == '__main__':
MempoolSpendCoinbaseTest().main()
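# A small arithmetic sketch (not part of the original test) of the maturity
# rule exercised above: a coinbase from block N may be spent in block N+100,
# i.e. its spend is mempool-valid once the tip height reaches N+99.
#
#     def coinbase_is_spendable(coinbase_height, tip_height):
#         return tip_height - coinbase_height >= 99
#
#     # With the chain height of 200 asserted above:
#     # coinbase_is_spendable(101, 200) -> True   (spend mined in block 201)
#     # coinbase_is_spendable(102, 200) -> False  (premature until tip is 201)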
|
mit
| -8,024,148,556,039,803,000
| 40.375
| 124
| 0.694001
| false
| 3.484211
| true
| false
| false
|
Tomasuh/Tomasuh.github.io
|
files/cyclic/dbcommands.py
|
1
|
1472
|
import sqlite3
import os.path
import datetime
class the_db:
def __init__(self):
exists = os.path.exists('./cyclic.db')
self.conn = sqlite3.connect('cyclic.db')
self.c = self.conn.cursor()
if exists:
return
# If it's a new instance of the db we need to generate the layout
sql = '''CREATE TABLE posts (key text PRIMARY KEY,
title text,
user text,
date integer,
size integer,
syntax text,
expire integer,
scrape_url text,
full_url text)'''
self.c.execute(sql)
self.conn.commit()
def post_exists(self, key):
sql = '''SELECT COUNT(*) FROM posts WHERE key=?'''
self.c.execute(sql, (key,))
return self.c.fetchone()[0]==1
def add_post(self, data):
if self.post_exists(data["key"]):
print "Post exists %s" % data["key"]
return
sql = '''INSERT INTO posts(key, title, user,date, size, syntax, expire, scrape_url, full_url)
VALUES (?,?,?,?,?,?,?,?,?)'''
param = (data["key"], \
data["title"], \
data["user"], \
data["date"], \
data["size"], \
data["syntax"], \
data["expire"], \
data["scrape_url"], \
data["full_url"])
self.c.execute(sql, param)
self.conn.commit()
def fetch_posts(self):
sql = '''SELECT * FROM posts'''
self.c.execute(sql)
rows = self.c.fetchall()
n = 0
while n < len(rows):
tmp = list(rows[n])
tmp[3] = datetime.datetime.fromtimestamp(tmp[3])
tmp[6] = datetime.datetime.fromtimestamp(tmp[6])
rows[n] = tmp
n += 1
return rows
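# A minimal usage sketch (hypothetical post data, not part of the original
# file); date and expire are stored as Unix timestamps and come back as
# datetime objects from fetch_posts():
#
#     db = the_db()
#     db.add_post({"key": "abc123", "title": "example", "user": "anon",
#                  "date": 1500000000, "size": 42, "syntax": "text",
#                  "expire": 1500003600,
#                  "scrape_url": "http://example.com/raw/abc123",
#                  "full_url": "http://example.com/abc123"})
#     rows = db.fetch_posts()  # rows[0][3] and rows[0][6] are datetimes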
|
mit
| -3,592,289,148,472,909,300
| 18.626667
| 95
| 0.605978
| false
| 2.869396
| false
| false
| false
|
cloudmesh/book
|
cloud-clusters/bin/todo.py
|
1
|
1641
|
#!/usr/bin/env python
from __future__ import print_function
import sys
import os
import fnmatch
import glob
from pprint import pprint
def recursive_glob(rootdir='.', pattern='*.md'):
"""Search recursively for files matching a specified pattern.
Adapted from http://stackoverflow.com/questions/2186525/use-a-glob-to-find-files-recursively-in-python
"""
matches = []
for root, dirnames, filenames in os.walk(rootdir):
for filename in fnmatch.filter(filenames, pattern):
matches.append(os.path.join(root, filename))
return matches
files = recursive_glob(rootdir="chapters")
def extract_todos(filename):
if 'todo.md' in filename:
return
with open(filename, 'r') as f:
content = f.readlines()
count = 1
for line in content:
line = line.strip()
# path = filename.replace("chapters/","")
        path = os.path.basename(filename).replace(".md", "")
link = '[' + path + '](https://github.com/cloudmesh/book/edit/master/cloud-clusters/' + filename + ')' + '{style="font-size:50%"}'
if "todo" in line:
print ("|", count, "|", link, "|", line, "|")
if "TODO" in line:
line = line.replace("\TODO{","")
line = line.replace("}","")
line = line.replace("TODO:","")
line = line.replace("TODO","")
print("|", count, "|", link, "|", line, "|")
count = count + 1
#print("# TODO")
print()
print('<div class="smalltable">')
print("| Line | Path | Description |")
print("| - | ---- | -------------- |")
for file in files:
extract_todos(file)
print('</div>')
print()
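# For example (hypothetical tree), recursive_glob(rootdir='chapters',
# pattern='*.md') might return ['chapters/intro.md',
# 'chapters/cluster/scheduling.md', ...]; each TODO found in those files is
# then printed as one row of the markdown table above.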
|
apache-2.0
| -738,105,093,781,443,800
| 24.246154
| 138
| 0.582572
| false
| 3.567391
| false
| false
| false
|
raiden-network/raiden
|
tools/pip-compile-wrapper.py
|
1
|
13013
|
#!/usr/bin/env python
"""
Helper utility to compile / upgrade requirements files from templates.
This only manages dependencies between requirements sources.
The actual compiling is delegated to ``pip-compile`` from the ``pip-tools`` package.
NOTE: This utility *must only* use stdlib imports in order to be runnable even
before the dev requirements are installed.
"""
import os
import re
import shlex
import subprocess
import sys
from argparse import ArgumentParser
from enum import Enum
from itertools import chain, groupby, repeat
from operator import itemgetter
from pathlib import Path
from shutil import which
from typing import Dict, Iterable, Iterator, List, Optional, Set, Tuple
# Regex taken from https://www.python.org/dev/peps/pep-0508/#names
# The trailing `$` is intentionally left out since we're dealing with complete requirement lines,
# not just bare package names here. Since regex matching is greedy by default this shouldn't cause
# any problems for valid package names.
REQUIREMENT_RE = re.compile(r"^([A-Z0-9][A-Z0-9._-]*[A-Z0-9])", re.IGNORECASE)
REQUIREMENTS_SOURCE_DEV = "requirements-dev.in"
SCRIPT_NAME = os.environ.get("_SCRIPT_NAME", sys.argv[0])
REQUIREMENTS_DIR = Path(__file__).parent.parent.joinpath("requirements").resolve()
SOURCE_PATHS: Dict[str, Path] = {
path.relative_to(REQUIREMENTS_DIR).stem: path.resolve()
for path in REQUIREMENTS_DIR.glob("*.in")
}
TARGET_PATHS: Dict[str, Path] = {
name: REQUIREMENTS_DIR.joinpath(name).with_suffix(".txt") for name in SOURCE_PATHS.keys()
}
SOURCE_DEPENDENCIES: Dict[str, Set[str]] = {}
class TargetType(Enum):
SOURCE = 1
TARGET = 2
ALL = 3
def _resolve_source_dependencies() -> None:
"""Determine direct dependencies between requirements files
Dependencies of the form ``-r <other-file>`` are recognized.
"""
for source_name, source_path in SOURCE_PATHS.items():
source_path = source_path.resolve()
SOURCE_DEPENDENCIES[source_name] = set()
target_dir: Path = source_path.parent
with source_path.open("rt") as target_file:
for line in target_file:
line = line.strip()
if line.startswith("-r"):
required = (
target_dir.joinpath(line.lstrip("-r").strip())
.resolve()
.relative_to(REQUIREMENTS_DIR)
.stem
)
SOURCE_DEPENDENCIES[source_name].add(required)
_resolve_source_dependencies()
def _run_pip_compile(
source_name: str,
upgrade_all: bool = False,
upgrade_packages: Optional[Set[str]] = None,
verbose: bool = False,
dry_run: bool = False,
pre: bool = False,
) -> None:
"""Run pip-compile with the given parameters
This automatically makes sure that packages listed in ``upgrade_packages`` are only passed
for requirement files that already contain this package either in the source or the target.
This is necessary since pip-compile will otherwise unconditionally add that package to the
output.
"""
assert_msg = "Only one of `upgrade_all` or `upgrade_packages` may be given."
assert not (upgrade_all and upgrade_packages), assert_msg
pip_compile_exe = which("pip-compile")
if not pip_compile_exe:
raise RuntimeError("pip-compile missing. This shouldn't happen.")
if not upgrade_packages:
upgrade_packages = set()
working_path = Path.cwd()
source_path = SOURCE_PATHS[source_name]
target_path = TARGET_PATHS[source_name]
upgrade_packages_cmd: List[str] = []
if upgrade_packages:
packages_in_target = {
package_name
for package_name, _ in _get_requirement_packages(source_name, TargetType.ALL)
}
upgrade_packages_cmd = list(
chain.from_iterable(
zip(repeat("--upgrade-package"), upgrade_packages.intersection(packages_in_target))
)
)
upgrade_all_cmd: List[str] = []
if upgrade_all:
upgrade_all_cmd = ["--upgrade"]
dry_run_cmd = ["--dry-run"] if dry_run else []
pre_cmd = ["--pre"] if pre else []
# We use a relative path for the source file because of
# https://github.com/raiden-network/raiden/pull/5987#discussion_r392145782 and
# https://github.com/jazzband/pip-tools/issues/1084
command = [
pip_compile_exe,
"--verbose" if verbose else "--quiet",
*dry_run_cmd,
*pre_cmd,
"--no-emit-index-url",
*upgrade_packages_cmd,
*upgrade_all_cmd,
"--output-file",
str(target_path),
str(source_path.relative_to(working_path)),
]
print(f"Compiling {source_path.name}...", end="", flush=True)
if verbose:
print(f"\nRunning command: {' '.join(shlex.quote(c) for c in command)}")
env = os.environ.copy()
env[
"CUSTOM_COMPILE_COMMAND"
] = "'requirements/deps compile' (for details see requirements/README)"
process = subprocess.run(
command, capture_output=(not verbose), cwd=str(source_path.parent), env=env
)
if process.returncode == 0:
print("\b\b Success.")
return
print("\b\b Error!")
if not verbose:
print(process.stdout.decode())
print(process.stderr.decode())
process.check_returncode()
def _resolve_deps(source_names: Iterable[str]) -> List[str]:
"""Partially order source_names based on their dependencies
Raises an Exception if not possible.
The resulting list has the following property: Each entry does not depend on a later entry.
"""
requirements = {
source: dependencies.intersection(source_names)
for source, dependencies in SOURCE_DEPENDENCIES.items()
if source in source_names
}
solution: List[str] = []
while requirements:
satisfied = {source for source, targets in requirements.items() if not targets}
if not satisfied:
raise RuntimeError(f"Missing dependencies or circular dependency in: {requirements}")
for source in satisfied:
del requirements[source]
for dependencies in requirements.values():
dependencies -= satisfied
solution.extend(satisfied)
return solution
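# For example (hypothetical sources), with
#     SOURCE_DEPENDENCIES == {"base": set(), "dev": {"base"}, "docs": {"base"}}
# _resolve_deps({"base", "dev", "docs"}) returns "base" first, followed by
# "dev" and "docs" in either order, so every file is compiled only after the
# files it includes via ``-r``.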
def _get_requirement_packages(
source_name: str, where: TargetType = TargetType.SOURCE
) -> Iterator[Tuple[str, str]]:
if where is TargetType.SOURCE:
source_paths = [SOURCE_PATHS.get(source_name)]
elif where is TargetType.TARGET:
source_paths = [TARGET_PATHS.get(source_name)]
elif where is TargetType.ALL:
source_paths = [path.get(source_name) for path in [SOURCE_PATHS, TARGET_PATHS]]
else:
raise ValueError("Invalid 'where'")
filtered_source_paths = [source_path for source_path in source_paths if source_path]
if not filtered_source_paths or not all(path.exists() for path in filtered_source_paths):
        return
for source_path in filtered_source_paths:
with source_path.open("rt") as source_file:
for line in source_file:
line, *_ = line.strip().partition("#")
line = line.strip()
if not line or line.startswith("-"):
continue
match = REQUIREMENT_RE.search(line)
if match:
yield match.group(1), line
def _get_sources_for_packages(package_names: Set[str], where: TargetType) -> Dict[str, Set[str]]:
"""Return source and / or target files concerned by packages"""
package_to_source = [
(package_name, source_name)
for source_name in SOURCE_PATHS.keys()
for package_name, _ in _get_requirement_packages(source_name, where)
if package_name in package_names
]
return {
key: {source_name for _, source_name in group}
for key, group in groupby(sorted(package_to_source, key=itemgetter(0)), key=itemgetter(0))
}
def _get_requirement_package(source_name: str, target_package_name: str) -> Optional[str]:
for package_name, req_line in _get_requirement_packages(source_name):
if package_name == target_package_name:
return req_line
return None
def _ensure_pip_tools() -> None:
if not which("pip-compile"):
print("pip-tools not available.")
pip_tools_req = _get_requirement_package(
REQUIREMENTS_SOURCE_DEV.replace(".in", ""), "pip-tools"
)
if not pip_tools_req:
raise RuntimeError(f"Package 'pip-tools' not found in {REQUIREMENTS_SOURCE_DEV}")
print(f"Installing {pip_tools_req}...", end="", flush=True)
process = subprocess.run(
[sys.executable, "-m", "pip", "install", pip_tools_req],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
process.check_returncode()
print("\b\b Done.")
def compile_source(
upgrade_all: bool = False, verbose: bool = False, dry_run: bool = False
) -> None:
for source_name in _resolve_deps(SOURCE_PATHS.keys()):
_run_pip_compile(source_name, upgrade_all=upgrade_all, verbose=verbose, dry_run=dry_run)
def upgrade_source(
upgrade_package_names: Set[str],
verbose: bool = False,
dry_run: bool = False,
pre: bool = False,
) -> None:
packages_to_sources = _get_sources_for_packages(upgrade_package_names, TargetType.ALL)
_newline = "\n - "
missing_packages = upgrade_package_names - packages_to_sources.keys()
if missing_packages:
print(
"Some of the given packages were not found in either source or target files.\n"
"Please check that the packages are spelled correctly.\n"
"If any of these packages were newly added to any of the source files you need to "
f"run '{SCRIPT_NAME} compile' first.\n"
f"Missing package(s):\n - {_newline.join(missing_packages)}"
)
sys.exit(1)
grouped_packages_to_sources = [
(set(package_name for package_name, _ in group), key)
for key, group in groupby(
sorted(packages_to_sources.items(), key=itemgetter(1)), key=itemgetter(1)
)
]
for package_names, source_names in grouped_packages_to_sources:
print(f"Upgrading package(s):\n - {_newline.join(package_names)}")
for source_name in _resolve_deps(source_names):
_run_pip_compile(
source_name,
upgrade_packages=package_names,
verbose=verbose,
dry_run=dry_run,
pre=pre,
)
def main() -> None:
parser = ArgumentParser(prog=SCRIPT_NAME)
parser.add_argument("-v", "--verbose", action="store_true", default=False)
parser.add_argument("-n", "--dry-run", action="store_true", default=False)
commands = parser.add_subparsers(title="Sub-commands", required=True, dest="command")
commands.add_parser(
"compile",
help=(
"Compile source files. "
"Keep current versions unless changed requirements force newer versions."
),
)
upgrade_parser = commands.add_parser(
"upgrade",
help=(
"Compile source files and upgrade package versions. "
"Optionally specify package names to upgrade only those."
),
)
upgrade_parser.add_argument(
"--pre",
action="store_true",
default=False,
help="Use pre-release versions of packages if available.",
)
upgrade_parser.add_argument("packages", metavar="package", nargs="*")
parsed = parser.parse_args()
_ensure_pip_tools()
if parsed.command == "compile":
compile_source(verbose=parsed.verbose, dry_run=parsed.dry_run)
elif parsed.command == "upgrade":
packages = set(parsed.packages)
if not packages:
# This is a standalone script which is not using gevent
resp = input( # pylint: disable=gevent-input-forbidden
"Are you sure you want to upgrade ALL packages? [y/N] "
)
if resp.lower() != "y":
print("Aborting")
sys.exit(1)
compile_source(upgrade_all=True, verbose=parsed.verbose, dry_run=parsed.dry_run)
else:
if parsed.pre:
print(
"Warning: Using the '--pre' option can cause unintended upgrades to "
"prerelease versions of unrelated packages. This is due to constraints in the "
"underlying tools (pip-compile / pip) that don't currently allow constraining "
"pre-releases to only specific packages.\n"
"Please carefully inspect the generated output files!"
)
upgrade_source(
packages, verbose=parsed.verbose, dry_run=parsed.dry_run, pre=parsed.pre
)
if __name__ == "__main__":
main()
|
mit
| 8,479,760,281,170,723,000
| 35.656338
| 99
| 0.621763
| false
| 3.996622
| false
| false
| false
|
Netflix-Skunkworks/swag-client
|
swag_client/cli.py
|
1
|
11229
|
import logging
import os
import time
import simplejson as json
import boto3
import click
import click_log
from tabulate import tabulate
from swag_client.backend import SWAGManager
from swag_client.__about__ import __version__
from swag_client.migrations import run_migration
from swag_client.util import parse_swag_config_options
from swag_client.exceptions import InvalidSWAGDataException
log = logging.getLogger('swag_client')
click_log.basic_config(log)
class CommaList(click.ParamType):
name = 'commalist'
def convert(self, value, param, ctx):
return value.split(',')
def create_swag_from_ctx(ctx):
"""Creates SWAG client from the current context."""
swag_opts = {}
if ctx.type == 'file':
swag_opts = {
'swag.type': 'file',
'swag.data_dir': ctx.data_dir,
'swag.data_file': ctx.data_file
}
elif ctx.type == 's3':
swag_opts = {
'swag.type': 's3',
'swag.bucket_name': ctx.bucket_name,
'swag.data_file': ctx.data_file,
'swag.region': ctx.region
}
elif ctx.type == 'dynamodb':
swag_opts = {
'swag.type': 'dynamodb',
'swag.region': ctx.region
}
return SWAGManager(**parse_swag_config_options(swag_opts))
class AppContext(object):
def __init__(self):
self.namespace = None
self.region = None
self.type = None
self.data_dir = None
self.data_file = None
self.bucket_name = None
self.dry_run = None
pass_context = click.make_pass_decorator(AppContext, ensure=True)
@click.group()
@click.option('--namespace', default='accounts')
@click.option('--dry-run', type=bool, default=False, is_flag=True, help='Run command without persisting anything.')
@click_log.simple_verbosity_option(log)
@click.version_option(version=__version__)
@pass_context
def cli(ctx, namespace, dry_run):
if not ctx.namespace:
ctx.namespace = namespace
if not ctx.dry_run:
ctx.dry_run = dry_run
@cli.group()
@click.option('--region', default='us-east-1', help='Region the table is located in.')
@pass_context
def dynamodb(ctx, region):
if not ctx.region:
ctx.region = region
ctx.type = 'dynamodb'
@cli.group()
@click.option('--data-dir', help='Directory to store data.', default=os.getcwd())
@click.option('--data-file')
@pass_context
def file(ctx, data_dir, data_file):
"""Use the File SWAG Backend"""
    if not ctx.data_file:
ctx.data_file = data_file
if not ctx.data_dir:
ctx.data_dir = data_dir
ctx.type = 'file'
@cli.group()
@click.option('--bucket-name', help='Name of the bucket you wish to operate on.')
@click.option('--data-file', help='Key name of the file to operate on.')
@click.option('--region', default='us-east-1', help='Region the bucket is located in.')
@pass_context
def s3(ctx, bucket_name, data_file, region):
"""Use the S3 SWAG backend."""
if not ctx.data_file:
ctx.data_file = data_file
if not ctx.bucket_name:
ctx.bucket_name = bucket_name
if not ctx.region:
ctx.region = region
ctx.type = 's3'
@cli.command()
@pass_context
def list(ctx):
"""List SWAG account info."""
if ctx.namespace != 'accounts':
click.echo(
click.style('Only account data is available for listing.', fg='red')
)
return
swag = create_swag_from_ctx(ctx)
accounts = swag.get_all()
_table = [[result['name'], result.get('id')] for result in accounts]
click.echo(
tabulate(_table, headers=["Account Name", "Account Number"])
)
@cli.command()
@click.option('--name', help='Name of the service to list.')
@pass_context
def list_service(ctx, name):
"""Retrieve accounts pertaining to named service."""
swag = create_swag_from_ctx(ctx)
accounts = swag.get_service_enabled(name)
_table = [[result['name'], result.get('id')] for result in accounts]
click.echo(
tabulate(_table, headers=["Account Name", "Account Number"])
)
@cli.command()
@click.option('--start-version', default=1, help='Starting version.')
@click.option('--end-version', default=2, help='Ending version.')
@pass_context
def migrate(ctx, start_version, end_version):
"""Transition from one SWAG schema to another."""
if ctx.type == 'file':
if ctx.data_file:
file_path = ctx.data_file
else:
            file_path = os.path.join(ctx.data_dir, ctx.namespace + '.json')
        # todo make this more like alembic and determine/load versions automatically
with open(file_path, 'r') as f:
data = json.loads(f.read())
data = run_migration(data, start_version, end_version)
with open(file_path, 'w') as f:
f.write(json.dumps(data))
@cli.command()
@pass_context
def propagate(ctx):
"""Transfers SWAG data from one backend to another"""
data = []
if ctx.type == 'file':
if ctx.data_file:
file_path = ctx.data_file
else:
file_path = os.path.join(ctx.data_dir, ctx.namespace + '.json')
with open(file_path, 'r') as f:
data = json.loads(f.read())
swag_opts = {
'swag.type': 'dynamodb'
}
swag = SWAGManager(**parse_swag_config_options(swag_opts))
for item in data:
time.sleep(2)
swag.create(item, dry_run=ctx.dry_run)
@cli.command()
@pass_context
@click.argument('data', type=click.File())
def create(ctx, data):
"""Create a new SWAG item."""
swag = create_swag_from_ctx(ctx)
data = json.loads(data.read())
for account in data:
swag.create(account, dry_run=ctx.dry_run)
@cli.command()
@pass_context
@click.argument('data', type=click.File())
def update(ctx, data):
"""Updates a given record."""
swag = create_swag_from_ctx(ctx)
data = json.loads(data.read())
for account in data:
swag.update(account, dry_run=ctx.dry_run)
@cli.command()
@pass_context
@click.argument('name')
@click.option('--path', type=str, default='', help='JMESPath string to filter accounts to be targeted. Default is all accounts.')
@click.option('--regions', type=CommaList(), default='all',
help='AWS regions that should be configured. These are comma delimited (e.g. us-east-1, us-west-2, eu-west-1). Default: all')
@click.option('--disabled', type=bool, default=False, is_flag=True, help='Service should be marked as disabled.')
def deploy_service(ctx, path, name, regions, disabled):
"""Deploys a new service JSON to multiple accounts. NAME is the service name you wish to deploy."""
enabled = False if disabled else True
swag = create_swag_from_ctx(ctx)
accounts = swag.get_all(search_filter=path)
log.debug('Searching for accounts. Found: {} JMESPath: `{}`'.format(len(accounts), path))
for a in accounts:
try:
if not swag.get_service(name, search_filter="[?id=='{id}']".format(id=a['id'])):
log.info('Found an account to update. AccountName: {name} AccountNumber: {number}'.format(name=a['name'], number=a['id']))
status = []
for region in regions:
status.append(
{
'enabled': enabled,
'region': region
}
)
a['services'].append(
{
'name': name,
'status': status
}
)
swag.update(a, dry_run=ctx.dry_run)
except InvalidSWAGDataException as e:
log.warning('Found a data quality issue. AccountName: {name} AccountNumber: {number}'.format(name=a['name'], number=a['id']))
log.info('Service has been deployed to all matching accounts.')
@cli.command()
@pass_context
@click.argument('data', type=click.File())
def seed_aws_data(ctx, data):
"""Seeds SWAG from a list of known AWS accounts."""
swag = create_swag_from_ctx(ctx)
for k, v in json.loads(data.read()).items():
for account in v['accounts']:
data = {
'description': 'This is an AWS owned account used for {}'.format(k),
'id': account['account_id'],
'contacts': [],
'owner': 'aws',
'provider': 'aws',
'sensitive': False,
'email': 'support@amazon.com',
'name': k + '-' + account['region']
}
click.echo(click.style(
'Seeded Account. AccountName: {}'.format(data['name']), fg='green')
)
swag.create(data, dry_run=ctx.dry_run)
@cli.command()
@pass_context
@click.option('--owner', type=str, required=True, help='The owner for the account schema.')
def seed_aws_organization(ctx, owner):
"""Seeds SWAG from an AWS organziation."""
swag = create_swag_from_ctx(ctx)
accounts = swag.get_all()
_ids = [result.get('id') for result in accounts]
client = boto3.client('organizations')
paginator = client.get_paginator('list_accounts')
response_iterator = paginator.paginate()
count = 0
for response in response_iterator:
for account in response['Accounts']:
if account['Id'] in _ids:
click.echo(click.style(
'Ignoring Duplicate Account. AccountId: {} already exists in SWAG'.format(account['Id']), fg='yellow')
)
continue
if account['Status'] == 'SUSPENDED':
status = 'deprecated'
else:
status = 'created'
data = {
'id': account['Id'],
'name': account['Name'],
'description': 'Account imported from AWS organization.',
'email': account['Email'],
'owner': owner,
'provider': 'aws',
'contacts': [],
'sensitive': False,
'status': [{'region': 'all', 'status': status}]
}
click.echo(click.style(
'Seeded Account. AccountName: {}'.format(data['name']), fg='green')
)
count += 1
swag.create(data, dry_run=ctx.dry_run)
click.echo('Seeded {} accounts to SWAG.'.format(count))
# todo perhaps there is a better way of dynamically adding subcommands?
file.add_command(list)
file.add_command(migrate)
file.add_command(propagate)
file.add_command(create)
file.add_command(seed_aws_data)
file.add_command(seed_aws_organization)
file.add_command(update)
file.add_command(deploy_service)
file.add_command(list_service)
dynamodb.add_command(list)
dynamodb.add_command(create)
dynamodb.add_command(update)
dynamodb.add_command(seed_aws_data)
dynamodb.add_command(seed_aws_organization)
dynamodb.add_command(deploy_service)
dynamodb.add_command(list_service)
s3.add_command(list)
s3.add_command(create)
s3.add_command(update)
s3.add_command(seed_aws_data)
s3.add_command(seed_aws_organization)
s3.add_command(deploy_service)
s3.add_command(list_service)
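# Example invocations (hypothetical paths and bucket names, not part of the
# original file); the console-script name ``swag-client`` is an assumption
# and the module could equally be run via its installed entry point:
#
#     # list accounts from a local JSON file backend
#     swag-client file --data-dir ./swag-data list
#
#     # list accounts from an S3 backend
#     swag-client s3 --bucket-name my-swag-bucket --data-file accounts.json list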
|
apache-2.0
| -4,546,113,567,798,324,700
| 29.680328
| 139
| 0.596669
| false
| 3.612934
| false
| false
| false
|
Atothendrew/SpartanTokenService
|
SpartanTokenService.py
|
1
|
3313
|
#!/usr/bin/python
import re
import json
import random
import urllib
import requests
from requests import Request, Session
from requests.cookies import RequestsCookieJar
MS_LOGIN = "https://logSpartanTokenService.pyin.live.com/login.srf?id=2"
WAYPOINT_GATEWAY = "https://www.halowaypoint.com/oauth/signin?returnUrl=https%3a%2f%2fwww.halowaypoint.com%2fen-us&locale=en-US"
WAYPOINT_REGISTER_URL = "https://settings.svc.halowaypoint.com/RegisterClientService.svc/spartantoken/wlid?_={0}"
SPARTAN_TOKEN_GENERATOR = "https://app.halowaypoint.com/oauth/spartanToken"
URL_TO_SCRAPE = "https://login.live.com/oauth20_authorize.srf?client_id=000000004C0BD2F1&scope=xbox.basic+xbox.offline_access&response_type=code&redirect_uri=https://www.halowaypoint.com/oauth/callback&state=https%253a%252f%252fwww.halowaypoint.com%252fen-us&locale=en-US&display=touch"
URL_TO_POST = "https://login.live.com/ppsecure/post.srf?client_id=000000004C0BD2F1&scope=xbox.basic+xbox.offline_access&response_type=code&redirect_uri=https://www.halowaypoint.com/oauth/callback&state=https%253a%252f%252fwww.halowaypoint.com%252fen-us&locale=en-US&display=touch&bk=1383096785"
EMAIL = "PLACE_EMAIL_HERE"
PASSWORD = "PLACE_PASSWORD_HERE"
def get_spartan_token():
# Get the First Cookies
cookie_container = RequestsCookieJar()
first_response = requests.get(URL_TO_SCRAPE)
body = first_response.text.encode('utf-8', 'ignore')
for cookie in first_response.cookies: cookie_container.set_cookie(cookie)
# Get the PPFT
ppft_regex = re.compile("name=\"PPFT\".*?value=\"(.*?)\"")
ppft_match = re.findall(ppft_regex, body)
assert len(ppft_match) == 1
ppft = ppft_match[0]
# Prepare the login to Xbox
ppsx = "Pass"
query = "PPFT={ppft}&login={email}&passwd={password}&LoginOptions=3&NewUser=1&PPSX={ppsx}&type=11&i3={random}&m1=1680&m2=1050&m3=0&i12=1&i17=0&i18=__MobileLogin|1".format(
ppft = ppft, email = urllib.quote(EMAIL), password = PASSWORD, ppsx = ppsx, random = random.randint(15000, 50000))
headers = {"Content-Type": "application/x-www-form-urlencoded", "Host": "login.live.com", "Expect": "100-continue", "Connection": "Keep-Alive"}
# Stream the login to xbox
s = Session()
login_request = Request('POST', URL_TO_POST, headers = headers, data = query, cookies = cookie_container)
prepped = s.prepare_request(login_request)
login_response = s.send(prepped, stream = True, allow_redirects = False)
for cookie in login_response.cookies: cookie_container.set_cookie(cookie)
if "Location" not in login_response.headers: return None
next_location = login_response.headers['Location']
# Get Waypoint Cookies and Headers
waypoint_response = requests.get(next_location, allow_redirects = False)
if "WebAuth" not in waypoint_response.cookies: return None
for cookie in waypoint_response.cookies: cookie_container.set_cookie(cookie)
# Get the Spartan Token
headers = {"UserAgent": "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1312.52 Safari/537.17"}
token_response = requests.get(SPARTAN_TOKEN_GENERATOR, headers = headers, cookies = cookie_container)
spartan_token = token_response.text
spartan_token = json.loads(spartan_token)["SpartanToken"]
return spartan_token
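# A minimal usage sketch (assumes EMAIL and PASSWORD above have been filled
# in; get_spartan_token() returns None when the login flow fails):
#
#     if __name__ == '__main__':
#         token = get_spartan_token()
#         print token if token else "Login failed (check credentials)"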
|
mit
| 1,194,989,679,917,223,700
| 53.327869
| 294
| 0.735587
| false
| 2.9713
| false
| false
| false
|
sacharya/nova
|
nova/tests/api/openstack/compute/plugins/v3/test_user_data.py
|
1
|
9909
|
# Copyright 2012 OpenStack Foundation
# Copyright 2013 IBM Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import base64
import datetime
import uuid
from oslo.config import cfg
import webob
from nova.api.openstack.compute import plugins
from nova.api.openstack.compute.plugins.v3 import servers
from nova.api.openstack.compute.plugins.v3 import user_data
from nova.compute import api as compute_api
from nova.compute import flavors
from nova import db
from nova.network import manager
from nova.openstack.common import jsonutils
from nova.openstack.common import rpc
from nova import test
from nova.tests.api.openstack import fakes
from nova.tests import fake_instance
from nova.tests.image import fake
CONF = cfg.CONF
FAKE_UUID = fakes.FAKE_UUID
def fake_gen_uuid():
return FAKE_UUID
def return_security_group(context, instance_id, security_group_id):
pass
class ServersControllerCreateTest(test.TestCase):
def setUp(self):
"""Shared implementation for tests below that create instance."""
super(ServersControllerCreateTest, self).setUp()
self.flags(verbose=True,
enable_instance_password=True)
self.instance_cache_num = 0
self.instance_cache_by_id = {}
self.instance_cache_by_uuid = {}
ext_info = plugins.LoadedExtensionInfo()
self.controller = servers.ServersController(extension_info=ext_info)
CONF.set_override('extensions_blacklist', 'os-user-data',
'osapi_v3')
self.no_user_data_controller = servers.ServersController(
extension_info=ext_info)
def instance_create(context, inst):
inst_type = flavors.get_flavor_by_flavor_id(3)
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
def_image_ref = 'http://localhost/images/%s' % image_uuid
self.instance_cache_num += 1
instance = fake_instance.fake_db_instance(**{
'id': self.instance_cache_num,
'display_name': inst['display_name'] or 'test',
'uuid': FAKE_UUID,
'instance_type': dict(inst_type),
'access_ip_v4': '1.2.3.4',
'access_ip_v6': 'fead::1234',
'image_ref': inst.get('image_ref', def_image_ref),
'user_id': 'fake',
'project_id': 'fake',
'reservation_id': inst['reservation_id'],
"created_at": datetime.datetime(2010, 10, 10, 12, 0, 0),
"updated_at": datetime.datetime(2010, 11, 11, 11, 0, 0),
user_data.ATTRIBUTE_NAME: None,
"progress": 0,
"fixed_ips": [],
"task_state": "",
"vm_state": "",
"root_device_name": inst.get('root_device_name', 'vda'),
})
self.instance_cache_by_id[instance['id']] = instance
self.instance_cache_by_uuid[instance['uuid']] = instance
return instance
def instance_get(context, instance_id):
"""Stub for compute/api create() pulling in instance after
scheduling
"""
return self.instance_cache_by_id[instance_id]
def instance_update(context, uuid, values):
instance = self.instance_cache_by_uuid[uuid]
instance.update(values)
return instance
def server_update(context, instance_uuid, params):
inst = self.instance_cache_by_uuid[instance_uuid]
inst.update(params)
return (inst, inst)
def fake_method(*args, **kwargs):
pass
def project_get_networks(context, user_id):
return dict(id='1', host='localhost')
def queue_get_for(context, *args):
return 'network_topic'
fakes.stub_out_rate_limiting(self.stubs)
fakes.stub_out_key_pair_funcs(self.stubs)
fake.stub_out_image_service(self.stubs)
fakes.stub_out_nw_api(self.stubs)
self.stubs.Set(uuid, 'uuid4', fake_gen_uuid)
self.stubs.Set(db, 'instance_add_security_group',
return_security_group)
self.stubs.Set(db, 'project_get_networks',
project_get_networks)
self.stubs.Set(db, 'instance_create', instance_create)
self.stubs.Set(db, 'instance_system_metadata_update',
fake_method)
self.stubs.Set(db, 'instance_get', instance_get)
self.stubs.Set(db, 'instance_update', instance_update)
self.stubs.Set(rpc, 'cast', fake_method)
self.stubs.Set(db, 'instance_update_and_get_original',
server_update)
self.stubs.Set(rpc, 'queue_get_for', queue_get_for)
self.stubs.Set(manager.VlanManager, 'allocate_fixed_ip',
fake_method)
def _test_create_extra(self, params, no_image=False,
override_controller=None):
image_uuid = 'c905cedb-7281-47e4-8a62-f26bc5fc4c77'
server = dict(name='server_test', image_ref=image_uuid, flavor_ref=2)
if no_image:
server.pop('image_ref', None)
server.update(params)
body = dict(server=server)
req = fakes.HTTPRequestV3.blank('/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
if override_controller:
server = override_controller.create(req, body).obj['server']
else:
server = self.controller.create(req, body).obj['server']
def test_create_instance_with_user_data_disabled(self):
params = {user_data.ATTRIBUTE_NAME: base64.b64encode('fake')}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertNotIn('user_data', kwargs)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(
params,
override_controller=self.no_user_data_controller)
def test_create_instance_with_user_data_enabled(self):
params = {user_data.ATTRIBUTE_NAME: base64.b64encode('fake')}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertIn('user_data', kwargs)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_user_data(self):
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/flavors/3'
value = "A random string"
body = {
'server': {
'name': 'user_data_test',
'image_ref': image_href,
'flavor_ref': flavor_ref,
'metadata': {
'hello': 'world',
'open': 'stack',
},
user_data.ATTRIBUTE_NAME: base64.b64encode(value),
},
}
req = fakes.HTTPRequestV3.blank('/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
res = self.controller.create(req, body).obj
server = res['server']
self.assertEqual(FAKE_UUID, server['id'])
def test_create_instance_with_bad_user_data(self):
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/flavors/3'
value = "A random string"
body = {
'server': {
'name': 'user_data_test',
'image_ref': image_href,
'flavor_ref': flavor_ref,
'metadata': {
'hello': 'world',
'open': 'stack',
},
user_data.ATTRIBUTE_NAME: value,
},
}
req = fakes.HTTPRequestV3.blank('/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, req, body)
class TestServerCreateRequestXMLDeserializer(test.TestCase):
def setUp(self):
super(TestServerCreateRequestXMLDeserializer, self).setUp()
ext_info = plugins.LoadedExtensionInfo()
controller = servers.ServersController(extension_info=ext_info)
self.deserializer = servers.CreateDeserializer(controller)
def test_request_with_user_data(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v3"
xmlns:%(alias)s="%(namespace)s"
name="user_data_test"
image_ref="1"
flavor_ref="1"
%(alias)s:user_data="IyEvYmluL2Jhc2gKL2Jpbi9"/>""" % {
'alias': user_data.ALIAS,
'namespace': user_data.UserData.namespace}
request = self.deserializer.deserialize(serial_request)
expected = {
"server": {
"name": "user_data_test",
"image_ref": "1",
"flavor_ref": "1",
user_data.ATTRIBUTE_NAME: "IyEvYmluL2Jhc2gKL2Jpbi9"
},
}
self.assertEqual(request['body'], expected)
|
apache-2.0
| -7,509,084,529,474,616,000
| 36.392453
| 78
| 0.58795
| false
| 3.827346
| true
| false
| false
|
mvanveen/catcher
|
catcher.py
|
1
|
1373
|
import traceback
TRACE_STACK = []
class Trace(object):
def __init__(self, exception, stack=None):
if not isinstance(exception, Exception):
raise ValueError("Expected an Exception object as first argument")
if not stack:
stack = traceback.extract_stack()
# pop off current frame and initial catch frame
#stack.pop()
#stack.pop()
# TODO: try to grab exception if it's not passed in explicitly
self._exception = exception
self._stack = stack
@property
def exception(self):
return self._exception
@property
def stack(self):
return self._stack
def __str__(self):
return ''.join(
traceback.format_list(self.stack) +
traceback.format_exception_only(
type(self.exception),
self.exception
)
).strip()
def __repr__(self):
return '<Trace (%s)>' % (
str(type(self.exception)).replace('exceptions.', ''),
)
def catch(e):
TRACE_STACK.append(Trace(e))
def dump(exception_type=None, lineno=None, module=None):
return TRACE_STACK
def clear():
del TRACE_STACK
TRACE_STACK = []
if __name__ == '__main__':
import random
for i in range(20):
try:
random.randint(0,5) / 0
except Exception, e:
catch(e)
print str(dump()[0])
|
mit
| -2,296,891,424,965,171,500
| 21.508197
| 71
| 0.576839
| false
| 3.956772
| false
| false
| false
|
horacioMartinez/dakara-client
|
tools/protocol_generator/generator/gendefs_js.py
|
1
|
7898
|
class Packet:
def __init__(self, name, args):
self.name = name
self.args = args
def get_header_fmt(self):
return """
function {name} (buffer) {{
this.id = {base_name}ID.{name} /* {packet_id} */;
if (buffer){{
buffer.ReadByte(); /* PacketID */
{ctor_fields_bytequeue}
}}
this.serialize = function(buffer) {{
buffer.WriteByte({base_name}ID.{name}); /* PacketID: {packet_id} */
{serialize_fields}
buffer.flush();
}};
this.dispatch = function (d){{
d.handle{name}(this);
}};
}}
"""
def get_builder_fmt(self):
return """
Build{name}({header_fields_signature}) {{
var e = new {name}();
{items_assign_build}
return e;
}}
"""
def get_parametros_fmt(self):
return """
{parametros_fields}
handler.handle{name}( {parametros_args} );
"""
def get_argumentosHandler_fmt(self):
return """{parametros_args}
"""
# def get_handler_fmt(self):
# return """
#{items_assign_build}
#"""
def get_handler_fmt(self):
return """
send{name}({header_fields_signature}) {{
p = this.protocolo.Build{name}({header_fields_signature} );
p.serialize(this.byteQueue);
}}
"""
def get_ctor_fields_bytequeue_fmt(self, is_array):
if is_array:
return " var i; this.{arg_name}= []; for (i=0; i<{array_size}; ++i) this.{arg_name}[i] = buffer.{type_reader_name}();\n"
else:
return " this.{arg_name} = buffer.{type_reader_name}();\n"
def get_parametros_fields_fmt(self, is_array):
if is_array:
return " var i; var {arg_name}= []; for (i=0; i<{array_size}; ++i) {arg_name}[i] = buffer.{type_reader_name}();\n"
else:
return " var {arg_name} = buffer.{type_reader_name}();\n"
def get_parametros_args_fmt(self, is_array):
if is_array:
return "{arg_name},"
else:
return "{arg_name},"
def get_serialize_fields_fmt(self, is_array):
if is_array:
return " var i; for (i=0; i<{array_size}; ++i) buffer.{type_writer_name}(this.{arg_name}[i]);\n"
else:
return " buffer.{type_writer_name}(this.{arg_name});\n"
class PacketGMHeader(Packet):
def __init__(self, name, args):
Packet.__init__(self, name, args)
def get_header_fmt(self):
return """
function {name} (buffer) {{
this.id = {base_name}ID.{name} /* {packet_id} */;
if (buffer){{
buffer.ReadByte(); /* PacketID */
{ctor_fields_bytequeue}
}}
this.serialize = function(buffer) {{
{serialize_fields}
}};
this.dispatch = function (d){{
d.handle{name}(this);
}};
}}
"""
class PacketGMCommand(Packet):
def __init__(self, name, args):
self.name = name
self.args = args
def get_header_fmt(self):
return """
function {name} (buffer) {{
this.id = {base_name}ID.{name} /* {packet_id} */;
if (buffer){{
buffer.ReadByte(); /* PacketID */
{ctor_fields_bytequeue}
}}
this.serialize = function(buffer) {{
buffer.WriteByte(ClientPacketID_GMCommands);
buffer.WriteByte({base_name}ID.{name}); /* PacketID: {packet_id} */
{serialize_fields}
buffer.flush();
}};
this.dispatch = function (d){{
d.handle{name}(this);
}};
}}
"""
class PacketWithCount(Packet):
def __init__(self, name, args, reader_type):
Packet.__init__(self, name, args)
self.reader_type = reader_type
def get_header_fmt(self):
return """
function {name} (buffer) {{
this.id = {base_name}ID.{name} /* {packet_id} */;
this.Items = [];
if (buffer) {{
buffer.ReadByte(); /* PacketID */
var Count = buffer.__COUNTREADER__();
var i;
for (i=0; i<Count; ++i) {{
var e = {{
{ctor_fields_bytequeue}
}};
this.Items.push(e);
}}
}}
""".replace("__COUNTREADER__", TYPE_TO_READER_NAME[self.reader_type]) + """
this.serialize = function(buffer) {{
buffer.WriteByte({base_name}ID.{name}); /* PacketID: {packet_id} */
var Count = Items.length;
buffer.__COUNTWRITER__(Count);
var i;
for (i=0; i<Count; ++i) {{
e = Items[i];
{serialize_fields}
buffer.flush();
}}
}};
this.dispatch = function (d){{
d.handle{name}(this);
}};
this.addItem = function({header_fields_signature}) {{
var e = {{
{items_assign_e}
}};
this.Items.push(e);
}}
}}""".replace("__COUNTWRITER__", TYPE_TO_WRITER_NAME[self.reader_type])
def get_handler_fmt(self):
return """ /*ACA*/
var e = {{
{items_assign_e}
}};
this.Items.push(e);
}}"""
def get_parametros_fmt(self):
return """
        /* Packet with count! */
var Items = [];
var Count = buffer.__COUNTREADER__();
var i;
for (i=0; i<Count; ++i) {{
var e = {{
{ctor_fields_bytequeue}
}};
Items.push(e);
}}
handler.handle{name}(Items);
""".replace("__COUNTREADER__", TYPE_TO_READER_NAME[self.reader_type])
def get_argumentosHandler_fmt(self):
return """Items
"""
# def get_handler_fmt(self):
# return """
#{items_assign_build}
#"""
def get_ctor_fields_bytequeue_fmt(self, is_array):
if is_array:
return " {{ var i; e.{arg_name} = []; for (i=0; i<{array_size}; ++i) e.{arg_name}[i] = buffer.{type_reader_name}(); }}\n"
else:
return " {arg_name} : buffer.{type_reader_name}(),\n"
def get_serialize_fields_fmt(self, is_array):
if is_array:
return " {{ var i; for (i=0; i<{array_size}; ++i) buffer.{type_writer_name}(e.{arg_name}[i]); }}\n"
else:
return " buffer.{type_writer_name}(e.{arg_name});\n"
TYPE_UNICODE_STRING = 0
TYPE_UNICODE_STRING_FIXED = 1
TYPE_BINARY_STRING = 2
TYPE_BINARY_STRING_FIXED = 3
TYPE_I8 = 4
TYPE_I16 = 5
TYPE_I32 = 6
TYPE_SINGLE = 7 # Float
TYPE_DOUBLE = 8 # Double
TYPE_BOOL = 9
TYPE_ARRAY = (1 << 8)
TYPE_TO_STR = {
TYPE_UNICODE_STRING: 'var',
TYPE_UNICODE_STRING_FIXED: 'var',
TYPE_BINARY_STRING: 'var',
TYPE_BINARY_STRING_FIXED: 'var',
TYPE_I8: 'var',
TYPE_I16: 'var',
TYPE_I32: 'var',
TYPE_SINGLE: 'var',
TYPE_DOUBLE: 'var',
TYPE_BOOL: 'var',
}
TYPE_TO_SIGNATURE_STR = {
TYPE_UNICODE_STRING: '',
TYPE_UNICODE_STRING_FIXED: '',
TYPE_BINARY_STRING: '',
TYPE_BINARY_STRING_FIXED: '',
TYPE_I8: '',
TYPE_I16: '',
TYPE_I32: '',
TYPE_SINGLE: '',
TYPE_DOUBLE: '',
TYPE_BOOL: '',
}
TYPE_TO_READER_NAME = {
TYPE_UNICODE_STRING: 'ReadUnicodeString',
#TYPE_UNICODE_STRING_FIXED: '',
#TYPE_BINARY_STRING: '',
#TYPE_BINARY_STRING_FIXED: 'ReadBinaryFixed',
TYPE_I8: 'ReadByte',
TYPE_I16: 'ReadInteger',
TYPE_I32: 'ReadLong',
TYPE_SINGLE: 'ReadSingle',
TYPE_DOUBLE: 'ReadDouble',
TYPE_BOOL: 'ReadBoolean',
}
TYPE_TO_WRITER_NAME = {
TYPE_UNICODE_STRING: 'WriteUnicodeString',
#TYPE_UNICODE_STRING_FIXED: '',
#TYPE_BINARY_STRING: '',
#TYPE_BINARY_STRING_FIXED: 'ReadBinaryFixed',
TYPE_I8: 'WriteByte',
TYPE_I16: 'WriteInteger',
TYPE_I32: 'WriteLong',
TYPE_SINGLE: 'WriteSingle',
TYPE_DOUBLE: 'WriteDouble',
TYPE_BOOL: 'WriteBoolean',
}
TYPE_SIZE = {
TYPE_UNICODE_STRING: 2,
#TYPE_UNICODE_STRING_FIXED: 0,
TYPE_BINARY_STRING: 2,
#TYPE_BINARY_STRING_FIXED: 0,
TYPE_I8: 1,
TYPE_I16: 2,
TYPE_I32: 4,
TYPE_SINGLE: 4,
TYPE_DOUBLE: 8,
TYPE_BOOL: 1,
}
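# A minimal formatting sketch (hypothetical field name, not part of the
# original generator) showing how one per-field template above is filled in:
#
#     p = Packet('Example', [])
#     line = p.get_ctor_fields_bytequeue_fmt(is_array=False).format(
#         arg_name='HP', type_reader_name=TYPE_TO_READER_NAME[TYPE_I16])
#     # line == 'this.HP = buffer.ReadInteger();\n' (plus the template's
#     # leading indentation)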
|
mit
| -5,359,951,213,729,725,000
| 24.726384
| 144
| 0.530767
| false
| 3.221044
| false
| false
| false
|
Jeff-Tian/mybnb
|
Python27/Lib/test/test_hmac.py
|
2
|
17810
|
# coding: utf-8
import hmac
import hashlib
import unittest
import warnings
from test import test_support
class TestVectorsTestCase(unittest.TestCase):
def test_md5_vectors(self):
# Test the HMAC module against test vectors from the RFC.
def md5test(key, data, digest):
h = hmac.HMAC(key, data)
self.assertEqual(h.hexdigest().upper(), digest.upper())
md5test(chr(0x0b) * 16,
"Hi There",
"9294727A3638BB1C13F48EF8158BFC9D")
md5test("Jefe",
"what do ya want for nothing?",
"750c783e6ab0b503eaa86e310a5db738")
md5test(chr(0xAA)*16,
chr(0xDD)*50,
"56be34521d144c88dbb8c733f0e8b3f6")
md5test("".join([chr(i) for i in range(1, 26)]),
chr(0xCD) * 50,
"697eaf0aca3a3aea3a75164746ffaa79")
md5test(chr(0x0C) * 16,
"Test With Truncation",
"56461ef2342edc00f9bab995690efd4c")
md5test(chr(0xAA) * 80,
"Test Using Larger Than Block-Size Key - Hash Key First",
"6b1ab7fe4bd7bf8f0b62e6ce61b9d0cd")
md5test(chr(0xAA) * 80,
("Test Using Larger Than Block-Size Key "
"and Larger Than One Block-Size Data"),
"6f630fad67cda0ee1fb1f562db3aa53e")
def test_sha_vectors(self):
def shatest(key, data, digest):
h = hmac.HMAC(key, data, digestmod=hashlib.sha1)
self.assertEqual(h.hexdigest().upper(), digest.upper())
shatest(chr(0x0b) * 20,
"Hi There",
"b617318655057264e28bc0b6fb378c8ef146be00")
shatest("Jefe",
"what do ya want for nothing?",
"effcdf6ae5eb2fa2d27416d5f184df9c259a7c79")
shatest(chr(0xAA)*20,
chr(0xDD)*50,
"125d7342b9ac11cd91a39af48aa17b4f63f175d3")
shatest("".join([chr(i) for i in range(1, 26)]),
chr(0xCD) * 50,
"4c9007f4026250c6bc8414f9bf50c86c2d7235da")
shatest(chr(0x0C) * 20,
"Test With Truncation",
"4c1a03424b55e07fe7f27be1d58bb9324a9a5a04")
shatest(chr(0xAA) * 80,
"Test Using Larger Than Block-Size Key - Hash Key First",
"aa4ae5e15272d00e95705637ce8a3b55ed402112")
shatest(chr(0xAA) * 80,
("Test Using Larger Than Block-Size Key "
"and Larger Than One Block-Size Data"),
"e8e99d0f45237d786d6bbaa7965c7808bbff1a91")
def _rfc4231_test_cases(self, hashfunc):
def hmactest(key, data, hexdigests):
h = hmac.HMAC(key, data, digestmod=hashfunc)
self.assertEqual(h.hexdigest().lower(), hexdigests[hashfunc])
# 4.2. Test Case 1
hmactest(key = '\x0b'*20,
data = 'Hi There',
hexdigests = {
hashlib.sha224: '896fb1128abbdf196832107cd49df33f'
'47b4b1169912ba4f53684b22',
hashlib.sha256: 'b0344c61d8db38535ca8afceaf0bf12b'
'881dc200c9833da726e9376c2e32cff7',
hashlib.sha384: 'afd03944d84895626b0825f4ab46907f'
'15f9dadbe4101ec682aa034c7cebc59c'
'faea9ea9076ede7f4af152e8b2fa9cb6',
hashlib.sha512: '87aa7cdea5ef619d4ff0b4241a1d6cb0'
'2379f4e2ce4ec2787ad0b30545e17cde'
'daa833b7d6b8a702038b274eaea3f4e4'
'be9d914eeb61f1702e696c203a126854',
})
# 4.3. Test Case 2
hmactest(key = 'Jefe',
data = 'what do ya want for nothing?',
hexdigests = {
hashlib.sha224: 'a30e01098bc6dbbf45690f3a7e9e6d0f'
'8bbea2a39e6148008fd05e44',
hashlib.sha256: '5bdcc146bf60754e6a042426089575c7'
'5a003f089d2739839dec58b964ec3843',
hashlib.sha384: 'af45d2e376484031617f78d2b58a6b1b'
'9c7ef464f5a01b47e42ec3736322445e'
'8e2240ca5e69e2c78b3239ecfab21649',
hashlib.sha512: '164b7a7bfcf819e2e395fbe73b56e0a3'
'87bd64222e831fd610270cd7ea250554'
'9758bf75c05a994a6d034f65f8f0e6fd'
'caeab1a34d4a6b4b636e070a38bce737',
})
# 4.4. Test Case 3
hmactest(key = '\xaa'*20,
data = '\xdd'*50,
hexdigests = {
hashlib.sha224: '7fb3cb3588c6c1f6ffa9694d7d6ad264'
'9365b0c1f65d69d1ec8333ea',
hashlib.sha256: '773ea91e36800e46854db8ebd09181a7'
'2959098b3ef8c122d9635514ced565fe',
hashlib.sha384: '88062608d3e6ad8a0aa2ace014c8a86f'
'0aa635d947ac9febe83ef4e55966144b'
'2a5ab39dc13814b94e3ab6e101a34f27',
hashlib.sha512: 'fa73b0089d56a284efb0f0756c890be9'
'b1b5dbdd8ee81a3655f83e33b2279d39'
'bf3e848279a722c806b485a47e67c807'
'b946a337bee8942674278859e13292fb',
})
# 4.5. Test Case 4
hmactest(key = ''.join([chr(x) for x in xrange(0x01, 0x19+1)]),
data = '\xcd'*50,
hexdigests = {
hashlib.sha224: '6c11506874013cac6a2abc1bb382627c'
'ec6a90d86efc012de7afec5a',
hashlib.sha256: '82558a389a443c0ea4cc819899f2083a'
'85f0faa3e578f8077a2e3ff46729665b',
hashlib.sha384: '3e8a69b7783c25851933ab6290af6ca7'
'7a9981480850009cc5577c6e1f573b4e'
'6801dd23c4a7d679ccf8a386c674cffb',
hashlib.sha512: 'b0ba465637458c6990e5a8c5f61d4af7'
'e576d97ff94b872de76f8050361ee3db'
'a91ca5c11aa25eb4d679275cc5788063'
'a5f19741120c4f2de2adebeb10a298dd',
})
# 4.7. Test Case 6
hmactest(key = '\xaa'*131,
data = 'Test Using Larger Than Block-Siz'
'e Key - Hash Key First',
hexdigests = {
hashlib.sha224: '95e9a0db962095adaebe9b2d6f0dbce2'
'd499f112f2d2b7273fa6870e',
hashlib.sha256: '60e431591ee0b67f0d8a26aacbf5b77f'
'8e0bc6213728c5140546040f0ee37f54',
hashlib.sha384: '4ece084485813e9088d2c63a041bc5b4'
'4f9ef1012a2b588f3cd11f05033ac4c6'
'0c2ef6ab4030fe8296248df163f44952',
hashlib.sha512: '80b24263c7c1a3ebb71493c1dd7be8b4'
'9b46d1f41b4aeec1121b013783f8f352'
'6b56d037e05f2598bd0fd2215d6a1e52'
'95e64f73f63f0aec8b915a985d786598',
})
# 4.8. Test Case 7
hmactest(key = '\xaa'*131,
data = 'This is a test using a larger th'
'an block-size key and a larger t'
'han block-size data. The key nee'
'ds to be hashed before being use'
'd by the HMAC algorithm.',
hexdigests = {
hashlib.sha224: '3a854166ac5d9f023f54d517d0b39dbd'
'946770db9c2b95c9f6f565d1',
hashlib.sha256: '9b09ffa71b942fcb27635fbcd5b0e944'
'bfdc63644f0713938a7f51535c3a35e2',
hashlib.sha384: '6617178e941f020d351e2f254e8fd32c'
'602420feb0b8fb9adccebb82461e99c5'
'a678cc31e799176d3860e6110c46523e',
hashlib.sha512: 'e37b6a775dc87dbaa4dfa9f96e5e3ffd'
'debd71f8867289865df5a32d20cdc944'
'b6022cac3c4982b10d5eeb55c3e4de15'
'134676fb6de0446065c97440fa8c6a58',
})
def test_sha224_rfc4231(self):
self._rfc4231_test_cases(hashlib.sha224)
def test_sha256_rfc4231(self):
self._rfc4231_test_cases(hashlib.sha256)
def test_sha384_rfc4231(self):
self._rfc4231_test_cases(hashlib.sha384)
def test_sha512_rfc4231(self):
self._rfc4231_test_cases(hashlib.sha512)
def test_legacy_block_size_warnings(self):
class MockCrazyHash(object):
"""Ain't no block_size attribute here."""
def __init__(self, *args):
self._x = hashlib.sha1(*args)
self.digest_size = self._x.digest_size
def update(self, v):
self._x.update(v)
def digest(self):
return self._x.digest()
with warnings.catch_warnings():
warnings.simplefilter('error', RuntimeWarning)
with self.assertRaises(RuntimeWarning):
hmac.HMAC('a', 'b', digestmod=MockCrazyHash)
self.fail('Expected warning about missing block_size')
MockCrazyHash.block_size = 1
with self.assertRaises(RuntimeWarning):
hmac.HMAC('a', 'b', digestmod=MockCrazyHash)
self.fail('Expected warning about small block_size')
class ConstructorTestCase(unittest.TestCase):
def test_normal(self):
# Standard constructor call.
try:
h = hmac.HMAC("key")
except:
self.fail("Standard constructor call raised exception.")
def test_withtext(self):
# Constructor call with text.
try:
h = hmac.HMAC("key", "hash this!")
except:
self.fail("Constructor call with text argument raised exception.")
def test_withmodule(self):
# Constructor call with text and digest module.
try:
h = hmac.HMAC("key", "", hashlib.sha1)
except:
self.fail("Constructor call with hashlib.sha1 raised exception.")
class SanityTestCase(unittest.TestCase):
def test_default_is_md5(self):
# Testing if HMAC defaults to MD5 algorithm.
# NOTE: this whitebox test depends on the hmac class internals
h = hmac.HMAC("key")
self.assertTrue(h.digest_cons == hashlib.md5)
def test_exercise_all_methods(self):
# Exercising all methods once.
# This must not raise any exceptions
try:
h = hmac.HMAC("my secret key")
h.update("compute the hash of this text!")
dig = h.digest()
dig = h.hexdigest()
h2 = h.copy()
except:
self.fail("Exception raised during normal usage of HMAC class.")
class CopyTestCase(unittest.TestCase):
def test_attributes(self):
# Testing if attributes are of same type.
h1 = hmac.HMAC("key")
h2 = h1.copy()
self.assertTrue(h1.digest_cons == h2.digest_cons,
"digest constructors don't match.")
self.assertTrue(type(h1.inner) == type(h2.inner),
"Types of inner don't match.")
self.assertTrue(type(h1.outer) == type(h2.outer),
"Types of outer don't match.")
def test_realcopy(self):
# Testing if the copy method created a real copy.
h1 = hmac.HMAC("key")
h2 = h1.copy()
# Using id() in case somebody has overridden __cmp__.
self.assertTrue(id(h1) != id(h2), "No real copy of the HMAC instance.")
self.assertTrue(id(h1.inner) != id(h2.inner),
"No real copy of the attribute 'inner'.")
self.assertTrue(id(h1.outer) != id(h2.outer),
"No real copy of the attribute 'outer'.")
def test_equality(self):
# Testing if the copy has the same digests.
h1 = hmac.HMAC("key")
h1.update("some random text")
h2 = h1.copy()
self.assertTrue(h1.digest() == h2.digest(),
"Digest of copy doesn't match original digest.")
self.assertTrue(h1.hexdigest() == h2.hexdigest(),
"Hexdigest of copy doesn't match original hexdigest.")
class CompareDigestTestCase(unittest.TestCase):
def test_compare_digest(self):
# Testing input type exception handling
a, b = 100, 200
self.assertRaises(TypeError, hmac.compare_digest, a, b)
a, b = 100, b"foobar"
self.assertRaises(TypeError, hmac.compare_digest, a, b)
a, b = b"foobar", 200
self.assertRaises(TypeError, hmac.compare_digest, a, b)
a, b = u"foobar", b"foobar"
self.assertRaises(TypeError, hmac.compare_digest, a, b)
a, b = b"foobar", u"foobar"
self.assertRaises(TypeError, hmac.compare_digest, a, b)
# Testing bytes of different lengths
a, b = b"foobar", b"foo"
self.assertFalse(hmac.compare_digest(a, b))
a, b = b"\xde\xad\xbe\xef", b"\xde\xad"
self.assertFalse(hmac.compare_digest(a, b))
# Testing bytes of same lengths, different values
a, b = b"foobar", b"foobaz"
self.assertFalse(hmac.compare_digest(a, b))
a, b = b"\xde\xad\xbe\xef", b"\xab\xad\x1d\xea"
self.assertFalse(hmac.compare_digest(a, b))
# Testing bytes of same lengths, same values
a, b = b"foobar", b"foobar"
self.assertTrue(hmac.compare_digest(a, b))
a, b = b"\xde\xad\xbe\xef", b"\xde\xad\xbe\xef"
self.assertTrue(hmac.compare_digest(a, b))
# Testing bytearrays of same lengths, same values
a, b = bytearray(b"foobar"), bytearray(b"foobar")
self.assertTrue(hmac.compare_digest(a, b))
        # Testing bytearrays of different lengths
a, b = bytearray(b"foobar"), bytearray(b"foo")
self.assertFalse(hmac.compare_digest(a, b))
# Testing bytearrays of same lengths, different values
a, b = bytearray(b"foobar"), bytearray(b"foobaz")
self.assertFalse(hmac.compare_digest(a, b))
# Testing byte and bytearray of same lengths, same values
a, b = bytearray(b"foobar"), b"foobar"
self.assertTrue(hmac.compare_digest(a, b))
self.assertTrue(hmac.compare_digest(b, a))
        # Testing byte and bytearray of different lengths
a, b = bytearray(b"foobar"), b"foo"
self.assertFalse(hmac.compare_digest(a, b))
self.assertFalse(hmac.compare_digest(b, a))
# Testing byte and bytearray of same lengths, different values
a, b = bytearray(b"foobar"), b"foobaz"
self.assertFalse(hmac.compare_digest(a, b))
self.assertFalse(hmac.compare_digest(b, a))
# Testing str of same lengths
a, b = "foobar", "foobar"
self.assertTrue(hmac.compare_digest(a, b))
        # Testing str of different lengths
a, b = "foo", "foobar"
self.assertFalse(hmac.compare_digest(a, b))
        # Testing str of same lengths, different values
a, b = "foobar", "foobaz"
self.assertFalse(hmac.compare_digest(a, b))
# Testing error cases
a, b = u"foobar", b"foobar"
self.assertRaises(TypeError, hmac.compare_digest, a, b)
a, b = b"foobar", u"foobar"
self.assertRaises(TypeError, hmac.compare_digest, a, b)
a, b = b"foobar", 1
self.assertRaises(TypeError, hmac.compare_digest, a, b)
a, b = 100, 200
self.assertRaises(TypeError, hmac.compare_digest, a, b)
a, b = "fooä", "fooä"
self.assertTrue(hmac.compare_digest(a, b))
with test_support.check_py3k_warnings():
            # subclasses are supported; their __eq__ is ignored
class mystr(str):
def __eq__(self, other):
return False
a, b = mystr("foobar"), mystr("foobar")
self.assertTrue(hmac.compare_digest(a, b))
a, b = mystr("foobar"), "foobar"
self.assertTrue(hmac.compare_digest(a, b))
a, b = mystr("foobar"), mystr("foobaz")
self.assertFalse(hmac.compare_digest(a, b))
with test_support.check_py3k_warnings():
class mybytes(bytes):
def __eq__(self, other):
return False
a, b = mybytes(b"foobar"), mybytes(b"foobar")
self.assertTrue(hmac.compare_digest(a, b))
a, b = mybytes(b"foobar"), b"foobar"
self.assertTrue(hmac.compare_digest(a, b))
a, b = mybytes(b"foobar"), mybytes(b"foobaz")
self.assertFalse(hmac.compare_digest(a, b))
def test_main():
test_support.run_unittest(
TestVectorsTestCase,
ConstructorTestCase,
SanityTestCase,
CopyTestCase,
CompareDigestTestCase,
)
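# Editor's note (added): a self-contained illustration, not part of the
# original suite, of checking one RFC 2202 MD5 vector directly against this
# module's public API; it mirrors what md5test() does above.
def _demo_rfc2202_md5():
    h = hmac.HMAC(chr(0x0b) * 16, "Hi There")
    # Expected digest for RFC 2202 test case 1.
    assert h.hexdigest().upper() == "9294727A3638BB1C13F48EF8158BFC9D"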
if __name__ == "__main__":
test_main()
|
apache-2.0
| 178,129,449,049,034,270
| 39.607477
| 79
| 0.537287
| false
| 3.455851
| true
| false
| false
|
DummyDivision/Tsune
|
cardimporter/importer.py
|
1
|
2077
|
from django.db import transaction
from guardian.shortcuts import assign_perm
from sourcereader import AnkiCardSourceReader
from ankiconverter import ankiTupeToTsuneDict
from cardbox.card_model import Card, Deck
class AnkiImporter(object):
    """Imports the decks and cards of an Anki .apkg package into Tsune."""
    @transaction.commit_on_success
    def importCollection(self, pathToApkg, user):
        # Import every deck of the package within a single transaction.
        with AnkiCardSourceReader(pathToApkg) as ankireader:
            self._createCollection(ankireader, user)
    def _createDeck(self, deck):
        # Anki uses "Parent::Child" deck names; the first part becomes the title.
        names = deck['name'].split("::")
        if len(names) > 1:
            return Deck.objects.create(title=names[0], description="-".join(names[1:]))
        return Deck.objects.create(title=names[0], description=names[0])
    def _checkIfCreateDeck(self, current_cardlist):
        return len(current_cardlist) > 0
    def _convertAnkiCardsToTsuneCards(self, cardlist, deck):
        tsuneCards = []
        for card in cardlist:
            tsuneDict = ankiTupeToTsuneDict(card)
            convertedcard = self._createCardObjectFromTsuneDict(tsuneDict, deck)
            tsuneCards.append(convertedcard)
        return tsuneCards
    def _addAllCardsToDeck(self, cardlist, deck):
        return self._convertAnkiCardsToTsuneCards(cardlist, deck)
    def _createCardObjectFromTsuneDict(self, tsuneDict, deck):
        return Card.objects.create(deck=deck, front=tsuneDict["front"], back=tsuneDict["back"])
    def _createCollection(self, ankireader, user):
        deckdict = ankireader.getDictOfAllDecks()
        for deck_id in deckdict.keys():
            current_cardlist = ankireader.getAllCardsForDeck(deck_id)
            if self._checkIfCreateDeck(current_cardlist):
                deck = self._createDeck(deckdict[deck_id])
                tsunecards = self._addAllCardsToDeck(current_cardlist, deck)
                deck.save()
                for card in tsunecards:
                    card.save()
                self._assignPerms(user, deck)
    def _assignPerms(self, user, deck):
        assign_perm('view_deck', user, deck)
        assign_perm('change_deck', user, deck)
        assign_perm('delete_deck', user, deck)
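# Editor's note (added): a hedged usage sketch. The caller (a view or a
# management command) is not part of this module, so the user lookup and the
# path argument below are illustrative assumptions.
def _example_import(path_to_apkg, username):
    from django.contrib.auth.models import User  # assumes the stock auth user model
    user = User.objects.get(username=username)
    # Runs in one transaction and assigns view/change/delete perms per deck.
    AnkiImporter().importCollection(path_to_apkg, user)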
|
mit
| -4,638,345,130,590,870,000
| 36.763636
| 94
| 0.676938
| false
| 3.467446
| false
| false
| false
|
philanthropy-u/edx-platform
|
openedx/features/job_board/migrations/0001_initial.py
|
1
|
2109
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.21 on 2020-03-31 10:03
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
import django_countries.fields
import model_utils.fields
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Job',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
('title', models.CharField(max_length=255)),
('company', models.CharField(max_length=255)),
('type', models.CharField(choices=[(b'remote', b'Remote'), (b'onsite', b'Onsite')], max_length=255)),
('compensation', models.CharField(choices=[(b'volunteer', b'Volunteer'), (b'hourly', b'Hourly'), (b'salaried', b'Salaried')], max_length=255)),
('hours', models.CharField(choices=[(b'fulltime', b'Full Time'), (b'parttime', b'Part Time'), (b'freelance', b'Freelance')], max_length=255)),
('city', models.CharField(max_length=255)),
('country', django_countries.fields.CountryField(max_length=2)),
('description', models.TextField()),
('function', models.TextField(blank=True, null=True)),
('responsibilities', models.TextField(blank=True, null=True)),
('website_link', models.URLField(blank=True, max_length=255, null=True)),
('contact_email', models.EmailField(max_length=255)),
('logo', models.ImageField(blank=True, null=True, upload_to=b'job-board/uploaded-logos/')),
],
options={
'abstract': False,
},
),
]
|
agpl-3.0
| -7,991,687,555,766,982,000
| 48.046512
| 159
| 0.599336
| false
| 3.964286
| false
| false
| false
|
simonmonk/prog_pi_ed2
|
07_05_kitchen_sink.py
|
1
|
1654
|
#07_05_kitchen_sink.py
from tkinter import *
class App:
def __init__(self, master):
frame = Frame(master)
frame.pack()
Label(frame, text='Label').grid(row=0, column=0)
        Entry(frame).grid(row=0, column=1)  # Entry has no 'text' option; use textvariable to set contents
Button(frame, text='Button').grid(row=0, column=2)
check_var = StringVar()
check = Checkbutton(frame, text='Checkbutton', variable=check_var, onvalue='Y', offvalue='N')
check.grid(row=1, column=0)
#Listbox
listbox = Listbox(frame, height=3, selectmode=SINGLE)
for item in ['red', 'green', 'blue', 'yellow', 'pink']:
listbox.insert(END, item)
listbox.grid(row=1, column=1)
#Radiobutton set
radio_frame = Frame(frame)
radio_selection = StringVar()
b1 = Radiobutton(radio_frame, text='portrait',
variable=radio_selection, value='P')
b1.pack(side=LEFT)
b2 = Radiobutton(radio_frame, text='landscape',
variable=radio_selection, value='L')
b2.pack(side=LEFT)
radio_frame.grid(row=1, column=2)
#Scale
scale_var = IntVar()
Scale(frame, from_=1, to=10, orient=HORIZONTAL,
variable=scale_var).grid(row=2, column=0)
Label(frame, textvariable=scale_var,
font=("Helvetica", 36)).grid(row=2, column=1)
#Message
message = Message(frame,
text='Multiline Message Area')
message.grid(row=2, column=2)
#Spinbox
Spinbox(frame, values=('a','b','c')).grid(row=3)
root = Tk()
root.wm_title('Kitchen Sink')
app = App(root)
root.mainloop()
|
mit
| 872,859,228,343,858,000
| 34.956522
| 101
| 0.577993
| false
| 3.341414
| false
| false
| false
|
matheuskiser/pdx_code_guild
|
django/pointme/points/views.py
|
1
|
4113
|
from django.shortcuts import render, redirect, render_to_response
from django.contrib.auth.decorators import login_required
from django.template import RequestContext
from django.http import HttpResponse
from django.db.models import Q
import json
from points.models import Place
from points.forms import PlaceForm
from pygeocoder import Geocoder
def show_places(request):
places = Place.objects.all()
# Render the template depending on the context.
return render(request, 'points/index.html', {'places': places})
def my_places(request):
# # Entry.objects.all().filter(pub_date__year=2006)
# user = request.user
# places = Place.objects.all().filter(author=user)
#
# # Render the template depending on the context.
# return render(request, 'points/my_places.html', {'places': places})
if request.is_ajax():
upper_left_lat = request.GET['upper_left_lat']
upper_left_lng = request.GET['upper_left_lng']
lower_left_lat = request.GET['lower_left_lat']
lower_left_lng = request.GET['lower_left_lng']
user = request.user
places = Place.objects.all().filter(latitude__gte=lower_left_lat, longitude__gte=lower_left_lng,
latitude__lte=upper_left_lat, longitude__lte=upper_left_lng,
author=user)
spots = []
for place in places:
temp = {}
temp['id'] = place.id
temp['address'] = place.address
temp['name'] = place.name
temp['like'] = place.like
temp['user_name'] = place.author.username
spots.append(temp)
return HttpResponse(json.dumps(spots))
# Render the template depending on the context.
return render(request, 'points/my_places.html')
def search_results(request):
query = request.GET['search-query']
places = Place.objects.filter(Q(name__icontains=query) | Q(address__icontains=query))
return render(request, 'points/search_results.html', {'places': places, 'query': query})
@login_required()
def add_place(request):
form = PlaceForm(request.POST or None)
if form.is_valid():
place = form.save(commit=False)
place.author = request.user
results = Geocoder.geocode(place.address)
lat, lng = results[0].coordinates
place.latitude = lat
place.longitude = lng
place.save()
return redirect('../../points/')
return render_to_response('points/add_place.html', {'form': form}, context_instance=RequestContext(request))
def map_view(request):
if request.is_ajax():
upper_left_lat = request.GET['upper_left_lat']
upper_left_lng = request.GET['upper_left_lng']
lower_left_lat = request.GET['lower_left_lat']
lower_left_lng = request.GET['lower_left_lng']
places = Place.objects.all().filter(latitude__gte=lower_left_lat, longitude__gte=lower_left_lng,
latitude__lte=upper_left_lat, longitude__lte=upper_left_lng)
spots = []
for place in places:
temp = {}
temp['id'] = place.id
temp['address'] = place.address
temp['name'] = place.name
temp['like'] = place.like
temp['user_name'] = place.author.username
temp['comment'] = place.comment
spots.append(temp)
return HttpResponse(json.dumps(spots))
# Render the template depending on the context.
return render(request, 'points/map_view.html')
def get_places(request):
if request.is_ajax():
places = Place.objects.all()
spots = []
for place in places:
temp = {}
temp['id'] = place.id
temp['address'] = place.address
temp['name'] = place.name
temp['like'] = place.like
temp['user_name'] = place.author.username
temp['comment'] = place.comment
spots.append(temp)
return HttpResponse(json.dumps(spots))
return HttpResponse("0")
|
mit
| -8,677,430,849,673,660,000
| 31.912
| 112
| 0.605398
| false
| 3.970077
| false
| false
| false
|
amonmoce/corba_examples
|
omniORBpy-4.2.1/build/python/COS/CosNotifyComm_idl.py
|
1
|
37450
|
# Python stubs generated by omniidl from /usr/local/share/idl/omniORB/COS/CosNotifyComm.idl
# DO NOT EDIT THIS FILE!
import omniORB, _omnipy
from omniORB import CORBA, PortableServer
_0_CORBA = CORBA
_omnipy.checkVersion(4,2, __file__, 1)
try:
property
except NameError:
def property(*args):
return None
# #include "CosNotification.idl"
import CosNotification_idl
_0_CosNotification = omniORB.openModule("CosNotification")
_0_CosNotification__POA = omniORB.openModule("CosNotification__POA")
# #include "CosEventComm.idl"
import CosEventComm_idl
_0_CosEventComm = omniORB.openModule("CosEventComm")
_0_CosEventComm__POA = omniORB.openModule("CosEventComm__POA")
#
# Start of module "CosNotifyComm"
#
__name__ = "CosNotifyComm"
_0_CosNotifyComm = omniORB.openModule("CosNotifyComm", r"/usr/local/share/idl/omniORB/COS/CosNotifyComm.idl")
_0_CosNotifyComm__POA = omniORB.openModule("CosNotifyComm__POA", r"/usr/local/share/idl/omniORB/COS/CosNotifyComm.idl")
# exception InvalidEventType
_0_CosNotifyComm.InvalidEventType = omniORB.newEmptyClass()
class InvalidEventType (CORBA.UserException):
_NP_RepositoryId = "IDL:omg.org/CosNotifyComm/InvalidEventType:1.0"
def __init__(self, type):
CORBA.UserException.__init__(self, type)
self.type = type
_0_CosNotifyComm.InvalidEventType = InvalidEventType
_0_CosNotifyComm._d_InvalidEventType = (omniORB.tcInternal.tv_except, InvalidEventType, InvalidEventType._NP_RepositoryId, "InvalidEventType", "type", omniORB.typeMapping["IDL:omg.org/CosNotification/EventType:1.0"])
_0_CosNotifyComm._tc_InvalidEventType = omniORB.tcInternal.createTypeCode(_0_CosNotifyComm._d_InvalidEventType)
omniORB.registerType(InvalidEventType._NP_RepositoryId, _0_CosNotifyComm._d_InvalidEventType, _0_CosNotifyComm._tc_InvalidEventType)
del InvalidEventType
# interface NotifyPublish
_0_CosNotifyComm._d_NotifyPublish = (omniORB.tcInternal.tv_objref, "IDL:omg.org/CosNotifyComm/NotifyPublish:1.0", "NotifyPublish")
omniORB.typeMapping["IDL:omg.org/CosNotifyComm/NotifyPublish:1.0"] = _0_CosNotifyComm._d_NotifyPublish
_0_CosNotifyComm.NotifyPublish = omniORB.newEmptyClass()
class NotifyPublish :
_NP_RepositoryId = _0_CosNotifyComm._d_NotifyPublish[1]
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_nil = CORBA.Object._nil
_0_CosNotifyComm.NotifyPublish = NotifyPublish
_0_CosNotifyComm._tc_NotifyPublish = omniORB.tcInternal.createTypeCode(_0_CosNotifyComm._d_NotifyPublish)
omniORB.registerType(NotifyPublish._NP_RepositoryId, _0_CosNotifyComm._d_NotifyPublish, _0_CosNotifyComm._tc_NotifyPublish)
# NotifyPublish operations and attributes
NotifyPublish._d_offer_change = ((omniORB.typeMapping["IDL:omg.org/CosNotification/EventTypeSeq:1.0"], omniORB.typeMapping["IDL:omg.org/CosNotification/EventTypeSeq:1.0"]), (), {_0_CosNotifyComm.InvalidEventType._NP_RepositoryId: _0_CosNotifyComm._d_InvalidEventType})
# NotifyPublish object reference
class _objref_NotifyPublish (CORBA.Object):
_NP_RepositoryId = NotifyPublish._NP_RepositoryId
def __init__(self, obj):
CORBA.Object.__init__(self, obj)
def offer_change(self, *args):
return self._obj.invoke("offer_change", _0_CosNotifyComm.NotifyPublish._d_offer_change, args)
omniORB.registerObjref(NotifyPublish._NP_RepositoryId, _objref_NotifyPublish)
_0_CosNotifyComm._objref_NotifyPublish = _objref_NotifyPublish
del NotifyPublish, _objref_NotifyPublish
# NotifyPublish skeleton
__name__ = "CosNotifyComm__POA"
class NotifyPublish (PortableServer.Servant):
_NP_RepositoryId = _0_CosNotifyComm.NotifyPublish._NP_RepositoryId
_omni_op_d = {"offer_change": _0_CosNotifyComm.NotifyPublish._d_offer_change}
NotifyPublish._omni_skeleton = NotifyPublish
_0_CosNotifyComm__POA.NotifyPublish = NotifyPublish
omniORB.registerSkeleton(NotifyPublish._NP_RepositoryId, NotifyPublish)
del NotifyPublish
__name__ = "CosNotifyComm"
# interface NotifySubscribe
_0_CosNotifyComm._d_NotifySubscribe = (omniORB.tcInternal.tv_objref, "IDL:omg.org/CosNotifyComm/NotifySubscribe:1.0", "NotifySubscribe")
omniORB.typeMapping["IDL:omg.org/CosNotifyComm/NotifySubscribe:1.0"] = _0_CosNotifyComm._d_NotifySubscribe
_0_CosNotifyComm.NotifySubscribe = omniORB.newEmptyClass()
class NotifySubscribe :
_NP_RepositoryId = _0_CosNotifyComm._d_NotifySubscribe[1]
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_nil = CORBA.Object._nil
_0_CosNotifyComm.NotifySubscribe = NotifySubscribe
_0_CosNotifyComm._tc_NotifySubscribe = omniORB.tcInternal.createTypeCode(_0_CosNotifyComm._d_NotifySubscribe)
omniORB.registerType(NotifySubscribe._NP_RepositoryId, _0_CosNotifyComm._d_NotifySubscribe, _0_CosNotifyComm._tc_NotifySubscribe)
# NotifySubscribe operations and attributes
NotifySubscribe._d_subscription_change = ((omniORB.typeMapping["IDL:omg.org/CosNotification/EventTypeSeq:1.0"], omniORB.typeMapping["IDL:omg.org/CosNotification/EventTypeSeq:1.0"]), (), {_0_CosNotifyComm.InvalidEventType._NP_RepositoryId: _0_CosNotifyComm._d_InvalidEventType})
# NotifySubscribe object reference
class _objref_NotifySubscribe (CORBA.Object):
_NP_RepositoryId = NotifySubscribe._NP_RepositoryId
def __init__(self, obj):
CORBA.Object.__init__(self, obj)
def subscription_change(self, *args):
return self._obj.invoke("subscription_change", _0_CosNotifyComm.NotifySubscribe._d_subscription_change, args)
omniORB.registerObjref(NotifySubscribe._NP_RepositoryId, _objref_NotifySubscribe)
_0_CosNotifyComm._objref_NotifySubscribe = _objref_NotifySubscribe
del NotifySubscribe, _objref_NotifySubscribe
# NotifySubscribe skeleton
__name__ = "CosNotifyComm__POA"
class NotifySubscribe (PortableServer.Servant):
_NP_RepositoryId = _0_CosNotifyComm.NotifySubscribe._NP_RepositoryId
_omni_op_d = {"subscription_change": _0_CosNotifyComm.NotifySubscribe._d_subscription_change}
NotifySubscribe._omni_skeleton = NotifySubscribe
_0_CosNotifyComm__POA.NotifySubscribe = NotifySubscribe
omniORB.registerSkeleton(NotifySubscribe._NP_RepositoryId, NotifySubscribe)
del NotifySubscribe
__name__ = "CosNotifyComm"
# interface PushConsumer
_0_CosNotifyComm._d_PushConsumer = (omniORB.tcInternal.tv_objref, "IDL:omg.org/CosNotifyComm/PushConsumer:1.0", "PushConsumer")
omniORB.typeMapping["IDL:omg.org/CosNotifyComm/PushConsumer:1.0"] = _0_CosNotifyComm._d_PushConsumer
_0_CosNotifyComm.PushConsumer = omniORB.newEmptyClass()
class PushConsumer (_0_CosNotifyComm.NotifyPublish, _0_CosEventComm.PushConsumer):
_NP_RepositoryId = _0_CosNotifyComm._d_PushConsumer[1]
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_nil = CORBA.Object._nil
_0_CosNotifyComm.PushConsumer = PushConsumer
_0_CosNotifyComm._tc_PushConsumer = omniORB.tcInternal.createTypeCode(_0_CosNotifyComm._d_PushConsumer)
omniORB.registerType(PushConsumer._NP_RepositoryId, _0_CosNotifyComm._d_PushConsumer, _0_CosNotifyComm._tc_PushConsumer)
# PushConsumer object reference
class _objref_PushConsumer (_0_CosNotifyComm._objref_NotifyPublish, _0_CosEventComm._objref_PushConsumer):
_NP_RepositoryId = PushConsumer._NP_RepositoryId
def __init__(self, obj):
_0_CosNotifyComm._objref_NotifyPublish.__init__(self, obj)
_0_CosEventComm._objref_PushConsumer.__init__(self, obj)
omniORB.registerObjref(PushConsumer._NP_RepositoryId, _objref_PushConsumer)
_0_CosNotifyComm._objref_PushConsumer = _objref_PushConsumer
del PushConsumer, _objref_PushConsumer
# PushConsumer skeleton
__name__ = "CosNotifyComm__POA"
class PushConsumer (_0_CosNotifyComm__POA.NotifyPublish, _0_CosEventComm__POA.PushConsumer):
_NP_RepositoryId = _0_CosNotifyComm.PushConsumer._NP_RepositoryId
_omni_op_d = {}
_omni_op_d.update(_0_CosNotifyComm__POA.NotifyPublish._omni_op_d)
_omni_op_d.update(_0_CosEventComm__POA.PushConsumer._omni_op_d)
PushConsumer._omni_skeleton = PushConsumer
_0_CosNotifyComm__POA.PushConsumer = PushConsumer
omniORB.registerSkeleton(PushConsumer._NP_RepositoryId, PushConsumer)
del PushConsumer
__name__ = "CosNotifyComm"
# interface PullConsumer
_0_CosNotifyComm._d_PullConsumer = (omniORB.tcInternal.tv_objref, "IDL:omg.org/CosNotifyComm/PullConsumer:1.0", "PullConsumer")
omniORB.typeMapping["IDL:omg.org/CosNotifyComm/PullConsumer:1.0"] = _0_CosNotifyComm._d_PullConsumer
_0_CosNotifyComm.PullConsumer = omniORB.newEmptyClass()
class PullConsumer (_0_CosNotifyComm.NotifyPublish, _0_CosEventComm.PullConsumer):
_NP_RepositoryId = _0_CosNotifyComm._d_PullConsumer[1]
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_nil = CORBA.Object._nil
_0_CosNotifyComm.PullConsumer = PullConsumer
_0_CosNotifyComm._tc_PullConsumer = omniORB.tcInternal.createTypeCode(_0_CosNotifyComm._d_PullConsumer)
omniORB.registerType(PullConsumer._NP_RepositoryId, _0_CosNotifyComm._d_PullConsumer, _0_CosNotifyComm._tc_PullConsumer)
# PullConsumer object reference
class _objref_PullConsumer (_0_CosNotifyComm._objref_NotifyPublish, _0_CosEventComm._objref_PullConsumer):
_NP_RepositoryId = PullConsumer._NP_RepositoryId
def __init__(self, obj):
_0_CosNotifyComm._objref_NotifyPublish.__init__(self, obj)
_0_CosEventComm._objref_PullConsumer.__init__(self, obj)
omniORB.registerObjref(PullConsumer._NP_RepositoryId, _objref_PullConsumer)
_0_CosNotifyComm._objref_PullConsumer = _objref_PullConsumer
del PullConsumer, _objref_PullConsumer
# PullConsumer skeleton
__name__ = "CosNotifyComm__POA"
class PullConsumer (_0_CosNotifyComm__POA.NotifyPublish, _0_CosEventComm__POA.PullConsumer):
_NP_RepositoryId = _0_CosNotifyComm.PullConsumer._NP_RepositoryId
_omni_op_d = {}
_omni_op_d.update(_0_CosNotifyComm__POA.NotifyPublish._omni_op_d)
_omni_op_d.update(_0_CosEventComm__POA.PullConsumer._omni_op_d)
PullConsumer._omni_skeleton = PullConsumer
_0_CosNotifyComm__POA.PullConsumer = PullConsumer
omniORB.registerSkeleton(PullConsumer._NP_RepositoryId, PullConsumer)
del PullConsumer
__name__ = "CosNotifyComm"
# interface PullSupplier
_0_CosNotifyComm._d_PullSupplier = (omniORB.tcInternal.tv_objref, "IDL:omg.org/CosNotifyComm/PullSupplier:1.0", "PullSupplier")
omniORB.typeMapping["IDL:omg.org/CosNotifyComm/PullSupplier:1.0"] = _0_CosNotifyComm._d_PullSupplier
_0_CosNotifyComm.PullSupplier = omniORB.newEmptyClass()
class PullSupplier (_0_CosNotifyComm.NotifySubscribe, _0_CosEventComm.PullSupplier):
_NP_RepositoryId = _0_CosNotifyComm._d_PullSupplier[1]
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_nil = CORBA.Object._nil
_0_CosNotifyComm.PullSupplier = PullSupplier
_0_CosNotifyComm._tc_PullSupplier = omniORB.tcInternal.createTypeCode(_0_CosNotifyComm._d_PullSupplier)
omniORB.registerType(PullSupplier._NP_RepositoryId, _0_CosNotifyComm._d_PullSupplier, _0_CosNotifyComm._tc_PullSupplier)
# PullSupplier object reference
class _objref_PullSupplier (_0_CosNotifyComm._objref_NotifySubscribe, _0_CosEventComm._objref_PullSupplier):
_NP_RepositoryId = PullSupplier._NP_RepositoryId
def __init__(self, obj):
_0_CosNotifyComm._objref_NotifySubscribe.__init__(self, obj)
_0_CosEventComm._objref_PullSupplier.__init__(self, obj)
omniORB.registerObjref(PullSupplier._NP_RepositoryId, _objref_PullSupplier)
_0_CosNotifyComm._objref_PullSupplier = _objref_PullSupplier
del PullSupplier, _objref_PullSupplier
# PullSupplier skeleton
__name__ = "CosNotifyComm__POA"
class PullSupplier (_0_CosNotifyComm__POA.NotifySubscribe, _0_CosEventComm__POA.PullSupplier):
_NP_RepositoryId = _0_CosNotifyComm.PullSupplier._NP_RepositoryId
_omni_op_d = {}
_omni_op_d.update(_0_CosNotifyComm__POA.NotifySubscribe._omni_op_d)
_omni_op_d.update(_0_CosEventComm__POA.PullSupplier._omni_op_d)
PullSupplier._omni_skeleton = PullSupplier
_0_CosNotifyComm__POA.PullSupplier = PullSupplier
omniORB.registerSkeleton(PullSupplier._NP_RepositoryId, PullSupplier)
del PullSupplier
__name__ = "CosNotifyComm"
# interface PushSupplier
_0_CosNotifyComm._d_PushSupplier = (omniORB.tcInternal.tv_objref, "IDL:omg.org/CosNotifyComm/PushSupplier:1.0", "PushSupplier")
omniORB.typeMapping["IDL:omg.org/CosNotifyComm/PushSupplier:1.0"] = _0_CosNotifyComm._d_PushSupplier
_0_CosNotifyComm.PushSupplier = omniORB.newEmptyClass()
class PushSupplier (_0_CosNotifyComm.NotifySubscribe, _0_CosEventComm.PushSupplier):
_NP_RepositoryId = _0_CosNotifyComm._d_PushSupplier[1]
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_nil = CORBA.Object._nil
_0_CosNotifyComm.PushSupplier = PushSupplier
_0_CosNotifyComm._tc_PushSupplier = omniORB.tcInternal.createTypeCode(_0_CosNotifyComm._d_PushSupplier)
omniORB.registerType(PushSupplier._NP_RepositoryId, _0_CosNotifyComm._d_PushSupplier, _0_CosNotifyComm._tc_PushSupplier)
# PushSupplier object reference
class _objref_PushSupplier (_0_CosNotifyComm._objref_NotifySubscribe, _0_CosEventComm._objref_PushSupplier):
_NP_RepositoryId = PushSupplier._NP_RepositoryId
def __init__(self, obj):
_0_CosNotifyComm._objref_NotifySubscribe.__init__(self, obj)
_0_CosEventComm._objref_PushSupplier.__init__(self, obj)
omniORB.registerObjref(PushSupplier._NP_RepositoryId, _objref_PushSupplier)
_0_CosNotifyComm._objref_PushSupplier = _objref_PushSupplier
del PushSupplier, _objref_PushSupplier
# PushSupplier skeleton
__name__ = "CosNotifyComm__POA"
class PushSupplier (_0_CosNotifyComm__POA.NotifySubscribe, _0_CosEventComm__POA.PushSupplier):
_NP_RepositoryId = _0_CosNotifyComm.PushSupplier._NP_RepositoryId
_omni_op_d = {}
_omni_op_d.update(_0_CosNotifyComm__POA.NotifySubscribe._omni_op_d)
_omni_op_d.update(_0_CosEventComm__POA.PushSupplier._omni_op_d)
PushSupplier._omni_skeleton = PushSupplier
_0_CosNotifyComm__POA.PushSupplier = PushSupplier
omniORB.registerSkeleton(PushSupplier._NP_RepositoryId, PushSupplier)
del PushSupplier
__name__ = "CosNotifyComm"
# interface StructuredPushConsumer
_0_CosNotifyComm._d_StructuredPushConsumer = (omniORB.tcInternal.tv_objref, "IDL:omg.org/CosNotifyComm/StructuredPushConsumer:1.0", "StructuredPushConsumer")
omniORB.typeMapping["IDL:omg.org/CosNotifyComm/StructuredPushConsumer:1.0"] = _0_CosNotifyComm._d_StructuredPushConsumer
_0_CosNotifyComm.StructuredPushConsumer = omniORB.newEmptyClass()
class StructuredPushConsumer (_0_CosNotifyComm.NotifyPublish):
_NP_RepositoryId = _0_CosNotifyComm._d_StructuredPushConsumer[1]
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_nil = CORBA.Object._nil
_0_CosNotifyComm.StructuredPushConsumer = StructuredPushConsumer
_0_CosNotifyComm._tc_StructuredPushConsumer = omniORB.tcInternal.createTypeCode(_0_CosNotifyComm._d_StructuredPushConsumer)
omniORB.registerType(StructuredPushConsumer._NP_RepositoryId, _0_CosNotifyComm._d_StructuredPushConsumer, _0_CosNotifyComm._tc_StructuredPushConsumer)
# StructuredPushConsumer operations and attributes
StructuredPushConsumer._d_push_structured_event = ((omniORB.typeMapping["IDL:omg.org/CosNotification/StructuredEvent:1.0"], ), (), {_0_CosEventComm.Disconnected._NP_RepositoryId: _0_CosEventComm._d_Disconnected})
StructuredPushConsumer._d_disconnect_structured_push_consumer = ((), (), None)
# StructuredPushConsumer object reference
class _objref_StructuredPushConsumer (_0_CosNotifyComm._objref_NotifyPublish):
_NP_RepositoryId = StructuredPushConsumer._NP_RepositoryId
def __init__(self, obj):
_0_CosNotifyComm._objref_NotifyPublish.__init__(self, obj)
def push_structured_event(self, *args):
return self._obj.invoke("push_structured_event", _0_CosNotifyComm.StructuredPushConsumer._d_push_structured_event, args)
def disconnect_structured_push_consumer(self, *args):
return self._obj.invoke("disconnect_structured_push_consumer", _0_CosNotifyComm.StructuredPushConsumer._d_disconnect_structured_push_consumer, args)
omniORB.registerObjref(StructuredPushConsumer._NP_RepositoryId, _objref_StructuredPushConsumer)
_0_CosNotifyComm._objref_StructuredPushConsumer = _objref_StructuredPushConsumer
del StructuredPushConsumer, _objref_StructuredPushConsumer
# StructuredPushConsumer skeleton
__name__ = "CosNotifyComm__POA"
class StructuredPushConsumer (_0_CosNotifyComm__POA.NotifyPublish):
_NP_RepositoryId = _0_CosNotifyComm.StructuredPushConsumer._NP_RepositoryId
_omni_op_d = {"push_structured_event": _0_CosNotifyComm.StructuredPushConsumer._d_push_structured_event, "disconnect_structured_push_consumer": _0_CosNotifyComm.StructuredPushConsumer._d_disconnect_structured_push_consumer}
_omni_op_d.update(_0_CosNotifyComm__POA.NotifyPublish._omni_op_d)
StructuredPushConsumer._omni_skeleton = StructuredPushConsumer
_0_CosNotifyComm__POA.StructuredPushConsumer = StructuredPushConsumer
omniORB.registerSkeleton(StructuredPushConsumer._NP_RepositoryId, StructuredPushConsumer)
del StructuredPushConsumer
__name__ = "CosNotifyComm"
# interface StructuredPullConsumer
_0_CosNotifyComm._d_StructuredPullConsumer = (omniORB.tcInternal.tv_objref, "IDL:omg.org/CosNotifyComm/StructuredPullConsumer:1.0", "StructuredPullConsumer")
omniORB.typeMapping["IDL:omg.org/CosNotifyComm/StructuredPullConsumer:1.0"] = _0_CosNotifyComm._d_StructuredPullConsumer
_0_CosNotifyComm.StructuredPullConsumer = omniORB.newEmptyClass()
class StructuredPullConsumer (_0_CosNotifyComm.NotifyPublish):
_NP_RepositoryId = _0_CosNotifyComm._d_StructuredPullConsumer[1]
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_nil = CORBA.Object._nil
_0_CosNotifyComm.StructuredPullConsumer = StructuredPullConsumer
_0_CosNotifyComm._tc_StructuredPullConsumer = omniORB.tcInternal.createTypeCode(_0_CosNotifyComm._d_StructuredPullConsumer)
omniORB.registerType(StructuredPullConsumer._NP_RepositoryId, _0_CosNotifyComm._d_StructuredPullConsumer, _0_CosNotifyComm._tc_StructuredPullConsumer)
# StructuredPullConsumer operations and attributes
StructuredPullConsumer._d_disconnect_structured_pull_consumer = ((), (), None)
# StructuredPullConsumer object reference
class _objref_StructuredPullConsumer (_0_CosNotifyComm._objref_NotifyPublish):
_NP_RepositoryId = StructuredPullConsumer._NP_RepositoryId
def __init__(self, obj):
_0_CosNotifyComm._objref_NotifyPublish.__init__(self, obj)
def disconnect_structured_pull_consumer(self, *args):
return self._obj.invoke("disconnect_structured_pull_consumer", _0_CosNotifyComm.StructuredPullConsumer._d_disconnect_structured_pull_consumer, args)
omniORB.registerObjref(StructuredPullConsumer._NP_RepositoryId, _objref_StructuredPullConsumer)
_0_CosNotifyComm._objref_StructuredPullConsumer = _objref_StructuredPullConsumer
del StructuredPullConsumer, _objref_StructuredPullConsumer
# StructuredPullConsumer skeleton
__name__ = "CosNotifyComm__POA"
class StructuredPullConsumer (_0_CosNotifyComm__POA.NotifyPublish):
_NP_RepositoryId = _0_CosNotifyComm.StructuredPullConsumer._NP_RepositoryId
_omni_op_d = {"disconnect_structured_pull_consumer": _0_CosNotifyComm.StructuredPullConsumer._d_disconnect_structured_pull_consumer}
_omni_op_d.update(_0_CosNotifyComm__POA.NotifyPublish._omni_op_d)
StructuredPullConsumer._omni_skeleton = StructuredPullConsumer
_0_CosNotifyComm__POA.StructuredPullConsumer = StructuredPullConsumer
omniORB.registerSkeleton(StructuredPullConsumer._NP_RepositoryId, StructuredPullConsumer)
del StructuredPullConsumer
__name__ = "CosNotifyComm"
# interface StructuredPullSupplier
_0_CosNotifyComm._d_StructuredPullSupplier = (omniORB.tcInternal.tv_objref, "IDL:omg.org/CosNotifyComm/StructuredPullSupplier:1.0", "StructuredPullSupplier")
omniORB.typeMapping["IDL:omg.org/CosNotifyComm/StructuredPullSupplier:1.0"] = _0_CosNotifyComm._d_StructuredPullSupplier
_0_CosNotifyComm.StructuredPullSupplier = omniORB.newEmptyClass()
class StructuredPullSupplier (_0_CosNotifyComm.NotifySubscribe):
_NP_RepositoryId = _0_CosNotifyComm._d_StructuredPullSupplier[1]
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_nil = CORBA.Object._nil
_0_CosNotifyComm.StructuredPullSupplier = StructuredPullSupplier
_0_CosNotifyComm._tc_StructuredPullSupplier = omniORB.tcInternal.createTypeCode(_0_CosNotifyComm._d_StructuredPullSupplier)
omniORB.registerType(StructuredPullSupplier._NP_RepositoryId, _0_CosNotifyComm._d_StructuredPullSupplier, _0_CosNotifyComm._tc_StructuredPullSupplier)
# StructuredPullSupplier operations and attributes
StructuredPullSupplier._d_pull_structured_event = ((), (omniORB.typeMapping["IDL:omg.org/CosNotification/StructuredEvent:1.0"], ), {_0_CosEventComm.Disconnected._NP_RepositoryId: _0_CosEventComm._d_Disconnected})
StructuredPullSupplier._d_try_pull_structured_event = ((), (omniORB.typeMapping["IDL:omg.org/CosNotification/StructuredEvent:1.0"], omniORB.tcInternal.tv_boolean), {_0_CosEventComm.Disconnected._NP_RepositoryId: _0_CosEventComm._d_Disconnected})
StructuredPullSupplier._d_disconnect_structured_pull_supplier = ((), (), None)
# StructuredPullSupplier object reference
class _objref_StructuredPullSupplier (_0_CosNotifyComm._objref_NotifySubscribe):
_NP_RepositoryId = StructuredPullSupplier._NP_RepositoryId
def __init__(self, obj):
_0_CosNotifyComm._objref_NotifySubscribe.__init__(self, obj)
def pull_structured_event(self, *args):
return self._obj.invoke("pull_structured_event", _0_CosNotifyComm.StructuredPullSupplier._d_pull_structured_event, args)
def try_pull_structured_event(self, *args):
return self._obj.invoke("try_pull_structured_event", _0_CosNotifyComm.StructuredPullSupplier._d_try_pull_structured_event, args)
def disconnect_structured_pull_supplier(self, *args):
return self._obj.invoke("disconnect_structured_pull_supplier", _0_CosNotifyComm.StructuredPullSupplier._d_disconnect_structured_pull_supplier, args)
omniORB.registerObjref(StructuredPullSupplier._NP_RepositoryId, _objref_StructuredPullSupplier)
_0_CosNotifyComm._objref_StructuredPullSupplier = _objref_StructuredPullSupplier
del StructuredPullSupplier, _objref_StructuredPullSupplier
# StructuredPullSupplier skeleton
__name__ = "CosNotifyComm__POA"
class StructuredPullSupplier (_0_CosNotifyComm__POA.NotifySubscribe):
_NP_RepositoryId = _0_CosNotifyComm.StructuredPullSupplier._NP_RepositoryId
_omni_op_d = {"pull_structured_event": _0_CosNotifyComm.StructuredPullSupplier._d_pull_structured_event, "try_pull_structured_event": _0_CosNotifyComm.StructuredPullSupplier._d_try_pull_structured_event, "disconnect_structured_pull_supplier": _0_CosNotifyComm.StructuredPullSupplier._d_disconnect_structured_pull_supplier}
_omni_op_d.update(_0_CosNotifyComm__POA.NotifySubscribe._omni_op_d)
StructuredPullSupplier._omni_skeleton = StructuredPullSupplier
_0_CosNotifyComm__POA.StructuredPullSupplier = StructuredPullSupplier
omniORB.registerSkeleton(StructuredPullSupplier._NP_RepositoryId, StructuredPullSupplier)
del StructuredPullSupplier
__name__ = "CosNotifyComm"
# interface StructuredPushSupplier
_0_CosNotifyComm._d_StructuredPushSupplier = (omniORB.tcInternal.tv_objref, "IDL:omg.org/CosNotifyComm/StructuredPushSupplier:1.0", "StructuredPushSupplier")
omniORB.typeMapping["IDL:omg.org/CosNotifyComm/StructuredPushSupplier:1.0"] = _0_CosNotifyComm._d_StructuredPushSupplier
_0_CosNotifyComm.StructuredPushSupplier = omniORB.newEmptyClass()
class StructuredPushSupplier (_0_CosNotifyComm.NotifySubscribe):
_NP_RepositoryId = _0_CosNotifyComm._d_StructuredPushSupplier[1]
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_nil = CORBA.Object._nil
_0_CosNotifyComm.StructuredPushSupplier = StructuredPushSupplier
_0_CosNotifyComm._tc_StructuredPushSupplier = omniORB.tcInternal.createTypeCode(_0_CosNotifyComm._d_StructuredPushSupplier)
omniORB.registerType(StructuredPushSupplier._NP_RepositoryId, _0_CosNotifyComm._d_StructuredPushSupplier, _0_CosNotifyComm._tc_StructuredPushSupplier)
# StructuredPushSupplier operations and attributes
StructuredPushSupplier._d_disconnect_structured_push_supplier = ((), (), None)
# StructuredPushSupplier object reference
class _objref_StructuredPushSupplier (_0_CosNotifyComm._objref_NotifySubscribe):
_NP_RepositoryId = StructuredPushSupplier._NP_RepositoryId
def __init__(self, obj):
_0_CosNotifyComm._objref_NotifySubscribe.__init__(self, obj)
def disconnect_structured_push_supplier(self, *args):
return self._obj.invoke("disconnect_structured_push_supplier", _0_CosNotifyComm.StructuredPushSupplier._d_disconnect_structured_push_supplier, args)
omniORB.registerObjref(StructuredPushSupplier._NP_RepositoryId, _objref_StructuredPushSupplier)
_0_CosNotifyComm._objref_StructuredPushSupplier = _objref_StructuredPushSupplier
del StructuredPushSupplier, _objref_StructuredPushSupplier
# StructuredPushSupplier skeleton
__name__ = "CosNotifyComm__POA"
class StructuredPushSupplier (_0_CosNotifyComm__POA.NotifySubscribe):
_NP_RepositoryId = _0_CosNotifyComm.StructuredPushSupplier._NP_RepositoryId
_omni_op_d = {"disconnect_structured_push_supplier": _0_CosNotifyComm.StructuredPushSupplier._d_disconnect_structured_push_supplier}
_omni_op_d.update(_0_CosNotifyComm__POA.NotifySubscribe._omni_op_d)
StructuredPushSupplier._omni_skeleton = StructuredPushSupplier
_0_CosNotifyComm__POA.StructuredPushSupplier = StructuredPushSupplier
omniORB.registerSkeleton(StructuredPushSupplier._NP_RepositoryId, StructuredPushSupplier)
del StructuredPushSupplier
__name__ = "CosNotifyComm"
# interface SequencePushConsumer
_0_CosNotifyComm._d_SequencePushConsumer = (omniORB.tcInternal.tv_objref, "IDL:omg.org/CosNotifyComm/SequencePushConsumer:1.0", "SequencePushConsumer")
omniORB.typeMapping["IDL:omg.org/CosNotifyComm/SequencePushConsumer:1.0"] = _0_CosNotifyComm._d_SequencePushConsumer
_0_CosNotifyComm.SequencePushConsumer = omniORB.newEmptyClass()
class SequencePushConsumer (_0_CosNotifyComm.NotifyPublish):
_NP_RepositoryId = _0_CosNotifyComm._d_SequencePushConsumer[1]
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_nil = CORBA.Object._nil
_0_CosNotifyComm.SequencePushConsumer = SequencePushConsumer
_0_CosNotifyComm._tc_SequencePushConsumer = omniORB.tcInternal.createTypeCode(_0_CosNotifyComm._d_SequencePushConsumer)
omniORB.registerType(SequencePushConsumer._NP_RepositoryId, _0_CosNotifyComm._d_SequencePushConsumer, _0_CosNotifyComm._tc_SequencePushConsumer)
# SequencePushConsumer operations and attributes
SequencePushConsumer._d_push_structured_events = ((omniORB.typeMapping["IDL:omg.org/CosNotification/EventBatch:1.0"], ), (), {_0_CosEventComm.Disconnected._NP_RepositoryId: _0_CosEventComm._d_Disconnected})
SequencePushConsumer._d_disconnect_sequence_push_consumer = ((), (), None)
# SequencePushConsumer object reference
class _objref_SequencePushConsumer (_0_CosNotifyComm._objref_NotifyPublish):
_NP_RepositoryId = SequencePushConsumer._NP_RepositoryId
def __init__(self, obj):
_0_CosNotifyComm._objref_NotifyPublish.__init__(self, obj)
def push_structured_events(self, *args):
return self._obj.invoke("push_structured_events", _0_CosNotifyComm.SequencePushConsumer._d_push_structured_events, args)
def disconnect_sequence_push_consumer(self, *args):
return self._obj.invoke("disconnect_sequence_push_consumer", _0_CosNotifyComm.SequencePushConsumer._d_disconnect_sequence_push_consumer, args)
omniORB.registerObjref(SequencePushConsumer._NP_RepositoryId, _objref_SequencePushConsumer)
_0_CosNotifyComm._objref_SequencePushConsumer = _objref_SequencePushConsumer
del SequencePushConsumer, _objref_SequencePushConsumer
# SequencePushConsumer skeleton
__name__ = "CosNotifyComm__POA"
class SequencePushConsumer (_0_CosNotifyComm__POA.NotifyPublish):
_NP_RepositoryId = _0_CosNotifyComm.SequencePushConsumer._NP_RepositoryId
_omni_op_d = {"push_structured_events": _0_CosNotifyComm.SequencePushConsumer._d_push_structured_events, "disconnect_sequence_push_consumer": _0_CosNotifyComm.SequencePushConsumer._d_disconnect_sequence_push_consumer}
_omni_op_d.update(_0_CosNotifyComm__POA.NotifyPublish._omni_op_d)
SequencePushConsumer._omni_skeleton = SequencePushConsumer
_0_CosNotifyComm__POA.SequencePushConsumer = SequencePushConsumer
omniORB.registerSkeleton(SequencePushConsumer._NP_RepositoryId, SequencePushConsumer)
del SequencePushConsumer
__name__ = "CosNotifyComm"
# interface SequencePullConsumer
_0_CosNotifyComm._d_SequencePullConsumer = (omniORB.tcInternal.tv_objref, "IDL:omg.org/CosNotifyComm/SequencePullConsumer:1.0", "SequencePullConsumer")
omniORB.typeMapping["IDL:omg.org/CosNotifyComm/SequencePullConsumer:1.0"] = _0_CosNotifyComm._d_SequencePullConsumer
_0_CosNotifyComm.SequencePullConsumer = omniORB.newEmptyClass()
class SequencePullConsumer (_0_CosNotifyComm.NotifyPublish):
_NP_RepositoryId = _0_CosNotifyComm._d_SequencePullConsumer[1]
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_nil = CORBA.Object._nil
_0_CosNotifyComm.SequencePullConsumer = SequencePullConsumer
_0_CosNotifyComm._tc_SequencePullConsumer = omniORB.tcInternal.createTypeCode(_0_CosNotifyComm._d_SequencePullConsumer)
omniORB.registerType(SequencePullConsumer._NP_RepositoryId, _0_CosNotifyComm._d_SequencePullConsumer, _0_CosNotifyComm._tc_SequencePullConsumer)
# SequencePullConsumer operations and attributes
SequencePullConsumer._d_disconnect_sequence_pull_consumer = ((), (), None)
# SequencePullConsumer object reference
class _objref_SequencePullConsumer (_0_CosNotifyComm._objref_NotifyPublish):
_NP_RepositoryId = SequencePullConsumer._NP_RepositoryId
def __init__(self, obj):
_0_CosNotifyComm._objref_NotifyPublish.__init__(self, obj)
def disconnect_sequence_pull_consumer(self, *args):
return self._obj.invoke("disconnect_sequence_pull_consumer", _0_CosNotifyComm.SequencePullConsumer._d_disconnect_sequence_pull_consumer, args)
omniORB.registerObjref(SequencePullConsumer._NP_RepositoryId, _objref_SequencePullConsumer)
_0_CosNotifyComm._objref_SequencePullConsumer = _objref_SequencePullConsumer
del SequencePullConsumer, _objref_SequencePullConsumer
# SequencePullConsumer skeleton
__name__ = "CosNotifyComm__POA"
class SequencePullConsumer (_0_CosNotifyComm__POA.NotifyPublish):
_NP_RepositoryId = _0_CosNotifyComm.SequencePullConsumer._NP_RepositoryId
_omni_op_d = {"disconnect_sequence_pull_consumer": _0_CosNotifyComm.SequencePullConsumer._d_disconnect_sequence_pull_consumer}
_omni_op_d.update(_0_CosNotifyComm__POA.NotifyPublish._omni_op_d)
SequencePullConsumer._omni_skeleton = SequencePullConsumer
_0_CosNotifyComm__POA.SequencePullConsumer = SequencePullConsumer
omniORB.registerSkeleton(SequencePullConsumer._NP_RepositoryId, SequencePullConsumer)
del SequencePullConsumer
__name__ = "CosNotifyComm"
# interface SequencePullSupplier
_0_CosNotifyComm._d_SequencePullSupplier = (omniORB.tcInternal.tv_objref, "IDL:omg.org/CosNotifyComm/SequencePullSupplier:1.0", "SequencePullSupplier")
omniORB.typeMapping["IDL:omg.org/CosNotifyComm/SequencePullSupplier:1.0"] = _0_CosNotifyComm._d_SequencePullSupplier
_0_CosNotifyComm.SequencePullSupplier = omniORB.newEmptyClass()
class SequencePullSupplier (_0_CosNotifyComm.NotifySubscribe):
_NP_RepositoryId = _0_CosNotifyComm._d_SequencePullSupplier[1]
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_nil = CORBA.Object._nil
_0_CosNotifyComm.SequencePullSupplier = SequencePullSupplier
_0_CosNotifyComm._tc_SequencePullSupplier = omniORB.tcInternal.createTypeCode(_0_CosNotifyComm._d_SequencePullSupplier)
omniORB.registerType(SequencePullSupplier._NP_RepositoryId, _0_CosNotifyComm._d_SequencePullSupplier, _0_CosNotifyComm._tc_SequencePullSupplier)
# SequencePullSupplier operations and attributes
SequencePullSupplier._d_pull_structured_events = ((omniORB.tcInternal.tv_long, ), (omniORB.typeMapping["IDL:omg.org/CosNotification/EventBatch:1.0"], ), {_0_CosEventComm.Disconnected._NP_RepositoryId: _0_CosEventComm._d_Disconnected})
SequencePullSupplier._d_try_pull_structured_events = ((omniORB.tcInternal.tv_long, ), (omniORB.typeMapping["IDL:omg.org/CosNotification/EventBatch:1.0"], omniORB.tcInternal.tv_boolean), {_0_CosEventComm.Disconnected._NP_RepositoryId: _0_CosEventComm._d_Disconnected})
SequencePullSupplier._d_disconnect_sequence_pull_supplier = ((), (), None)
# SequencePullSupplier object reference
class _objref_SequencePullSupplier (_0_CosNotifyComm._objref_NotifySubscribe):
_NP_RepositoryId = SequencePullSupplier._NP_RepositoryId
def __init__(self, obj):
_0_CosNotifyComm._objref_NotifySubscribe.__init__(self, obj)
def pull_structured_events(self, *args):
return self._obj.invoke("pull_structured_events", _0_CosNotifyComm.SequencePullSupplier._d_pull_structured_events, args)
def try_pull_structured_events(self, *args):
return self._obj.invoke("try_pull_structured_events", _0_CosNotifyComm.SequencePullSupplier._d_try_pull_structured_events, args)
def disconnect_sequence_pull_supplier(self, *args):
return self._obj.invoke("disconnect_sequence_pull_supplier", _0_CosNotifyComm.SequencePullSupplier._d_disconnect_sequence_pull_supplier, args)
omniORB.registerObjref(SequencePullSupplier._NP_RepositoryId, _objref_SequencePullSupplier)
_0_CosNotifyComm._objref_SequencePullSupplier = _objref_SequencePullSupplier
del SequencePullSupplier, _objref_SequencePullSupplier
# SequencePullSupplier skeleton
__name__ = "CosNotifyComm__POA"
class SequencePullSupplier (_0_CosNotifyComm__POA.NotifySubscribe):
_NP_RepositoryId = _0_CosNotifyComm.SequencePullSupplier._NP_RepositoryId
_omni_op_d = {"pull_structured_events": _0_CosNotifyComm.SequencePullSupplier._d_pull_structured_events, "try_pull_structured_events": _0_CosNotifyComm.SequencePullSupplier._d_try_pull_structured_events, "disconnect_sequence_pull_supplier": _0_CosNotifyComm.SequencePullSupplier._d_disconnect_sequence_pull_supplier}
_omni_op_d.update(_0_CosNotifyComm__POA.NotifySubscribe._omni_op_d)
SequencePullSupplier._omni_skeleton = SequencePullSupplier
_0_CosNotifyComm__POA.SequencePullSupplier = SequencePullSupplier
omniORB.registerSkeleton(SequencePullSupplier._NP_RepositoryId, SequencePullSupplier)
del SequencePullSupplier
__name__ = "CosNotifyComm"
# interface SequencePushSupplier
_0_CosNotifyComm._d_SequencePushSupplier = (omniORB.tcInternal.tv_objref, "IDL:omg.org/CosNotifyComm/SequencePushSupplier:1.0", "SequencePushSupplier")
omniORB.typeMapping["IDL:omg.org/CosNotifyComm/SequencePushSupplier:1.0"] = _0_CosNotifyComm._d_SequencePushSupplier
_0_CosNotifyComm.SequencePushSupplier = omniORB.newEmptyClass()
class SequencePushSupplier (_0_CosNotifyComm.NotifySubscribe):
_NP_RepositoryId = _0_CosNotifyComm._d_SequencePushSupplier[1]
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_nil = CORBA.Object._nil
_0_CosNotifyComm.SequencePushSupplier = SequencePushSupplier
_0_CosNotifyComm._tc_SequencePushSupplier = omniORB.tcInternal.createTypeCode(_0_CosNotifyComm._d_SequencePushSupplier)
omniORB.registerType(SequencePushSupplier._NP_RepositoryId, _0_CosNotifyComm._d_SequencePushSupplier, _0_CosNotifyComm._tc_SequencePushSupplier)
# SequencePushSupplier operations and attributes
SequencePushSupplier._d_disconnect_sequence_push_supplier = ((), (), None)
# SequencePushSupplier object reference
class _objref_SequencePushSupplier (_0_CosNotifyComm._objref_NotifySubscribe):
_NP_RepositoryId = SequencePushSupplier._NP_RepositoryId
def __init__(self, obj):
_0_CosNotifyComm._objref_NotifySubscribe.__init__(self, obj)
def disconnect_sequence_push_supplier(self, *args):
return self._obj.invoke("disconnect_sequence_push_supplier", _0_CosNotifyComm.SequencePushSupplier._d_disconnect_sequence_push_supplier, args)
omniORB.registerObjref(SequencePushSupplier._NP_RepositoryId, _objref_SequencePushSupplier)
_0_CosNotifyComm._objref_SequencePushSupplier = _objref_SequencePushSupplier
del SequencePushSupplier, _objref_SequencePushSupplier
# SequencePushSupplier skeleton
__name__ = "CosNotifyComm__POA"
class SequencePushSupplier (_0_CosNotifyComm__POA.NotifySubscribe):
_NP_RepositoryId = _0_CosNotifyComm.SequencePushSupplier._NP_RepositoryId
_omni_op_d = {"disconnect_sequence_push_supplier": _0_CosNotifyComm.SequencePushSupplier._d_disconnect_sequence_push_supplier}
_omni_op_d.update(_0_CosNotifyComm__POA.NotifySubscribe._omni_op_d)
SequencePushSupplier._omni_skeleton = SequencePushSupplier
_0_CosNotifyComm__POA.SequencePushSupplier = SequencePushSupplier
omniORB.registerSkeleton(SequencePushSupplier._NP_RepositoryId, SequencePushSupplier)
del SequencePushSupplier
__name__ = "CosNotifyComm"
#
# End of module "CosNotifyComm"
#
__name__ = "CosNotifyComm_idl"
_exported_modules = ( "CosNotifyComm", )
# The end.
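# Editor's note (added): an illustrative servant sketch, not generated code.
# A concrete consumer subclasses one of the __POA skeletons above and fills in
# the IDL operations; the body below is an assumption about typical usage.
class _ExampleStructuredPushConsumer(_0_CosNotifyComm__POA.StructuredPushConsumer):
    def push_structured_event(self, event):
        # `event` is a CosNotification::StructuredEvent delivered by the channel.
        print(event.header.fixed_header.event_type.type_name)
    def disconnect_structured_push_consumer(self):
        pass
    def offer_change(self, added, removed):
        # Inherited from NotifyPublish; this sketch ignores offer updates.
        pass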
|
mit
| -5,131,353,625,835,709,000
| 48.933333
| 326
| 0.786195
| false
| 3.305384
| false
| false
| false
|
cipri-tom/Swiss-on-Amazon
|
spark/brand_search.py
|
1
|
1692
|
#!/usr/bin/env python
from pyspark import SparkContext, SparkConf
import json
from pprint import pprint
import pickle
import re
#config path
brands_path = "../data/wiki_brands.txt"
brands_path2 = "../data/all_swiss_brands.pickle"
metadata_path = "hdfs:///datasets/amazon-reviews/metadata.json"
# load the list of brands
brands = []
with open(brands_path) as f:
for line in f:
line = line.rstrip('\n').lower()
brands.append(line)
with open(brands_path2, 'rb') as fp:
new_brands = pickle.load(fp)
# clean brand data
for b in new_brands:
b = b.lower()
    b = re.sub(r" [\(\[].*?[\)\]]", "", b)  # strip trailing parenthesised qualifiers
brands.append(b)
brands = list(set(brands))
# lookup if a certain brand is swiss
def searchBrand(line):
line = line.rstrip('\n').lower()
    d = eval(line)  # each metadata line is a Python dict literal; eval assumes trusted input
if 'brand' in d:
if d['brand'] in brands:
return ("Swiss brand", [d])
#return (d['brand'], d['asin'])
else:
return ("No Swiss brand", 1)
else:
return ("No brand", 1)
# load spark job
conf = SparkConf().setAppName("SoA")
sc = SparkContext(conf=conf)
# load metadata file
text_file = sc.textFile(metadata_path)
print("finished loading file and brands")
# map reduce ->
# for each product lookup if it is swiss, keeps brand:productkey (map)
# group products of the same brand, keeps brand:[productkeys] (reduce)
counts = text_file \
.map(searchBrand) \
.reduceByKey(lambda a, b: a + b)
products = counts.collect()
print("finished map reduce")
#print(products)
# create json file containing only swiss products
f = open('../data/swiss_products.json','w')
products = dict(products)
for product in products['Swiss brand']:
f.write(str(product) + '\n')
f.close()
print("finished writing file")
|
gpl-3.0
| -4,317,103,139,602,162,700
| 22.191781
| 70
| 0.670804
| false
| 2.968421
| false
| false
| false
|
tskisner/pytoast
|
src/python/tests/dist.py
|
1
|
5208
|
# Copyright (c) 2015-2018 by the parties listed in the AUTHORS file.
# All rights reserved. Use of this source code is governed by
# a BSD-style license that can be found in the LICENSE file.
from ..mpi import MPI
from .mpi import MPITestCase
from ..dist import *
import numpy as np
import numpy.testing as nt
import sys
import os
from ._helpers import (create_outdir, create_distdata)
class DataTest(MPITestCase):
def setUp(self):
fixture_name = os.path.splitext(os.path.basename(__file__))[0]
self.outdir = create_outdir(self.comm, fixture_name)
# Create one observation per group.
self.data = create_distdata(self.comm, obs_per_group=1)
self.ntask = 24
self.sizes1 = [
29218,
430879,
43684,
430338,
36289,
437553,
37461,
436200,
41249,
432593,
42467,
431195,
35387,
438274,
36740,
436741,
40663,
432999,
42015,
431285,
35297,
438004,
37010,
436291,
41114,
432186,
42828,
430293,
36243,
436697,
38318,
434802,
42602,
430338,
44676,
428264,
38273,
434306,
40708,
432051,
45308,
427452,
36695,
435884,
41520,
430879,
44090,
428309,
38273,
434126,
40843,
431375
]
self.totsamp1 = np.sum(self.sizes1)
self.sizes2 = [ (int(3600*169.7)) for i in range(8640) ]
self.totsamp2 = np.sum(self.sizes2)
def test_construction(self):
dist_uni1 = distribute_uniform(self.totsamp1, self.ntask)
# with open("test_uni_{}".format(self.comm.rank), "w") as f:
# for d in dist_uni:
# f.write("uniform: {} {}\n".format(d[0], d[1]))
n1 = np.sum(np.array(dist_uni1)[:,1])
assert(n1 == self.totsamp1)
n = self.totsamp1
breaks = [n//2+1000, n//4-1000000, n//2+1000, (3*n)//4]
dist_uni2 = distribute_uniform(self.totsamp1, self.ntask,
breaks=breaks)
n2 = np.sum(np.array(dist_uni2)[:,1])
assert(n2 == self.totsamp1)
for offset, nsamp in dist_uni2:
for brk in breaks:
if brk > offset and brk < offset+nsamp:
raise Exception(
"Uniform data distribution did not honor the breaks")
dist_disc1 = distribute_discrete(self.sizes1, self.ntask)
# with open("test_disc_{}".format(self.comm.rank), "w") as f:
# for d in dist_disc:
# f.write("discrete: {} {}\n".format(d[0], d[1]))
n = np.sum(np.array(dist_disc1)[:,1])
assert(n == len(self.sizes1))
n = len(self.sizes1)
breaks = [n//2, n//4, n//2, (3*n)//4]
dist_disc2 = distribute_discrete(self.sizes1, self.ntask,
breaks=breaks)
n = np.sum(np.array(dist_disc2)[:,1])
assert(n == len(self.sizes1))
for offset, nchunk in dist_disc2:
for brk in breaks:
if brk > offset and brk < offset+nchunk:
raise Exception(
"Discrete data distribution did not honor the breaks")
handle = None
if self.comm.rank == 0:
handle = open(os.path.join(self.outdir,"out_test_construct_info"),
"w")
self.data.info(handle)
if self.comm.rank == 0:
handle.close()
dist_disc3 = distribute_discrete(self.sizes2, 384)
if self.comm.rank == 0:
with open(os.path.join(self.outdir,"dist_discrete_8640x384.txt"), "w") as f:
indx = 0
for d in dist_disc3:
f.write("{:04d} = ({}, {})\n".format(indx, d[0], d[1]))
indx += 1
return
def test_split(self):
data = Data(self.data.comm)
data.obs.append({"site":"Atacama", "season":1})
data.obs.append({"site":"Atacama", "season":2})
data.obs.append({"site":"Atacama", "season":3})
data.obs.append({"site":"Pole", "season":1})
data.obs.append({"site":"Pole", "season":2})
data.obs.append({"site":"Pole", "season":3})
datasplit_site = data.split("site")
datasplit_season = data.split("season")
nt.assert_equal(len(datasplit_site), 2)
nt.assert_equal(len(datasplit_season), 3)
# Verify that the observations are shared
sum1 = 0
for value, site_data in datasplit_site:
for obs in site_data.obs:
assert("var1" not in obs)
obs["var1"] = 1
sum1 += 1
sum2 = 0
for value, season_data in datasplit_season:
for obs in season_data.obs:
sum2 += obs["var1"]
nt.assert_equal(sum1, sum2)
return
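# --- Illustrative sketch (assumption, mirroring the checks above): the tests
# treat distribute_uniform(totsamp, ntask) as returning (offset, nsamp) pairs
# whose sample counts add up to totsamp. A toy reference with that contract:
def _toy_distribute_uniform(totsamp, ntask):
    base, extra = divmod(totsamp, ntask)
    dist = []
    offset = 0
    for i in range(ntask):
        n = base + (1 if i < extra else 0)
        dist.append((offset, n))
        offset += n
    return dist

assert sum(n for _, n in _toy_distribute_uniform(100, 8)) == 100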
|
bsd-2-clause
| 3,222,813,290,255,280,000
| 27.773481
| 88
| 0.49424
| false
| 3.516543
| true
| false
| false
|
MrSnede/BalancingWheelRobot
|
mainWindow.py
|
1
|
19684
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'ui/mainWindow.ui'
#
# Created: Sat Nov 29 18:47:56 2014
# by: PyQt4 UI code generator 4.10.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName(_fromUtf8("MainWindow"))
MainWindow.resize(1237, 745)
self.centralwidget = QtGui.QWidget(MainWindow)
self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
self.verticalLayout_3 = QtGui.QVBoxLayout(self.centralwidget)
self.verticalLayout_3.setObjectName(_fromUtf8("verticalLayout_3"))
self.horizontalLayout_2 = QtGui.QHBoxLayout()
self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2"))
self.Serial_groupBox = QtGui.QGroupBox(self.centralwidget)
self.Serial_groupBox.setObjectName(_fromUtf8("Serial_groupBox"))
self.verticalLayout = QtGui.QVBoxLayout(self.Serial_groupBox)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.HLayOutSerialSettings = QtGui.QHBoxLayout()
self.HLayOutSerialSettings.setObjectName(_fromUtf8("HLayOutSerialSettings"))
self.portsComboBox = QtGui.QComboBox(self.Serial_groupBox)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.portsComboBox.sizePolicy().hasHeightForWidth())
self.portsComboBox.setSizePolicy(sizePolicy)
self.portsComboBox.setMinimumSize(QtCore.QSize(100, 27))
self.portsComboBox.setObjectName(_fromUtf8("portsComboBox"))
self.HLayOutSerialSettings.addWidget(self.portsComboBox)
self.refreshPortsPushButton = QtGui.QPushButton(self.Serial_groupBox)
self.refreshPortsPushButton.setEnabled(True)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Minimum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.refreshPortsPushButton.sizePolicy().hasHeightForWidth())
self.refreshPortsPushButton.setSizePolicy(sizePolicy)
self.refreshPortsPushButton.setMinimumSize(QtCore.QSize(38, 27))
self.refreshPortsPushButton.setMaximumSize(QtCore.QSize(38, 27))
self.refreshPortsPushButton.setText(_fromUtf8(""))
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/icons/refresh.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.refreshPortsPushButton.setIcon(icon)
self.refreshPortsPushButton.setIconSize(QtCore.QSize(16, 16))
self.refreshPortsPushButton.setObjectName(_fromUtf8("refreshPortsPushButton"))
self.HLayOutSerialSettings.addWidget(self.refreshPortsPushButton)
self.baudRateComboBox = QtGui.QComboBox(self.Serial_groupBox)
self.baudRateComboBox.setMinimumSize(QtCore.QSize(91, 27))
self.baudRateComboBox.setObjectName(_fromUtf8("baudRateComboBox"))
self.HLayOutSerialSettings.addWidget(self.baudRateComboBox)
self.connectPushButton = QtGui.QPushButton(self.Serial_groupBox)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.connectPushButton.sizePolicy().hasHeightForWidth())
self.connectPushButton.setSizePolicy(sizePolicy)
self.connectPushButton.setMinimumSize(QtCore.QSize(91, 27))
self.connectPushButton.setObjectName(_fromUtf8("connectPushButton"))
self.HLayOutSerialSettings.addWidget(self.connectPushButton)
self.disconnectPushButton = QtGui.QPushButton(self.Serial_groupBox)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.disconnectPushButton.sizePolicy().hasHeightForWidth())
self.disconnectPushButton.setSizePolicy(sizePolicy)
self.disconnectPushButton.setMinimumSize(QtCore.QSize(91, 27))
self.disconnectPushButton.setObjectName(_fromUtf8("disconnectPushButton"))
self.HLayOutSerialSettings.addWidget(self.disconnectPushButton)
self.verticalLayout.addLayout(self.HLayOutSerialSettings)
self.logPlainTextEdit = QtGui.QPlainTextEdit(self.Serial_groupBox)
self.logPlainTextEdit.setMinimumSize(QtCore.QSize(270, 200))
font = QtGui.QFont()
font.setFamily(_fromUtf8("Courier New"))
self.logPlainTextEdit.setFont(font)
self.logPlainTextEdit.setReadOnly(True)
self.logPlainTextEdit.setObjectName(_fromUtf8("logPlainTextEdit"))
self.verticalLayout.addWidget(self.logPlainTextEdit)
self.cmdLineEdit = QtGui.QLineEdit(self.Serial_groupBox)
self.cmdLineEdit.setMinimumSize(QtCore.QSize(0, 27))
font = QtGui.QFont()
font.setFamily(_fromUtf8("Courier New"))
self.cmdLineEdit.setFont(font)
self.cmdLineEdit.setDragEnabled(True)
self.cmdLineEdit.setPlaceholderText(_fromUtf8(""))
self.cmdLineEdit.setObjectName(_fromUtf8("cmdLineEdit"))
self.verticalLayout.addWidget(self.cmdLineEdit)
self.horizontalLayout_2.addWidget(self.Serial_groupBox)
self.verticalLayout_2 = QtGui.QVBoxLayout()
self.verticalLayout_2.setObjectName(_fromUtf8("verticalLayout_2"))
self.PID_groupBox_2 = QtGui.QGroupBox(self.centralwidget)
self.PID_groupBox_2.setObjectName(_fromUtf8("PID_groupBox_2"))
self.gridLayout = QtGui.QGridLayout(self.PID_groupBox_2)
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
self.D_Label = QtGui.QLabel(self.PID_groupBox_2)
self.D_Label.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.D_Label.setObjectName(_fromUtf8("D_Label"))
self.gridLayout.addWidget(self.D_Label, 2, 0, 1, 1)
self.I_Label = QtGui.QLabel(self.PID_groupBox_2)
self.I_Label.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.I_Label.setObjectName(_fromUtf8("I_Label"))
self.gridLayout.addWidget(self.I_Label, 1, 0, 1, 1)
self.P_ValueLabel = QtGui.QLabel(self.PID_groupBox_2)
self.P_ValueLabel.setMinimumSize(QtCore.QSize(50, 0))
self.P_ValueLabel.setText(_fromUtf8(""))
self.P_ValueLabel.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.P_ValueLabel.setObjectName(_fromUtf8("P_ValueLabel"))
self.gridLayout.addWidget(self.P_ValueLabel, 0, 1, 1, 1)
self.I_ValueLabel = QtGui.QLabel(self.PID_groupBox_2)
self.I_ValueLabel.setMinimumSize(QtCore.QSize(50, 0))
self.I_ValueLabel.setText(_fromUtf8(""))
self.I_ValueLabel.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.I_ValueLabel.setObjectName(_fromUtf8("I_ValueLabel"))
self.gridLayout.addWidget(self.I_ValueLabel, 1, 1, 1, 1)
self.P_HSlider = QtGui.QSlider(self.PID_groupBox_2)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.P_HSlider.sizePolicy().hasHeightForWidth())
self.P_HSlider.setSizePolicy(sizePolicy)
self.P_HSlider.setMaximum(3000)
self.P_HSlider.setSingleStep(100)
self.P_HSlider.setPageStep(300)
self.P_HSlider.setProperty("value", 2000)
self.P_HSlider.setOrientation(QtCore.Qt.Horizontal)
self.P_HSlider.setTickPosition(QtGui.QSlider.TicksBelow)
self.P_HSlider.setTickInterval(100)
self.P_HSlider.setObjectName(_fromUtf8("P_HSlider"))
self.gridLayout.addWidget(self.P_HSlider, 0, 2, 1, 1)
self.D_ValueLabel = QtGui.QLabel(self.PID_groupBox_2)
self.D_ValueLabel.setMinimumSize(QtCore.QSize(50, 0))
self.D_ValueLabel.setText(_fromUtf8(""))
self.D_ValueLabel.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.D_ValueLabel.setObjectName(_fromUtf8("D_ValueLabel"))
self.gridLayout.addWidget(self.D_ValueLabel, 2, 1, 1, 1)
self.P_Label = QtGui.QLabel(self.PID_groupBox_2)
self.P_Label.setMinimumSize(QtCore.QSize(20, 0))
self.P_Label.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.P_Label.setObjectName(_fromUtf8("P_Label"))
self.gridLayout.addWidget(self.P_Label, 0, 0, 1, 1)
self.D_HSlider = QtGui.QSlider(self.PID_groupBox_2)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.D_HSlider.sizePolicy().hasHeightForWidth())
self.D_HSlider.setSizePolicy(sizePolicy)
self.D_HSlider.setMaximum(3000)
self.D_HSlider.setSingleStep(100)
self.D_HSlider.setPageStep(300)
self.D_HSlider.setProperty("value", 2000)
self.D_HSlider.setOrientation(QtCore.Qt.Horizontal)
self.D_HSlider.setTickPosition(QtGui.QSlider.TicksBelow)
self.D_HSlider.setTickInterval(100)
self.D_HSlider.setObjectName(_fromUtf8("D_HSlider"))
self.gridLayout.addWidget(self.D_HSlider, 2, 2, 1, 1)
self.I_HSlider = QtGui.QSlider(self.PID_groupBox_2)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.I_HSlider.sizePolicy().hasHeightForWidth())
self.I_HSlider.setSizePolicy(sizePolicy)
self.I_HSlider.setMaximum(3000)
self.I_HSlider.setSingleStep(100)
self.I_HSlider.setPageStep(300)
self.I_HSlider.setProperty("value", 2000)
self.I_HSlider.setOrientation(QtCore.Qt.Horizontal)
self.I_HSlider.setTickPosition(QtGui.QSlider.TicksBelow)
self.I_HSlider.setTickInterval(100)
self.I_HSlider.setObjectName(_fromUtf8("I_HSlider"))
self.gridLayout.addWidget(self.I_HSlider, 1, 2, 1, 1)
self.verticalLayout_2.addWidget(self.PID_groupBox_2)
self.FreeSlider_groupBox_3 = QtGui.QGroupBox(self.centralwidget)
self.FreeSlider_groupBox_3.setObjectName(_fromUtf8("FreeSlider_groupBox_3"))
self.gridLayout_2 = QtGui.QGridLayout(self.FreeSlider_groupBox_3)
self.gridLayout_2.setObjectName(_fromUtf8("gridLayout_2"))
self.FreeSlider2_Label = QtGui.QLabel(self.FreeSlider_groupBox_3)
self.FreeSlider2_Label.setObjectName(_fromUtf8("FreeSlider2_Label"))
self.gridLayout_2.addWidget(self.FreeSlider2_Label, 1, 0, 1, 1)
self.FreeSlider1_Label = QtGui.QLabel(self.FreeSlider_groupBox_3)
self.FreeSlider1_Label.setObjectName(_fromUtf8("FreeSlider1_Label"))
self.gridLayout_2.addWidget(self.FreeSlider1_Label, 0, 0, 1, 1)
self.FreeSlider2_HSlider = QtGui.QSlider(self.FreeSlider_groupBox_3)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.FreeSlider2_HSlider.sizePolicy().hasHeightForWidth())
self.FreeSlider2_HSlider.setSizePolicy(sizePolicy)
self.FreeSlider2_HSlider.setMaximum(3000)
self.FreeSlider2_HSlider.setSingleStep(100)
self.FreeSlider2_HSlider.setPageStep(300)
self.FreeSlider2_HSlider.setProperty("value", 2000)
self.FreeSlider2_HSlider.setOrientation(QtCore.Qt.Horizontal)
self.FreeSlider2_HSlider.setTickPosition(QtGui.QSlider.TicksBelow)
self.FreeSlider2_HSlider.setTickInterval(100)
self.FreeSlider2_HSlider.setObjectName(_fromUtf8("FreeSlider2_HSlider"))
self.gridLayout_2.addWidget(self.FreeSlider2_HSlider, 1, 2, 1, 1)
self.FreeSlider2_ValueLabel = QtGui.QLabel(self.FreeSlider_groupBox_3)
self.FreeSlider2_ValueLabel.setMinimumSize(QtCore.QSize(50, 0))
self.FreeSlider2_ValueLabel.setText(_fromUtf8(""))
self.FreeSlider2_ValueLabel.setObjectName(_fromUtf8("FreeSlider2_ValueLabel"))
self.gridLayout_2.addWidget(self.FreeSlider2_ValueLabel, 1, 1, 1, 1)
self.FreeSlider1_HSlider = QtGui.QSlider(self.FreeSlider_groupBox_3)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.FreeSlider1_HSlider.sizePolicy().hasHeightForWidth())
self.FreeSlider1_HSlider.setSizePolicy(sizePolicy)
self.FreeSlider1_HSlider.setMaximum(3000)
self.FreeSlider1_HSlider.setSingleStep(100)
self.FreeSlider1_HSlider.setPageStep(300)
self.FreeSlider1_HSlider.setProperty("value", 2000)
self.FreeSlider1_HSlider.setOrientation(QtCore.Qt.Horizontal)
self.FreeSlider1_HSlider.setTickPosition(QtGui.QSlider.TicksBelow)
self.FreeSlider1_HSlider.setTickInterval(100)
self.FreeSlider1_HSlider.setObjectName(_fromUtf8("FreeSlider1_HSlider"))
self.gridLayout_2.addWidget(self.FreeSlider1_HSlider, 0, 2, 1, 1)
self.FreeSlider1_ValueLabel = QtGui.QLabel(self.FreeSlider_groupBox_3)
self.FreeSlider1_ValueLabel.setMinimumSize(QtCore.QSize(50, 0))
self.FreeSlider1_ValueLabel.setText(_fromUtf8(""))
self.FreeSlider1_ValueLabel.setObjectName(_fromUtf8("FreeSlider1_ValueLabel"))
self.gridLayout_2.addWidget(self.FreeSlider1_ValueLabel, 0, 1, 1, 1)
self.verticalLayout_2.addWidget(self.FreeSlider_groupBox_3)
self.FreeSwitches_groupBox = QtGui.QGroupBox(self.centralwidget)
self.FreeSwitches_groupBox.setObjectName(_fromUtf8("FreeSwitches_groupBox"))
self.horizontalLayout = QtGui.QHBoxLayout(self.FreeSwitches_groupBox)
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
self.Free_Free_checkBox_1 = QtGui.QCheckBox(self.FreeSwitches_groupBox)
self.Free_Free_checkBox_1.setObjectName(_fromUtf8("Free_Free_checkBox_1"))
self.horizontalLayout.addWidget(self.Free_Free_checkBox_1)
self.Free_checkBox_2 = QtGui.QCheckBox(self.FreeSwitches_groupBox)
self.Free_checkBox_2.setObjectName(_fromUtf8("Free_checkBox_2"))
self.horizontalLayout.addWidget(self.Free_checkBox_2)
self.Free_checkBox_3 = QtGui.QCheckBox(self.FreeSwitches_groupBox)
self.Free_checkBox_3.setObjectName(_fromUtf8("Free_checkBox_3"))
self.horizontalLayout.addWidget(self.Free_checkBox_3)
self.Free_checkBox_4 = QtGui.QCheckBox(self.FreeSwitches_groupBox)
self.Free_checkBox_4.setObjectName(_fromUtf8("Free_checkBox_4"))
self.horizontalLayout.addWidget(self.Free_checkBox_4)
self.Free_checkBox_5 = QtGui.QCheckBox(self.FreeSwitches_groupBox)
self.Free_checkBox_5.setObjectName(_fromUtf8("Free_checkBox_5"))
self.horizontalLayout.addWidget(self.Free_checkBox_5)
self.verticalLayout_2.addWidget(self.FreeSwitches_groupBox)
self.horizontalLayout_2.addLayout(self.verticalLayout_2)
self.verticalLayout_3.addLayout(self.horizontalLayout_2)
self.mpl_widget = MplWidget(self.centralwidget)
self.mpl_widget.setMinimumSize(QtCore.QSize(0, 300))
self.mpl_widget.setObjectName(_fromUtf8("mpl_widget"))
self.verticalLayout_3.addWidget(self.mpl_widget)
MainWindow.setCentralWidget(self.centralwidget)
self.menubar = QtGui.QMenuBar(MainWindow)
self.menubar.setGeometry(QtCore.QRect(0, 0, 1237, 27))
self.menubar.setObjectName(_fromUtf8("menubar"))
self.menuFile = QtGui.QMenu(self.menubar)
self.menuFile.setObjectName(_fromUtf8("menuFile"))
MainWindow.setMenuBar(self.menubar)
self.actionExit = QtGui.QAction(MainWindow)
self.actionExit.setObjectName(_fromUtf8("actionExit"))
self.menuFile.addAction(self.actionExit)
self.menubar.addAction(self.menuFile.menuAction())
self.retranslateUi(MainWindow)
QtCore.QObject.connect(self.actionExit, QtCore.SIGNAL(_fromUtf8("activated()")), MainWindow.close)
QtCore.QObject.connect(self.cmdLineEdit, QtCore.SIGNAL(_fromUtf8("returnPressed()")), self.logPlainTextEdit.paste)
QtCore.QObject.connect(self.P_HSlider, QtCore.SIGNAL(_fromUtf8("valueChanged(int)")), self.P_ValueLabel.setNum)
QtCore.QObject.connect(self.I_HSlider, QtCore.SIGNAL(_fromUtf8("valueChanged(int)")), self.I_ValueLabel.setNum)
QtCore.QObject.connect(self.D_HSlider, QtCore.SIGNAL(_fromUtf8("valueChanged(int)")), self.D_ValueLabel.setNum)
QtCore.QObject.connect(self.FreeSlider1_HSlider, QtCore.SIGNAL(_fromUtf8("valueChanged(int)")), self.FreeSlider1_ValueLabel.setNum)
QtCore.QObject.connect(self.FreeSlider2_HSlider, QtCore.SIGNAL(_fromUtf8("valueChanged(int)")), self.FreeSlider2_ValueLabel.setNum)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
MainWindow.setWindowTitle(_translate("MainWindow", "BotControlGUI", None))
self.Serial_groupBox.setTitle(_translate("MainWindow", "Serielle Verbindung", None))
self.connectPushButton.setText(_translate("MainWindow", "Connect", None))
self.disconnectPushButton.setText(_translate("MainWindow", "Disconnect", None))
self.cmdLineEdit.setText(_translate("MainWindow", ">LedBlinkTime=100<", None))
self.PID_groupBox_2.setTitle(_translate("MainWindow", "PID Regler", None))
self.D_Label.setText(_translate("MainWindow", "D :", None))
self.I_Label.setText(_translate("MainWindow", "I :", None))
self.P_Label.setText(_translate("MainWindow", "P :", None))
self.FreeSlider_groupBox_3.setTitle(_translate("MainWindow", "Freie Regler", None))
self.FreeSlider2_Label.setText(_translate("MainWindow", "Max Geschwindigkeit:", None))
self.FreeSlider1_Label.setText(_translate("MainWindow", "Max Beschleunigung :", None))
self.FreeSwitches_groupBox.setTitle(_translate("MainWindow", "Freie Schalter", None))
self.Free_Free_checkBox_1.setText(_translate("MainWindow", "Schalter 1", None))
self.Free_checkBox_2.setText(_translate("MainWindow", "Schalter 2", None))
self.Free_checkBox_3.setText(_translate("MainWindow", "Schalter 3", None))
self.Free_checkBox_4.setText(_translate("MainWindow", "Schalter 4", None))
self.Free_checkBox_5.setText(_translate("MainWindow", "Schalter 5", None))
self.menuFile.setTitle(_translate("MainWindow", "File", None))
self.actionExit.setText(_translate("MainWindow", "Exit", None))
from mplwidget import MplWidget
import mainWindow_rc
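# --- Illustrative usage sketch (assumption: the standard pattern for pyuic4
# output; not part of the generated file). The application presumably wires
# Ui_MainWindow into a QMainWindow elsewhere, roughly like so:
if __name__ == "__main__":
    import sys
    app = QtGui.QApplication(sys.argv)
    window = QtGui.QMainWindow()
    ui = Ui_MainWindow()
    ui.setupUi(window)
    window.show()
    sys.exit(app.exec_())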
|
gpl-2.0
| 4,252,222,543,709,044,000
| 61.888179
| 139
| 0.723024
| false
| 3.723094
| false
| false
| false
|
Krissbro/LondonGaymers
|
serverquotes/serverquotes.py
|
1
|
7819
|
import discord
from discord.ext import commands
from cogs.utils.dataIO import dataIO
from .utils import checks
from .utils.chat_formatting import escape_mass_mentions, pagify
import os
from random import choice as randchoice
try:
from tabulate import tabulate
except Exception as e:
raise RuntimeError("You must run `pip3 install tabulate`.") from e
PATH = 'data/serverquotes/'
JSON = PATH + 'quotes.json'
class ServerQuotes:
def __init__(self, bot):
self.bot = bot
self.quotes = dataIO.load_json(JSON)
def _get_random_quote(self, ctx):
sid = ctx.message.server.id
if sid not in self.quotes or len(self.quotes[sid]) == 0:
raise AssertionError("There are no quotes in this server!")
quotes = list(enumerate(self.quotes[sid]))
return randchoice(quotes)
def _get_random_author_quote(self, ctx, author):
sid = ctx.message.server.id
if sid not in self.quotes or len(self.quotes[sid]) == 0:
raise AssertionError("There are no quotes in this server!")
if isinstance(author, discord.User):
uid = author.id
quotes = [(i,q) for i,q in enumerate(self.quotes[sid]) if q['author_id'] == uid]
else:
quotes = [(i,q) for i,q in enumerate(self.quotes[sid]) if q['author_name'] == author]
if len(quotes) == 0:
raise commands.BadArgument("There are no quotes by %s." % author)
return randchoice(quotes)
def _add_quote(self, ctx, author, message):
sid = ctx.message.server.id
aid = ctx.message.author.id
if sid not in self.quotes:
self.quotes[sid] = []
author_name = 'Unknown'
author_id = None
if isinstance(author, discord.User):
author_name = author.display_name
author_id = author.id
elif isinstance(author, str):
author_name = author
quote = {'added_by': aid,
'author_name': author_name,
'author_id': author_id,
'text': escape_mass_mentions(message)}
self.quotes[sid].append(quote)
dataIO.save_json(JSON, self.quotes)
def _quote_author(self, ctx, quote):
if quote['author_id']:
name = self._get_name_by_id(ctx, quote['author_id'])
if quote['author_name'] and not name:
name = quote['author_name']
name += " (non-present user ID#%s)" % (quote['author_id'])
return name
elif quote['author_name']:
return quote['author_name']
else:
return "Unknown"
def _format_quote(self, ctx, quote):
qid, quote = quote
author = self._quote_author(ctx, quote)
return '"%s"\n—%s (quote #%i)' % (quote['text'], author, qid + 1)
def _get_name_by_id(self, ctx, uid):
member = discord.utils.get(ctx.message.server.members, id=uid)
if member:
return member.display_name
else:
return None
def _get_quote(self, ctx, author_or_num=None):
sid = ctx.message.server.id
if type(author_or_num) is discord.Member:
return self._get_random_author_quote(ctx, author_or_num)
if author_or_num:
try:
quote_id = int(author_or_num)
if quote_id > 0 and quote_id <= len(self.quotes[sid]):
return (quote_id - 1, self.quotes[sid][quote_id - 1])
else:
raise commands.BadArgument("Quote #%i does not exist." % quote_id)
except ValueError:
pass
try:
author = commands.MemberConverter(ctx, author_or_num).convert()
except commands.errors.BadArgument:
author = author_or_num.strip(' \t\n\r\x0b\x0c-–—') # whitespace + dashes
return self._get_random_author_quote(ctx, author)
return self._get_random_quote(ctx)
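    # Resolution order above: a discord.Member picks a random quote by that
    # member; an integer is treated as a 1-based quote number; any other
    # string is tried as a member name, then as a stored author name; with
    # no argument a random quote is returned.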
@commands.command(pass_context=True, no_pm=True)
@checks.serverowner_or_permissions(administrator=True)
async def rmquote(self, ctx, num: int):
"""Deletes a quote by its number
Use [p]lsquotes to find quote numbers
        Example: !rmquote 3"""
sid = ctx.message.server.id
if num > 0 and num <= len(self.quotes[sid]):
del self.quotes[sid][num-1]
await self.bot.say("Quote #%i deleted." % num)
dataIO.save_json(JSON, self.quotes)
else:
await self.bot.say("Quote #%i does not exist." % num)
@commands.command(pass_context=True, no_pm=True)
async def lsquotes(self, ctx):
"""Displays a list of all quotes"""
sid = ctx.message.server.id
if sid not in self.quotes:
raise commands.UserInputError("There are no quotes in this server!")
quotes = self.quotes[sid]
header = ['#', 'Author', 'Added by', 'Quote']
table = []
for i, q in enumerate(quotes):
text = q['text']
if len(text) > 60:
text = text[:60 - 3] + '...'
name = self._get_name_by_id(ctx, q['added_by'])
if not name:
name = "(non-present user ID#%s)" % q['added_by']
table.append((i + 1, self._quote_author(ctx, q), name, text))
tabulated = tabulate(table, header)
for page in pagify(tabulated, ['\n']):
await self.bot.whisper('```\n%s\n```' % page)
@commands.command(pass_context=True, no_pm=True)
@checks.serverowner_or_permissions(administrator=True)
async def addquote(self, ctx, message: str, *, author: str = None):
"""Adds a quote to the server quote list. The quote must be enclosed
in \"double quotes\". If a member mention or name is the last argument,
the quote will be stored as theirs. If not, the last argument will
be stored as the quote's author. If left empty, "Unknown" is used.
"""
if author:
try:
author = commands.MemberConverter(ctx, author).convert()
except commands.errors.BadArgument:
author = author.strip(' \t\n\r\x0b\x0c-–—') # whitespace + dashes
self._add_quote(ctx, author, message)
await self.bot.say("Quote added.")
@commands.command(pass_context=True, no_pm=True)
@commands.cooldown(6, 60, commands.BucketType.channel)
async def quote(self, ctx, *, author_or_num: str = None):
"""Say a stored quote!
Without any arguments, this command randomly selects from all stored
quotes. If you supply an author name, it randomly selects from among
that author's quotes. Finally, if given a number, that specific quote
        will be said, assuming it exists. Use [p]lsquotes to show all quotes, where [p] is the prefix.
"""
sid = ctx.message.server.id
if sid not in self.quotes or len(self.quotes[sid]) == 0:
await self.bot.say("There are no quotes in this server!")
return
try:
quote = self._get_quote(ctx, author_or_num)
except commands.BadArgument:
if author_or_num.lower().strip() in ['me', 'myself', 'self']:
quote = self._get_quote(ctx, ctx.message.author)
else:
raise
await self.bot.say(self._format_quote(ctx, quote))
def check_folder():
if not os.path.exists(PATH):
print("Creating serverquotes folder...")
os.makedirs(PATH)
def check_file():
if not dataIO.is_valid_json(JSON):
print("Creating default quotes.json...")
dataIO.save_json(JSON, {})
def setup(bot):
check_folder()
check_file()
n = ServerQuotes(bot)
bot.add_cog(n)
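# --- Illustrative sketch (assumption; IDs below are hypothetical): the shape
# of data/serverquotes/quotes.json, as written by _add_quote above. Server
# IDs map to lists of quote dicts.
_EXAMPLE_STORE = {
    "228937234": [
        {"added_by": "111", "author_name": "Alice",
         "author_id": "222", "text": "An example quote."},
    ],
}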
|
gpl-3.0
| 7,994,293,379,458,089,000
| 35.490654
| 101
| 0.577667
| false
| 3.774287
| false
| false
| false
|
stormi/tsunami
|
src/secondaires/crafting/rang.py
|
1
|
5518
|
# -*-coding:Utf-8 -*
# Copyright (c) 2015 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier contenant la classe Rang, détaillée plus bas."""
from abstraits.obase import BaseObj
from secondaires.crafting.exception import ExceptionCrafting
from secondaires.crafting.recette import Recette
class Rang(BaseObj):
"""Classe représentant un rang de guilde."""
def __init__(self, guilde, cle):
"""Constructeur de la fiche."""
BaseObj.__init__(self)
self.guilde = guilde
self.cle = cle
self.nom = "rang inconnu"
self.points_guilde = 10
self.recettes = []
self._construire()
def __getnewargs__(self):
return (None, "")
@property
def total_points_guilde(self):
"""Retourne les points de guilde consommés pour arriver à ce rang.
Si le rang a des prédécesseurs, retourne la somme des
points de guilde nécessités pour atteindre ce rang. Par
exemple, si un membre est au rang 2, il faut additionner
les points de guilde du rang 1 et du rang 2.
"""
# Cherche à trouver les rangs prédécesseurs
guilde = self.guilde
try:
indice = guilde.rangs.index(self)
except ValueError:
raise RangIntrouvable("le rang {} ne peut être trouvé " \
"dans la guilde {}".format(self.cle, guilde.cle))
precedents = guilde.rangs[:indice]
return sum(p.points_guilde for p in precedents) + self.points_guilde
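    # Worked example (illustrative; rank keys are hypothetical): with
    # guilde.rangs == [apprenti (10 points), compagnon (15 points)],
    # compagnon.total_points_guilde == 10 + 15 == 25.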
@property
def membres(self):
"""Retourne la liste des membres (personnages) à ce rang."""
progressions = self.guilde.membres.values()
membres = []
for progression in progressions:
if progression.rang is self:
membres.append(progression.membre)
return membres
@property
def nom_complet(self):
"""Retourne le nom complet du rang."""
membres = self.membres
ps = "s" if self.points_guilde > 1 else ""
ms = "s" if len(membres) > 1 else ""
msg = "{}, nom : {}, {} point{ps} de guilde ({} accumulés), " \
"{} membre{ms}".format(self.cle, self.nom, self.points_guilde,
self.total_points_guilde, len(membres), ps=ps, ms=ms)
return msg
@property
def rangs_parents(self):
"""Retourne les rangs parents, incluant self."""
guilde = self.guilde
try:
indice = guilde.rangs.index(self)
except ValueError:
raise RangIntrouvable("le rang {} ne peut être trouvé " \
"dans la guilde {}".format(self.cle, guilde.cle))
return guilde.rangs[:indice + 1]
def get_recette(self, cle, exception=True):
"""Récupère la recette correspondant à la clé.
La clé est celle du résultat.
"""
cle = cle.lower()
for recette in self.recettes:
if recette.resultat == cle:
return recette
if exception:
raise ValueError("Recette {} inconnue".format(repr(cle)))
def ajouter_recette(self, resultat):
"""Ajoute une recette.
Le résultat doit être la clé du prototype de résultat.
"""
if self.get_recette(resultat, False):
raise ValueError("La recette {} existe déjà".format(
repr(resultat)))
recette = Recette(self)
recette.resultat = resultat
self.recettes.append(recette)
return recette
def supprimer_recette(self, cle):
"""Retire la recette spécifiée."""
cle = cle.lower()
for recette in list(self.recettes):
if recette.resultat == cle:
self.recettes.remove(recette)
recette.detruire()
return
raise ValueError("Recette {} introuvable".format(repr(cle)))
class RangIntrouvable(ExceptionCrafting):
"""Exception levée si le rang de la guilde est introuvable."""
pass
|
bsd-3-clause
| -3,393,670,967,765,601,300
| 33.71519
| 79
| 0.643209
| false
| 3.630046
| false
| false
| false
|
hastexo/edx-platform
|
openedx/core/djangoapps/content/course_overviews/models.py
|
1
|
35401
|
"""
Declaration of CourseOverview model
"""
import json
import logging
from urlparse import urlparse, urlunparse
from django.conf import settings
from django.db import models, transaction
from django.db.models.fields import BooleanField, DateTimeField, DecimalField, TextField, FloatField, IntegerField
from django.db.utils import IntegrityError
from django.template import defaultfilters
from ccx_keys.locator import CCXLocator
from model_utils.models import TimeStampedModel
from config_models.models import ConfigurationModel
from lms.djangoapps import django_comment_client
from openedx.core.djangoapps.catalog.models import CatalogIntegration
from openedx.core.djangoapps.lang_pref.api import get_closest_released_language
from openedx.core.djangoapps.models.course_details import CourseDetails
from static_replace.models import AssetBaseUrlConfig
from xmodule import course_metadata_utils, block_metadata_utils
from xmodule.course_module import CourseDescriptor, DEFAULT_START_DATE
from xmodule.error_module import ErrorDescriptor
from xmodule.modulestore.django import modulestore
from openedx.core.djangoapps.xmodule_django.models import CourseKeyField, UsageKeyField
log = logging.getLogger(__name__)
class CourseOverview(TimeStampedModel):
"""
Model for storing and caching basic information about a course.
This model contains basic course metadata such as an ID, display name,
image URL, and any other information that would be necessary to display
a course as part of:
user dashboard (enrolled courses)
course catalog (courses to enroll in)
course about (meta data about the course)
"""
class Meta(object):
app_label = 'course_overviews'
# IMPORTANT: Bump this whenever you modify this model and/or add a migration.
VERSION = 6
# Cache entry versioning.
version = IntegerField()
# Course identification
id = CourseKeyField(db_index=True, primary_key=True, max_length=255)
_location = UsageKeyField(max_length=255)
org = TextField(max_length=255, default='outdated_entry')
display_name = TextField(null=True)
display_number_with_default = TextField()
display_org_with_default = TextField()
# Start/end dates
start = DateTimeField(null=True)
end = DateTimeField(null=True)
advertised_start = TextField(null=True)
announcement = DateTimeField(null=True)
# URLs
course_image_url = TextField()
social_sharing_url = TextField(null=True)
end_of_course_survey_url = TextField(null=True)
# Certification data
certificates_display_behavior = TextField(null=True)
certificates_show_before_end = BooleanField(default=False)
cert_html_view_enabled = BooleanField(default=False)
has_any_active_web_certificate = BooleanField(default=False)
cert_name_short = TextField()
cert_name_long = TextField()
certificate_available_date = DateTimeField(default=None, null=True)
# Grading
lowest_passing_grade = DecimalField(max_digits=5, decimal_places=2, null=True)
# Access parameters
days_early_for_beta = FloatField(null=True)
mobile_available = BooleanField(default=False)
visible_to_staff_only = BooleanField(default=False)
_pre_requisite_courses_json = TextField() # JSON representation of list of CourseKey strings
# Enrollment details
enrollment_start = DateTimeField(null=True)
enrollment_end = DateTimeField(null=True)
enrollment_domain = TextField(null=True)
invitation_only = BooleanField(default=False)
max_student_enrollments_allowed = IntegerField(null=True)
# Catalog information
catalog_visibility = TextField(null=True)
short_description = TextField(null=True)
course_video_url = TextField(null=True)
effort = TextField(null=True)
self_paced = BooleanField(default=False)
marketing_url = TextField(null=True)
eligible_for_financial_aid = BooleanField(default=True)
language = TextField(null=True)
@classmethod
def _create_or_update(cls, course):
"""
Creates or updates a CourseOverview object from a CourseDescriptor.
Does not touch the database, simply constructs and returns an overview
from the given course.
Arguments:
course (CourseDescriptor): any course descriptor object
Returns:
CourseOverview: created or updated overview extracted from the given course
"""
from lms.djangoapps.certificates.api import get_active_web_certificate
from openedx.core.lib.courses import course_image_url
# Workaround for a problem discovered in https://openedx.atlassian.net/browse/TNL-2806.
# If the course has a malformed grading policy such that
# course._grading_policy['GRADE_CUTOFFS'] = {}, then
# course.lowest_passing_grade will raise a ValueError.
# Work around this for now by defaulting to None.
try:
lowest_passing_grade = course.lowest_passing_grade
except ValueError:
lowest_passing_grade = None
display_name = course.display_name
start = course.start
end = course.end
max_student_enrollments_allowed = course.max_student_enrollments_allowed
if isinstance(course.id, CCXLocator):
from lms.djangoapps.ccx.utils import get_ccx_from_ccx_locator
ccx = get_ccx_from_ccx_locator(course.id)
display_name = ccx.display_name
start = ccx.start
end = ccx.due
max_student_enrollments_allowed = ccx.max_student_enrollments_allowed
course_overview = cls.objects.filter(id=course.id)
if course_overview.exists():
log.info('Updating course overview for %s.', unicode(course.id))
course_overview = course_overview.first()
else:
log.info('Creating course overview for %s.', unicode(course.id))
course_overview = cls()
course_overview.version = cls.VERSION
course_overview.id = course.id
course_overview._location = course.location
course_overview.org = course.location.org
course_overview.display_name = display_name
course_overview.display_number_with_default = course.display_number_with_default
course_overview.display_org_with_default = course.display_org_with_default
course_overview.start = start
course_overview.end = end
course_overview.advertised_start = course.advertised_start
course_overview.announcement = course.announcement
course_overview.course_image_url = course_image_url(course)
course_overview.social_sharing_url = course.social_sharing_url
course_overview.certificates_display_behavior = course.certificates_display_behavior
course_overview.certificates_show_before_end = course.certificates_show_before_end
course_overview.cert_html_view_enabled = course.cert_html_view_enabled
course_overview.has_any_active_web_certificate = (get_active_web_certificate(course) is not None)
course_overview.cert_name_short = course.cert_name_short
course_overview.cert_name_long = course.cert_name_long
course_overview.certificate_available_date = course.certificate_available_date
course_overview.lowest_passing_grade = lowest_passing_grade
course_overview.end_of_course_survey_url = course.end_of_course_survey_url
course_overview.days_early_for_beta = course.days_early_for_beta
course_overview.mobile_available = course.mobile_available
course_overview.visible_to_staff_only = course.visible_to_staff_only
course_overview._pre_requisite_courses_json = json.dumps(course.pre_requisite_courses)
course_overview.enrollment_start = course.enrollment_start
course_overview.enrollment_end = course.enrollment_end
course_overview.enrollment_domain = course.enrollment_domain
course_overview.invitation_only = course.invitation_only
course_overview.max_student_enrollments_allowed = max_student_enrollments_allowed
course_overview.catalog_visibility = course.catalog_visibility
course_overview.short_description = CourseDetails.fetch_about_attribute(course.id, 'short_description')
course_overview.effort = CourseDetails.fetch_about_attribute(course.id, 'effort')
course_overview.course_video_url = CourseDetails.fetch_video_url(course.id)
course_overview.self_paced = course.self_paced
if not CatalogIntegration.is_enabled():
course_overview.language = course.language
return course_overview
@classmethod
def load_from_module_store(cls, course_id):
"""
Load a CourseDescriptor, create or update a CourseOverview from it, cache the
overview, and return it.
Arguments:
course_id (CourseKey): the ID of the course overview to be loaded.
Returns:
CourseOverview: overview of the requested course.
Raises:
- CourseOverview.DoesNotExist if the course specified by course_id
was not found.
- IOError if some other error occurs while trying to load the
course from the module store.
"""
store = modulestore()
with store.bulk_operations(course_id):
course = store.get_course(course_id)
if isinstance(course, CourseDescriptor):
course_overview = cls._create_or_update(course)
try:
with transaction.atomic():
course_overview.save()
# Remove and recreate all the course tabs
CourseOverviewTab.objects.filter(course_overview=course_overview).delete()
CourseOverviewTab.objects.bulk_create([
CourseOverviewTab(tab_id=tab.tab_id, course_overview=course_overview)
for tab in course.tabs
])
# Remove and recreate course images
CourseOverviewImageSet.objects.filter(course_overview=course_overview).delete()
CourseOverviewImageSet.create(course_overview, course)
except IntegrityError:
# There is a rare race condition that will occur if
                    # CourseOverview.get_from_id is called while another
                    # identical overview is already in the process
# of being created.
# One of the overviews will be saved normally, while the
# other one will cause an IntegrityError because it tries
# to save a duplicate.
# (see: https://openedx.atlassian.net/browse/TNL-2854).
pass
except Exception: # pylint: disable=broad-except
log.exception(
"CourseOverview for course %s failed!",
course_id,
)
raise
return course_overview
elif course is not None:
                raise IOError(
                    "Error while loading course {} from the module store: {}".format(
                        unicode(course_id),
                        course.error_msg if isinstance(course, ErrorDescriptor) else unicode(course),
                    )
                )
else:
raise cls.DoesNotExist()
@classmethod
def get_from_id(cls, course_id):
"""
Load a CourseOverview object for a given course ID.
First, we try to load the CourseOverview from the database. If it
doesn't exist, we load the entire course from the modulestore, create a
CourseOverview object from it, and then cache it in the database for
future use.
Arguments:
course_id (CourseKey): the ID of the course overview to be loaded.
Returns:
CourseOverview: overview of the requested course.
Raises:
- CourseOverview.DoesNotExist if the course specified by course_id
was not found.
- IOError if some other error occurs while trying to load the
course from the module store.
"""
try:
course_overview = cls.objects.select_related('image_set').get(id=course_id)
if course_overview.version < cls.VERSION:
# Throw away old versions of CourseOverview, as they might contain stale data.
course_overview.delete()
course_overview = None
except cls.DoesNotExist:
course_overview = None
# Regenerate the thumbnail images if they're missing (either because
# they were never generated, or because they were flushed out after
# a change to CourseOverviewImageConfig.
if course_overview and not hasattr(course_overview, 'image_set'):
CourseOverviewImageSet.create(course_overview)
return course_overview or cls.load_from_module_store(course_id)
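    # Illustrative usage (hypothetical course key; CourseKey comes from
    # opaque_keys.edx.keys):
    #     overview = CourseOverview.get_from_id(
    #         CourseKey.from_string("course-v1:edX+DemoX+Demo_2014"))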
@classmethod
def get_from_ids_if_exists(cls, course_ids):
"""
Return a dict mapping course_ids to CourseOverviews, if they exist.
This method will *not* generate new CourseOverviews or delete outdated
ones. It exists only as a small optimization used when CourseOverviews
are known to exist, for common situations like the student dashboard.
Callers should assume that this list is incomplete and fall back to
get_from_id if they need to guarantee CourseOverview generation.
"""
return {
overview.id: overview
for overview
in cls.objects.select_related('image_set').filter(
id__in=course_ids,
version__gte=cls.VERSION
)
}
def clean_id(self, padding_char='='):
"""
Returns a unique deterministic base32-encoded ID for the course.
Arguments:
padding_char (str): Character used for padding at end of base-32
-encoded string, defaulting to '='
"""
return course_metadata_utils.clean_course_key(self.location.course_key, padding_char)
@property
def location(self):
"""
Returns the UsageKey of this course.
UsageKeyField has a strange behavior where it fails to parse the "run"
of a course out of the serialized form of a Mongo Draft UsageKey. This
method is a wrapper around _location attribute that fixes the problem
by calling map_into_course, which restores the run attribute.
"""
if self._location.run is None:
self._location = self._location.map_into_course(self.id)
return self._location
@property
def number(self):
"""
Returns this course's number.
This is a "number" in the sense of the "course numbers" that you see at
lots of universities. For example, given a course
"Intro to Computer Science" with the course key "edX/CS-101/2014", the
course number would be "CS-101"
"""
return course_metadata_utils.number_for_course_location(self.location)
@property
def url_name(self):
"""
Returns this course's URL name.
"""
return block_metadata_utils.url_name_for_block(self)
@property
def display_name_with_default(self):
"""
Return reasonable display name for the course.
"""
return block_metadata_utils.display_name_with_default(self)
@property
def display_name_with_default_escaped(self):
"""
DEPRECATED: use display_name_with_default
Return html escaped reasonable display name for the course.
Note: This newly introduced method should not be used. It was only
introduced to enable a quick search/replace and the ability to slowly
migrate and test switching to display_name_with_default, which is no
longer escaped.
"""
return block_metadata_utils.display_name_with_default_escaped(self)
@property
def dashboard_start_display(self):
"""
        Return the start date to display on the learner's dashboard, preferably `Course Advertised Start`
"""
return self.advertised_start or self.start
def has_started(self):
"""
        Returns whether the course has started.
"""
return course_metadata_utils.has_course_started(self.start)
def has_ended(self):
"""
Returns whether the course has ended.
"""
return course_metadata_utils.has_course_ended(self.end)
def has_marketing_url(self):
"""
Returns whether the course has marketing url.
"""
return settings.FEATURES.get('ENABLE_MKTG_SITE') and bool(self.marketing_url)
def has_social_sharing_url(self):
"""
Returns whether the course has social sharing url.
"""
is_social_sharing_enabled = getattr(settings, 'SOCIAL_SHARING_SETTINGS', {}).get('CUSTOM_COURSE_URLS')
return is_social_sharing_enabled and bool(self.social_sharing_url)
def starts_within(self, days):
"""
        Returns True if the course starts within the given number of days; otherwise returns False.
"""
return course_metadata_utils.course_starts_within(self.start, days)
@property
def start_date_is_still_default(self):
"""
Checks if the start date set for the course is still default, i.e.
.start has not been modified, and .advertised_start has not been set.
"""
return course_metadata_utils.course_start_date_is_default(
self.start,
self.advertised_start,
)
@property
def sorting_score(self):
"""
        Returns a tuple that can be used to sort the courses according
        to how "new" they are. The "newness" score is computed using a
heuristic that takes into account the announcement and
(advertised) start dates of the course if available.
The lower the number the "newer" the course.
"""
return course_metadata_utils.sorting_score(self.start, self.advertised_start, self.announcement)
@property
def start_type(self):
"""
Returns the type of the course's 'start' field.
"""
if self.advertised_start:
return u'string'
elif self.start != DEFAULT_START_DATE:
return u'timestamp'
else:
return u'empty'
@property
def start_display(self):
"""
Returns the display value for the course's start date.
"""
if self.advertised_start:
return self.advertised_start
elif self.start != DEFAULT_START_DATE:
return defaultfilters.date(self.start, "DATE_FORMAT")
else:
return None
def may_certify(self):
"""
Returns whether it is acceptable to show the student a certificate
download link.
"""
return course_metadata_utils.may_certify_for_course(
self.certificates_display_behavior,
self.certificates_show_before_end,
self.has_ended(),
self.certificate_available_date,
self.self_paced
)
@property
def pre_requisite_courses(self):
"""
Returns a list of ID strings for this course's prerequisite courses.
"""
return json.loads(self._pre_requisite_courses_json)
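    # Example (hypothetical): a _pre_requisite_courses_json of
    # '["course-v1:edX+CS101+2014"]' yields [u'course-v1:edX+CS101+2014'].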
@pre_requisite_courses.setter
def pre_requisite_courses(self, value):
"""
Django requires there be a setter for this, but it is not
necessary for the way we currently use it. Due to the way
        CourseOverviews are constructed, raising errors here will
cause a lot of issues. These should not be mutable after
construction, so for now we just eat this.
"""
pass
@classmethod
def update_select_courses(cls, course_keys, force_update=False):
"""
A side-effecting method that updates CourseOverview objects for
the given course_keys.
Arguments:
course_keys (list[CourseKey]): Identifies for which courses to
return CourseOverview objects.
force_update (boolean): Optional parameter that indicates
whether the requested CourseOverview objects should be
forcefully updated (i.e., re-synched with the modulestore).
"""
log.info('Generating course overview for %d courses.', len(course_keys))
log.debug('Generating course overview(s) for the following courses: %s', course_keys)
action = CourseOverview.load_from_module_store if force_update else CourseOverview.get_from_id
for course_key in course_keys:
try:
action(course_key)
except Exception as ex: # pylint: disable=broad-except
log.exception(
'An error occurred while generating course overview for %s: %s',
unicode(course_key),
ex.message,
)
log.info('Finished generating course overviews.')
@classmethod
def get_all_courses(cls, orgs=None, filter_=None):
"""
Returns all CourseOverview objects in the database.
Arguments:
orgs (list[string]): Optional parameter that allows case-insensitive
filtering by organization.
filter_ (dict): Optional parameter that allows custom filtering.
"""
# Note: If a newly created course is not returned in this QueryList,
# make sure the "publish" signal was emitted when the course was
# created. For tests using CourseFactory, use emit_signals=True.
course_overviews = CourseOverview.objects.all()
if orgs:
# In rare cases, courses belonging to the same org may be accidentally assigned
# an org code with a different casing (e.g., Harvardx as opposed to HarvardX).
# Case-insensitive matching allows us to deal with this kind of dirty data.
course_overviews = course_overviews.filter(org__iregex=r'(' + '|'.join(orgs) + ')')
if filter_:
course_overviews = course_overviews.filter(**filter_)
return course_overviews
@classmethod
def get_all_course_keys(cls):
"""
Returns all course keys from course overviews.
"""
return CourseOverview.objects.values_list('id', flat=True)
def is_discussion_tab_enabled(self):
"""
Returns True if course has discussion tab and is enabled
"""
tabs = self.tabs.all()
        # importing at module level creates a circular import; hence
        # is_discussion_enabled is referenced explicitly here
for tab in tabs:
if tab.tab_id == "discussion" and django_comment_client.utils.is_discussion_enabled(self.id):
return True
return False
@property
def image_urls(self):
"""
Return a dict with all known URLs for this course image.
Current resolutions are:
raw = original upload from the user
small = thumbnail with dimensions CourseOverviewImageConfig.current().small
large = thumbnail with dimensions CourseOverviewImageConfig.current().large
If no thumbnails exist, the raw (originally uploaded) image will be
returned for all resolutions.
"""
# This is either the raw image that the course team uploaded, or the
# settings.DEFAULT_COURSE_ABOUT_IMAGE_URL if they didn't specify one.
raw_image_url = self.course_image_url
# Default all sizes to return the raw image if there is no
# CourseOverviewImageSet associated with this CourseOverview. This can
# happen because we're disabled via CourseOverviewImageConfig.
urls = {
'raw': raw_image_url,
'small': raw_image_url,
'large': raw_image_url,
}
# If we do have a CourseOverviewImageSet, we still default to the raw
# images if our thumbnails are blank (might indicate that there was a
# processing error of some sort while trying to generate thumbnails).
if hasattr(self, 'image_set') and CourseOverviewImageConfig.current().enabled:
urls['small'] = self.image_set.small_url or raw_image_url
urls['large'] = self.image_set.large_url or raw_image_url
return self.apply_cdn_to_urls(urls)
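    # Example of the returned mapping (hypothetical asset URLs):
    #     {'raw': '/asset-v1:edX+DemoX+Demo_2014+type@asset+block@course.jpg',
    #      'small': '/asset-v1:.../course-small.jpg',
    #      'large': '/asset-v1:.../course-large.jpg'}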
@property
def pacing(self):
""" Returns the pacing for the course.
Potential values:
self: Self-paced courses
instructor: Instructor-led courses
"""
return 'self' if self.self_paced else 'instructor'
@property
def closest_released_language(self):
"""
        Returns the language code that most closely matches this course's language and is fully
supported by the LMS, or None if there are no fully supported languages that
match the target.
"""
return get_closest_released_language(self.language) if self.language else None
def apply_cdn_to_urls(self, image_urls):
"""
Given a dict of resolutions -> urls, return a copy with CDN applied.
If CDN does not exist or is disabled, just returns the original. The
URLs that we store in CourseOverviewImageSet are all already top level
paths, so we don't need to go through the /static remapping magic that
happens with other course assets. We just need to add the CDN server if
appropriate.
"""
cdn_config = AssetBaseUrlConfig.current()
if not cdn_config.enabled:
return image_urls
base_url = cdn_config.base_url
return {
resolution: self._apply_cdn_to_url(url, base_url)
for resolution, url in image_urls.items()
}
def _apply_cdn_to_url(self, url, base_url):
"""
Applies a new CDN/base URL to the given URL.
If a URL is absolute, we skip switching the host since it could
be a hostname that isn't behind our CDN, and we could unintentionally
break the URL overall.
"""
# The URL can't be empty.
if not url:
return url
_, netloc, path, params, query, fragment = urlparse(url)
# If this is an absolute URL, just return it as is. It could be a domain
# that isn't ours, and thus CDNing it would actually break it.
if netloc:
return url
return urlunparse((None, base_url, path, params, query, fragment))
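    # Example (hypothetical values; Python 2 urlparse semantics):
    #     _apply_cdn_to_url('/static/logo.png', 'cdn.example.com')
    #     returns '//cdn.example.com/static/logo.png' (a scheme-relative URL)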
def __unicode__(self):
"""Represent ourselves with the course key."""
return unicode(self.id)
class CourseOverviewTab(models.Model):
"""
Model for storing and caching tabs information of a course.
"""
tab_id = models.CharField(max_length=50)
course_overview = models.ForeignKey(CourseOverview, db_index=True, related_name="tabs")
class CourseOverviewImageSet(TimeStampedModel):
"""
Model for Course overview images. Each column is an image type/size.
You should basically never use this class directly. Read from
CourseOverview.image_urls instead.
Special Notes on Deployment/Rollback/Changes:
1. By default, this functionality is disabled. To turn it on, you have to
create a CourseOverviewImageConfig entry via Django Admin and select
enabled=True.
2. If it is enabled in configuration, it will lazily create thumbnails as
individual CourseOverviews are requested. This is independent of the
CourseOverview's cls.VERSION scheme. This is to better support the use
case where someone might want to change the thumbnail resolutions for
their theme -- we didn't want to tie the code-based data schema of
CourseOverview to configuration changes.
3. A CourseOverviewImageSet is automatically deleted when the CourseOverview
it belongs to is deleted. So it will be regenerated whenever there's a
new publish or the CourseOverview schema version changes. It's not
particularly smart about this, and will just re-write the same thumbnails
over and over to the same location without checking to see if there were
changes.
4. Just because a CourseOverviewImageSet is successfully created does not
mean that any thumbnails exist. There might have been a processing error,
or there might simply be no source image to create a thumbnail out of.
In this case, accessing CourseOverview.image_urls will return the value
for course.course_image_url for all resolutions. CourseOverviewImageSet
will *not* try to regenerate if there is a model entry with blank values
for the URLs -- the assumption is that either there's no data there or
something has gone wrong and needs fixing in code.
5. If you want to change thumbnail resolutions, you need to create a new
CourseOverviewImageConfig with the desired dimensions and then wipe the
values in CourseOverviewImageSet.
Logical next steps that I punted on for this first cut:
1. Converting other parts of the app to use this.
Our first cut only affects About Pages and the Student Dashboard. But
most places that use course_image_url() should be converted -- e.g.
course discovery, mobile, etc.
2. Center cropping the image before scaling.
This is desirable, but it involves a few edge cases (what the rounding
policy is, what to do with undersized images, etc.) The behavior that
we implemented is at least no worse than what was already there in terms
of distorting images.
3. Automatically invalidating entries based on CourseOverviewImageConfig.
There are two basic paths I can think of for this. The first is to
completely wipe this table when the config changes. The second is to
actually tie the config as a foreign key from this model -- so you could
do the comparison to see if the image_set's config_id matched
CourseOverviewImageConfig.current() and invalidate it if they didn't
match. I punted on this mostly because it's just not something that
happens much at all in practice, there is an understood (if manual)
process to do it, and it can happen in a follow-on PR if anyone is
interested in extending this functionality.
"""
course_overview = models.OneToOneField(CourseOverview, db_index=True, related_name="image_set")
small_url = models.TextField(blank=True, default="")
large_url = models.TextField(blank=True, default="")
@classmethod
def create(cls, course_overview, course=None):
"""
Create thumbnail images for this CourseOverview.
This will save the CourseOverviewImageSet before it returns.
"""
from openedx.core.lib.courses import create_course_image_thumbnail
# If image thumbnails are not enabled, do nothing.
config = CourseOverviewImageConfig.current()
if not config.enabled:
return
# If a course object was provided, use that. Otherwise, pull it from
# CourseOverview's course_id. This happens because sometimes we are
# generated as part of the CourseOverview creation (course is available
# and passed in), and sometimes the CourseOverview already exists.
if not course:
course = modulestore().get_course(course_overview.id)
image_set = cls(course_overview=course_overview)
if course.course_image:
            # Try to create thumbnails of the course image. If this fails for any
# reason (weird format, non-standard URL, etc.), the URLs will default
# to being blank. No matter what happens, we don't want to bubble up
# a 500 -- an image_set is always optional.
try:
image_set.small_url = create_course_image_thumbnail(course, config.small)
image_set.large_url = create_course_image_thumbnail(course, config.large)
except Exception: # pylint: disable=broad-except
log.exception(
"Could not create thumbnail for course %s with image %s (small=%s), (large=%s)",
course.id,
course.course_image,
config.small,
config.large
)
# Regardless of whether we created thumbnails or not, we need to save
# this record before returning. If no thumbnails were created (there was
# an error or the course has no source course_image), our url fields
# just keep their blank defaults.
try:
with transaction.atomic():
image_set.save()
course_overview.image_set = image_set
except (IntegrityError, ValueError):
# In the event of a race condition that tries to save two image sets
# to the same CourseOverview, we'll just silently pass on the one
# that fails. They should be the same data anyway.
#
# The ValueError above is to catch the following error that can
# happen in Django 1.8.4+ if the CourseOverview object fails to save
# (again, due to race condition).
#
# Example: ValueError: save() prohibited to prevent data loss due
# to unsaved related object 'course_overview'.")
pass
def __unicode__(self):
return u"CourseOverviewImageSet({}, small_url={}, large_url={})".format(
self.course_overview_id, self.small_url, self.large_url
)
class CourseOverviewImageConfig(ConfigurationModel):
"""
This sets the size of the thumbnail images that Course Overviews will generate
to display on the about, info, and student dashboard pages. If you make any
changes to this, you will have to regenerate CourseOverviews in order for it
to take effect. You might want to do this if you're doing precise theming of
your install of edx-platform... but really, you probably don't want to do this
at all at the moment, given how new this is. :-P
"""
# Small thumbnail, for things like the student dashboard
small_width = models.IntegerField(default=375)
small_height = models.IntegerField(default=200)
# Large thumbnail, for things like the about page
large_width = models.IntegerField(default=750)
large_height = models.IntegerField(default=400)
@property
def small(self):
"""Tuple for small image dimensions in pixels -- (width, height)"""
return (self.small_width, self.small_height)
@property
def large(self):
"""Tuple for large image dimensions in pixels -- (width, height)"""
return (self.large_width, self.large_height)
def __unicode__(self):
return u"CourseOverviewImageConfig(enabled={}, small={}, large={})".format(
self.enabled, self.small, self.large
)
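
# Hedged usage sketch (not part of the original module): reading the current
# thumbnail configuration and a CourseOverview's image URLs. The overview
# instance is assumed to exist already; image_urls is the accessor the
# docstrings above tell callers to use.
def _example_read_image_urls(course_overview):
    """Illustrative only: resolution -> URL dict plus the configured sizes."""
    config = CourseOverviewImageConfig.current()
    # config.small / config.large are (width, height) tuples; by default
    # (375, 200) and (750, 400) per the model fields above.
    return course_overview.image_urls, config.small, config.large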
|
agpl-3.0
| 2,396,641,003,756,264,000
| 40.211874
| 114
| 0.651225
| false
| 4.524668
| true
| false
| false
|
Oire/TWBlue
|
src/twitter/twitter.py
|
1
|
3261
|
# -*- coding: utf-8 -*-
import BaseHTTPServer
import webbrowser
from urlparse import urlparse, parse_qs
from twython import Twython, TwythonError
import config
import application
import output
import sound
import time
logged = False
verifier = None
class handler(BaseHTTPServer.BaseHTTPRequestHandler):
def do_GET(self):
global logged
self.send_response(200)
self.send_header("Content-type", "text/html")
self.end_headers()
logged = True
params = parse_qs(urlparse(self.path).query)
global verifier
verifier = params.get('oauth_verifier', [None])[0]
self.wfile.write("You have successfully logged in to Twitter with TW Blue. "
"You can close this window now.")
self.wfile.close()
class twitter(object):
def login(self, user_key=None, user_secret=None):
if user_key != None and user_secret != None:
self.twitter = Twython(application.app_key, application.app_secret, user_key, user_secret)
elif config.main != None:
self.twitter = Twython(application.app_key, application.app_secret, config.main["twitter"]["user_key"], config.main["twitter"]["user_secret"])
else:
self.twitter = Twython(application.app_key, application.app_secret, self.final_step['oauth_token'], self.final_step['oauth_token_secret'])
self.credentials = self.twitter.verify_credentials()
def authorise(self):
httpd = BaseHTTPServer.HTTPServer(('127.0.0.1', 8080), handler)
twitter = Twython(application.app_key, application.app_secret, auth_endpoint='authorize')
auth = twitter.get_authentication_tokens("http://127.0.0.1:8080")
webbrowser.open_new_tab(auth['auth_url'])
global logged, verifier
        while not logged:
httpd.handle_request()
self.twitter = Twython(application.app_key, application.app_secret, auth['oauth_token'], auth['oauth_token_secret'])
final = self.twitter.get_authorized_tokens(verifier)
self.save_configuration(final["oauth_token"], final["oauth_token_secret"])
httpd.server_close()
def save_configuration(self, user_key=None, user_secret=None):
if user_key != None and user_secret != None:
config.main["twitter"]["user_key"] = user_key
config.main["twitter"]["user_secret"] = user_secret
else:
config.main['twitter']['user_key'] = self.final_step['oauth_token']
config.main['twitter']['user_secret'] = self.final_step['oauth_token_secret']
config.main.write()
    def api_call(self, call_name, action="", _sound=None, report_success=False, report_failure=True, preexec_message="", *args, **kwargs):
        finished = False
        tries = 0
        val = None
        if preexec_message:
            output.speak(preexec_message, True)
        while not finished and tries < 25:
            try:
                val = getattr(self.twitter, call_name)(*args, **kwargs)
                finished = True
            except TwythonError as e:
                output.speak(e.message)
                if report_failure and hasattr(e, 'message'):
                    output.speak(_("%s failed. Reason: %s") % (action, e.message))
                finished = True
            except:
                # Probably a transient network failure: wait and retry (25 tries max).
                tries = tries + 1
                time.sleep(5)
        if report_success:
            output.speak(_("%s succeeded.") % action)
        if _sound != None:
            sound.player.play(_sound)
        return val
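
# Hedged usage sketch (not in the original file): api_call looks up a Twython
# method by name and retries transient failures. "update_status" is a real
# Twython method; the session is assumed to be logged in via login()/authorise().
def _example_post_tweet(session):
    session.api_call("update_status", action="Tweet", report_success=True,
                     status="Hello from TW Blue")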
|
gpl-2.0
| 5,429,948,604,041,343,000
| 36.930233
| 145
| 0.687213
| false
| 3.552288
| true
| false
| false
|
PokeAPI/pokeapi
|
pokemon_v2/migrations/0009_pokemontypepast.py
|
1
|
1924
|
# Generated by Django 2.1.11 on 2021-02-06 22:03
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
("pokemon_v2", "0008_auto_20201123_2045"),
]
operations = [
migrations.CreateModel(
name="PokemonTypePast",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("slot", models.IntegerField()),
(
"generation",
models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="pokemontypepast",
to="pokemon_v2.Generation",
),
),
(
"pokemon",
models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="pokemontypepast",
to="pokemon_v2.Pokemon",
),
),
(
"type",
models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="pokemontypepast",
to="pokemon_v2.Type",
),
),
],
options={
"abstract": False,
},
),
]
|
bsd-3-clause
| 8,417,753,935,987,722,000
| 30.032258
| 68
| 0.366424
| false
| 5.692308
| false
| false
| false
|
oferb/OpenTrains
|
webserver/opentrain/algorithm/utils.py
|
1
|
1361
|
import os
import gtfs.models
import analysis.models
import numpy as np
from scipy import spatial
import shelve
try:
import matplotlib.pyplot as plt
except ImportError:
pass
import simplekml
import config
import itertools
import datetime
from math import cos, pi
def enum(**enums):
return type('Enum', (), enums)
def get_XY_pos(relativeNullPoint, p):
""" Calculates X and Y distances in meters.
"""
deltaLatitude = p.latitude - relativeNullPoint.latitude
deltaLongitude = p.longitude - relativeNullPoint.longitude
latitudeCircumference = 40075160 * cos(relativeNullPoint.latitude * pi / 180)
resultX = deltaLongitude * latitudeCircumference / 360
resultY = deltaLatitude * 40008000 / 360
return resultX, resultY
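
# Hedged usage sketch: get_XY_pos is an equirectangular approximation, so it is
# only accurate near the reference ("null") point. _Point is a hypothetical
# stand-in for any object exposing latitude/longitude attributes.
def _example_get_XY_pos():
    class _Point(object):
        def __init__(self, latitude, longitude):
            self.latitude, self.longitude = latitude, longitude
    origin = _Point(32.0853, 34.7818)  # illustrative coordinates
    # Returns (meters east, meters north) of the second point from the origin.
    return get_XY_pos(origin, _Point(32.0860, 34.7900))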
def query_coords(point_tree, query_coords, query_accuracies):
if isinstance( query_accuracies, ( int, long, float ) ):
res = point_tree.query_ball_point(query_coords, query_accuracies)
else:
res = [point_tree.query_ball_point(query_coords[i], query_accuracies[i]) for i in xrange(len(query_accuracies))]
return res
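# NB: the enum() below redefines and shadows the keyword-only enum() above.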
def enum(*sequential, **named):
enums = dict(zip(sequential, range(len(sequential))), **named)
return type('Enum', (), enums)
def is_strictly_increasing(L):
return all(x<y for x, y in zip(L, L[1:]))
def is_increasing(L):
return all(x<=y for x, y in zip(L, L[1:]))
|
bsd-3-clause
| 4,164,793,536,097,898,500
| 29.266667
| 120
| 0.703894
| false
| 3.394015
| false
| false
| false
|
robotpy/pyfrc
|
pyfrc/physics/motor_cfgs.py
|
1
|
3366
|
from collections import namedtuple
MotorModelConfig = namedtuple(
"MotorModelConfig",
[
"name",
"nominalVoltage",
"freeSpeed",
"freeCurrent",
"stallTorque",
"stallCurrent",
],
)
MotorModelConfig.__doc__ = """
Configuration parameters useful for simulating a motor. Typically these
parameters can be obtained from the manufacturer via a data sheet or other
specification.
RobotPy contains MotorModelConfig objects for many motors that are commonly
used in FRC. If you find that we're missing a motor you care about, please
file a bug report and let us know!
.. note:: The motor configurations that come with pyfrc are defined using the
pint units library. See :ref:`units`
"""
MotorModelConfig.name.__doc__ = "Descriptive name of motor"
MotorModelConfig.nominalVoltage.__doc__ = "Nominal voltage for the motor"
MotorModelConfig.freeSpeed.__doc__ = "No-load motor speed (``1 / [time]``)"
MotorModelConfig.freeCurrent.__doc__ = "No-load motor current"
MotorModelConfig.stallTorque.__doc__ = (
"Stall torque (``[length]**2 * [mass] / [time]**2``)"
)
MotorModelConfig.stallCurrent.__doc__ = "Stall current"
from .units import units
NOMINAL_VOLTAGE = 12 * units.volts
#: Motor configuration for CIM
MOTOR_CFG_CIM = MotorModelConfig(
"CIM",
NOMINAL_VOLTAGE,
5310 * units.cpm,
2.7 * units.amps,
2.42 * units.N_m,
133 * units.amps,
)
#: Motor configuration for Mini CIM
MOTOR_CFG_MINI_CIM = MotorModelConfig(
"MiniCIM",
NOMINAL_VOLTAGE,
5840 * units.cpm,
3.0 * units.amps,
1.41 * units.N_m,
89.0 * units.amps,
)
#: Motor configuration for Bag Motor
MOTOR_CFG_BAG = MotorModelConfig(
"Bag",
NOMINAL_VOLTAGE,
13180 * units.cpm,
1.8 * units.amps,
0.43 * units.N_m,
53.0 * units.amps,
)
#: Motor configuration for 775 Pro
MOTOR_CFG_775PRO = MotorModelConfig(
"775Pro",
NOMINAL_VOLTAGE,
18730 * units.cpm,
0.7 * units.amps,
0.71 * units.N_m,
134 * units.amps,
)
#: Motor configuration for Andymark RS 775-125
MOTOR_CFG_775_125 = MotorModelConfig(
"RS775-125",
NOMINAL_VOLTAGE,
5800 * units.cpm,
1.6 * units.amps,
0.28 * units.N_m,
18.0 * units.amps,
)
#: Motor configuration for Banebots RS 775
MOTOR_CFG_BB_RS775 = MotorModelConfig(
"RS775",
NOMINAL_VOLTAGE,
13050 * units.cpm,
2.7 * units.amps,
0.72 * units.N_m,
97.0 * units.amps,
)
#: Motor configuration for Andymark 9015
MOTOR_CFG_AM_9015 = MotorModelConfig(
"AM-9015",
NOMINAL_VOLTAGE,
14270 * units.cpm,
3.7 * units.amps,
0.36 * units.N_m,
71.0 * units.amps,
)
#: Motor configuration for Banebots RS 550
MOTOR_CFG_BB_RS550 = MotorModelConfig(
"RS550",
NOMINAL_VOLTAGE,
19000 * units.cpm,
0.4 * units.amps,
0.38 * units.N_m,
84.0 * units.amps,
)
#: Motor configuration for NEO 550 Brushless Motor
MOTOR_CFG_NEO_550 = MotorModelConfig(
"NEO 550",
NOMINAL_VOLTAGE,
11000 * units.cpm,
1.4 * units.amps,
0.97 * units.N_m,
100 * units.amps,
)
#: Motor configuration for Falcon 500 Brushless Motor
MOTOR_CFG_FALCON_500 = MotorModelConfig(
"Falcon 500",
NOMINAL_VOLTAGE,
6380 * units.cpm,
1.5 * units.amps,
4.69 * units.N_m,
257 * units.amps,
)
del units
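
# Hedged usage sketch (not part of the original module): deriving simple motor
# constants from a MotorModelConfig. The fields are pint quantities, so the
# unit arithmetic below is carried out by pint.
def _example_motor_constants(cfg=MOTOR_CFG_CIM):
    stall_power = cfg.nominalVoltage * cfg.stallCurrent  # volt*amp, i.e. watts
    # Torque per amp (kT), net of the no-load current draw.
    k_t = cfg.stallTorque / (cfg.stallCurrent - cfg.freeCurrent)
    return stall_power, k_t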
|
mit
| 3,968,190,741,376,040,400
| 23.042857
| 81
| 0.643494
| false
| 2.842905
| true
| false
| false
|
MERegistro/meregistro
|
meregistro/apps/titulos/models/NormativaNacional.py
|
1
|
1738
|
# -*- coding: utf-8 -*-
from django.db import models
from apps.titulos.models.EstadoNormativaNacional import EstadoNormativaNacional
import datetime
class NormativaNacional(models.Model):
numero = models.CharField(max_length=50, unique=True)
descripcion = models.CharField(max_length=255)
observaciones = models.CharField(max_length=255, null=True, blank=True)
    estado = models.ForeignKey(EstadoNormativaNacional)  # matches the latest state in NormativaNacionalEstado
fecha_alta = models.DateField(auto_now_add=True)
class Meta:
app_label = 'titulos'
db_table = 'titulos_normativa_nacional'
def __unicode__(self):
return str(self.numero)
"Sobreescribo el init para agregarle propiedades"
def __init__(self, *args, **kwargs):
super(NormativaNacional, self).__init__(*args, **kwargs)
self.estados = self.get_estados()
def registrar_estado(self):
import datetime
from apps.titulos.models.NormativaNacionalEstado import NormativaNacionalEstado
registro = NormativaNacionalEstado(estado=self.estado)
registro.fecha = datetime.date.today()
registro.normativa_nacional_id = self.id
registro.save()
def get_estados(self):
from apps.titulos.models.NormativaNacionalEstado import NormativaNacionalEstado
try:
estados = NormativaNacionalEstado.objects.filter(normativa_nacional=self).order_by('fecha', 'id')
except:
estados = {}
return estados
"Algún título nacional está asociado a la normativa?"
def asociada_titulo_nacional(self):
from apps.titulos.models.TituloNacional import TituloNacional
return TituloNacional.objects.filter(normativa_nacional__id=self.id).exists()
"Eliminable?"
def is_deletable(self):
ret = self.asociada_titulo_nacional() == False
return ret
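
# Hedged usage sketch (not part of the original model): the intended
# state-change flow; `nueva` is assumed to be an EstadoNormativaNacional row.
def _example_cambiar_estado(normativa, nueva):
    normativa.estado = nueva
    normativa.save()
    normativa.registrar_estado()  # appends a NormativaNacionalEstado history row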
|
bsd-3-clause
| 6,765,823,261,245,573,000
| 33
| 112
| 0.767589
| false
| 2.627273
| false
| false
| false
|
mahajrod/MACE
|
scripts/old/FS_distribution.py
|
1
|
1645
|
#!/usr/bin/env python
__author__ = 'Sergei F. Kliver'
import os, sys
import argparse
import numpy as np
from MACE.General.File import split_filename, make_list_of_path_to_files
from MACE.Parsers.VCF import CollectionVCF
def vcf_filter(filename):
return True if filename[-4:] == ".vcf" else False
def is_homozygous(record):
return record.is_homozygous()
parser = argparse.ArgumentParser()
parser.add_argument("-o", "--output", action="store", dest="output", default="FS_distributions",
help="Directory to write output files")
parser.add_argument("-i", "--input_vcf", action="store", dest="input_vcf", type=lambda s: s.split(","),
help="Comma-separated list of vcf files or directories containing them", required=True)
parser.add_argument("-e", "--extension_list", action="store", dest="extension_list", type=lambda s: s.split(","),
default=[".png"],
help="Comma-separated list of extensions of figures. Default: .png")
args = parser.parse_args()
files_list = sorted(make_list_of_path_to_files(args.input_vcf, vcf_filter))
try:
os.mkdir(args.output)
except OSError:
pass
bins = np.arange(0, 66, 5)
for filename in files_list:
if args.output != "stdout":
print("Drawing distribution of FS in %s ..." % filename)
directory, prefix, extension = split_filename(filename)
variants = CollectionVCF(from_file=True, in_file=filename)
variants.draw_info_distribution("FS", is_homozygous, outfile_prefix="%s/%s" % (args.output, prefix),
extension_list=args.extension_list, bins=bins)
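# Illustrative invocation (paths are hypothetical):
#   FS_distribution.py -i sample1.vcf,vcf_dir/ -o FS_distributions -e .png,.svg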
|
apache-2.0
| 6,022,405,341,987,015,000
| 32.571429
| 113
| 0.657143
| false
| 3.485169
| false
| false
| false
|
PeachyPrinter/peachyinstaller
|
windows/src/application.py
|
1
|
3502
|
import json
class Application(object):
def __init__(self,
id,
name,
available_version=None,
download_location=None,
relitive_install_path=None,
executable_path=None,
installed_path=None,
icon=None,
current_version=None,
shortcut_path=None):
self.id = id
self.name = name
self.available_version = available_version
self.download_location = download_location
self.relitive_install_path = relitive_install_path
self.executable_path = executable_path
self.installed_path = installed_path
self.icon = icon
self.current_version = current_version
self.shortcut_path = shortcut_path
@classmethod
def from_configs(cls, web_config, installed_config=None):
if installed_config and installed_config['id'] != web_config['id']:
raise Exception("Unexpected error processing config")
id = web_config['id']
name = web_config['name']['en-us']
available_version = web_config['available_version']
download_location = web_config['location']
relitive_install_path = web_config['install_path']
icon = web_config['icon']
executable_path = web_config['executable']
if installed_config:
installed_path = installed_config['installed_path']
current_version = installed_config['current_version']
shortcut_path = installed_config['shortcut_path']
else:
installed_path = None
current_version = None
shortcut_path = None
return cls(id, name, available_version, download_location, relitive_install_path, executable_path, installed_path, icon, current_version, shortcut_path)
def get_json(self):
this = {
"id": self.id,
"name": {
"en-us": self.name,
},
"available_version": self.available_version,
"download_location": self.download_location,
"relitive_install_path": self.relitive_install_path,
"executable_path": self.executable_path,
"installed_path": self.installed_path,
"icon": self.icon,
"current_version": self.current_version,
"shortcut_path": self.shortcut_path,
}
        # Iterate over a copy so deleting keys is safe on both Python 2 and 3.
        for (key, value) in list(this.items()):
            if value is None:
                del this[key]
return json.dumps(this)
@property
def actions(self):
if self.current_version is not None:
if self.current_version == self.available_version:
return ['remove']
else:
return ['remove', 'upgrade']
else:
return ['install']
def __eq__(self, other):
return (
self.id == other.id and
self.name == other.name and
self.available_version == other.available_version and
self.download_location == other.download_location and
self.relitive_install_path == other.relitive_install_path and
self.executable_path == other.executable_path and
self.installed_path == other.installed_path and
self.icon == other.icon and
self.current_version == other.current_version and
self.shortcut_path == other.shortcut_path
)
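
# Hedged usage sketch (not part of the original module): building an
# Application from a web config alone; every field value here is hypothetical.
def _example_from_web_config():
    web_config = {
        'id': 1,
        'name': {'en-us': 'Peachy Example'},
        'available_version': '1.0.0',
        'location': 'http://example.com/app.zip',
        'install_path': 'PeachyExample',
        'icon': 'icon.ico',
        'executable': 'peachy.exe',
    }
    app = Application.from_configs(web_config)
    return app.actions  # ['install'], since no installed_config was given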
|
apache-2.0
| -4,559,871,324,107,743,000
| 37.911111
| 160
| 0.566248
| false
| 4.512887
| true
| false
| false
|
andrucuna/python
|
interactivepython-coursera/interactivepython/week0/Variables.py
|
1
|
1333
|
__author__ = 'andrucuna'
# variables - placeholders for important values
# used to avoid recomputing values and to
# give values names that help reader understand code
# valid variable names - consists of letters, numbers, underscore (_)
# starts with letter or underscore
# case sensitive (capitalization matters)
# legal names - ninja, Ninja, n_i_n_j_a
# illegal names - 1337, 1337ninja
# Python convention - multiple words joined by _
# legal names - elite_ninja, leet_ninja, ninja_1337
# illegal name 1337_ninja
# assign to variable name using single equal sign =
# (remember that double equals == is used to test equality)
# examples
my_name = "Andres Ruiz"
print my_name
my_age = 27
print my_age
# birthday - add one
my_age += 1
print my_age
# the story of the magic pill
magic_pill = 30
print my_age - magic_pill
my_grand_dad = 74
print my_grand_dad - 2 * magic_pill
# Temperature examples
# convert from Fahrenheit to Celsius
# c = 5 / 9 * (f - 32)
# use explanatory names
temp_Fahrenheit = 212
temp_Celsius = 5.0 / 9.0 * (temp_Fahrenheit - 32)
print temp_Celsius
# test it! 32 Fahrenheit is 0 Celsius, 212 Fahrenheit is 100 Celsius
# convert from Celsius to Fahrenheit
# f = 9 / 5 * c + 32
temp_Celsius = 100
temp_Fahrenheit = 9.0 / 5.0 * temp_Celsius + 32
print temp_Fahrenheit
# test it!
|
gpl-2.0
| -2,776,057,309,682,962,400
| 16.773333
| 69
| 0.705926
| false
| 2.83617
| false
| false
| false
|
vivek8943/soft-boiled
|
src/algorithms/slp.py
|
1
|
13262
|
import numpy as np
import itertools
from collections import namedtuple, defaultdict
import math
from math import floor, ceil, radians, sin, cos, asin, sqrt, pi
import pandas as pd
from src.utils.geo import bb_center, GeoCoord, haversine
LocEstimate = namedtuple('LocEstimate', ['geo_coord', 'dispersion', 'dispersion_std_dev'])
def median(distance_func, vertices, weights=None):
"""
given a python list of vertices, and a distance function, this will find the vertex that is most central
relative to all other vertices. All of the vertices must have geocoords
Args:
distance_func (function) : A function to calculate the distance between two GeoCoord objects
vertices (list) : List of GeoCoord objects
Returns:
LocEstimate: The median point
"""
#get the distance between any two points
distances = map(lambda (v0, v1) :distance_func(v0.geo_coord, v1.geo_coord), itertools.combinations (vertices, 2))
#create a dictionary with keys representing the index of a location
m = { a: list() for a in range(len(vertices)) }
#add the distances from each point to the dict
for (k0,k1),distance in zip(itertools.combinations(range(len(vertices)), 2), distances):
#a distance can be None if one of the vertices does not have a geocoord
if(weights is None):
m[k0].append(distance)
m[k1].append(distance)
else:
# Weight distances by weight of destination vertex
# Ex: distance=3, weight =4 extends m[k0] with [3, 3, 3, 3]
m[k0].extend([distance]*weights[k1])
m[k1].extend([distance]*weights[k0])
summed_values = map(sum, m.itervalues())
idx = summed_values.index(min(summed_values))
if weights is not None and weights[idx] > 1:
# Handle self-weight (i.e. if my vertex has weight of 6 there are 5 additional self connections if
# Starting from my location)
m[idx].extend([0.0]*(weights[idx]-1))
return LocEstimate(geo_coord=vertices[idx].geo_coord, dispersion=np.median(m[idx]), dispersion_std_dev=np.std(m[idx]))
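
# Hedged usage sketch (not part of the original module): picking the most
# central of three points under the haversine metric; coordinates illustrative.
def _example_median():
    pts = [LocEstimate(GeoCoord(lat, lon), None, None)
           for lat, lon in [(40.7, -74.0), (40.8, -73.9), (34.0, -118.2)]]
    # The two New York area points out-vote the Los Angeles one.
    return median(haversine, pts)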
def get_known_locs(sqlCtx, table_name, include_places=True, min_locs=3, num_partitions=30, dispersion_threshold=50):
'''
Given a loaded twitter table, this will return all the twitter users with locations. A user's location is determined
by the median location of all known tweets. A user must have at least min_locs locations in order for a location to be
estimated
Args:
sqlCtx (Spark SQL Context) : A Spark SQL context
table_name (string): Table name that was registered when loading the data
        min_locs (int) : Minimum number of tweets that have a location in order to infer a location for the user
        num_partitions (int) : Optimizer for specifying the number of partitions for the resulting
            RDD to use.
        dispersion_threshold (int) : A distance threshold on the dispersion of the estimated location for a user.
            Estimated points with dispersion greater than the threshold are considered unpredictable,
            given how dispersed the tweet distances are from one another.
Returns:
locations (rdd of LocEstimate) : Found locations of users. This rdd is often used as the ground truth of locations
'''
geo_coords = sqlCtx.sql('select user.id_str, geo.coordinates from %s where geo.coordinates is not null' % table_name)\
.map(lambda row: (row.id_str, row.coordinates))
if(include_places):
place_coords = sqlCtx.sql("select user.id_str, place.bounding_box.coordinates from %s "%table_name +
"where geo.coordinates is null and size(place.bounding_box.coordinates) > 0 and place.place_type " +
"in ('city', 'neighborhood', 'poi')").map(lambda row: (row.id_str, bb_center(row.coordinates)))
geo_coords = geo_coords.union(place_coords)
return geo_coords.groupByKey()\
.filter(lambda (id_str,coord_list): len(coord_list) >= min_locs)\
.map(lambda (id_str,coords): (id_str, median(haversine, [LocEstimate(GeoCoord(lat,lon), None, None)\
for lat,lon in coords])))\
.filter(lambda (id_str, loc): loc.dispersion < dispersion_threshold)\
.coalesce(num_partitions).cache()
def get_edge_list(sqlCtx, table_name, num_partitions=300):
'''
Given a loaded twitter table, this will return the @mention network in the form (src_id, (dest_id, num_@mentions))
Args:
sqlCtx (Spark SQL Context) : A Spark SQL context
table_name (string) : Table name that was registered when loading the data
        num_partitions (int) : Optimizer for specifying the number of partitions for the resulting RDD to use
Returns:
edges (rdd (src_id, (dest_id, weight))) : edges loaded from the table
'''
tmp_edges = sqlCtx.sql('select user.id_str, entities.user_mentions from %s where size(entities.user_mentions) > 0' % table_name)\
.flatMap(lambda row : [((row.id_str, mentioned_user.id_str),1)\
for mentioned_user in row.user_mentions\
if mentioned_user.id_str is not None and row.id_str != mentioned_user.id_str])\
.reduceByKey(lambda x,y:x+y)
return tmp_edges.map(lambda ((src_id,dest_id),num_mentions): ((dest_id,src_id),num_mentions))\
.join(tmp_edges)\
.map(lambda ((src_id,dest_id), (count0, count1)): (src_id, (dest_id, min(count0,count1))))\
.coalesce(num_partitions).cache()
def train_slp(locs_known, edge_list, num_iters, neighbor_threshold=3, dispersion_threshold=100):
'''
Core SLP algorithm
Args:
locs_known (rdd of LocEstimate objects) : Locations that are known for the SLP network
edge_list (rdd of edges (src_id, (dest_id, weight))) : edges representing the at mention
network
num_iters (int) : number of iterations to run the algorithm
        neighbor_threshold (int) : The minimum number of neighbors required in order for SLP to
            try to predict a location for a node in the network
        dispersion_threshold (int) : The maximum median distance among a node's local @mention network
            in order to predict that node's location.
Returns:
locations (rdd of LocEstimate objects) : The locations found and known
'''
num_partitions = edge_list.getNumPartitions()
# Filter edge list so we never attempt to estimate a "known" location
filtered_edge_list = edge_list.keyBy(lambda (src_id, (dst_id, weight)): dst_id)\
.leftOuterJoin(locs_known)\
.flatMap(lambda (dst_id, (edge, loc_known)): [edge] if loc_known is None else [] )
l = locs_known
for i in range(num_iters):
l = filtered_edge_list.join(l)\
.map(lambda (src_id, ((dst_id, weight), known_vertex)) : (dst_id, (known_vertex, weight)))\
.groupByKey()\
.filter(lambda (src_id, neighbors) : neighbors.maxindex >= neighbor_threshold)\
.map(lambda (src_id, neighbors) :\
(src_id, median(haversine, [v for v,w in neighbors],[w for v,w in neighbors])))\
.filter(lambda (src_id, estimated_loc): estimated_loc.dispersion < dispersion_threshold)\
.union(locs_known).coalesce(num_partitions)
return l
def evaluate(locs_known, edges, holdout_func, slp_closure):
'''
This function is used to assess various stats regarding how well SLP is running.
    Given all known locations and all known edges, this function first
    applies the holdout to locs_known, allowing a ground truth comparison to be made.
Then, it applies the non-holdout set to the training function, which should yield the
locations of the holdout for comparison.
For example::
holdout = lambda (src_id) : src_id[-1] == '6'
trainer = lambda l, e : slp.train_slp(l, e, 3)
results = evaluate(locs_known, edges, holdout, trainer)
Args:
locs_known (rdd of LocEstimate objects) : The complete list of locations
edges (rdd of (src_id, (dest_id, weight)): all available edge information
holdout_func (function) : function responsible for filtering a holdout data set. For example::
lambda (src_id) : src_id[-1] == '6'
can be used to get approximately 10% of the data since the src_id's are evenly distributed numeric values
slp_closure (function closure): a closure over the slp train function. For example::
lambda locs, edges :\n
slp.train_slp(locs, edges, 4, neighbor_threshold=4, dispersion_threshold=150)
can be used for training with specific threshold parameters
Returns:
results (dict) : stats of the results from the SLP algorithm
`median:` median difference of predicted versus actual
`mean:` mean difference of predicted versus actual
`coverage:` ratio of number of predicted locations to number of original unknown locations
`reserved_locs:` number of known locations used to train
`total_locs:` number of known locations input into this function
`found_locs:` number of predicted locations
`holdout_ratio:` ratio of the holdout set to the entire set
'''
reserved_locs = locs_known.filter(lambda (src_id, loc): not holdout_func(src_id))
num_locs = reserved_locs.count()
total_locs = locs_known.count()
print('Total Locations %s' % total_locs)
results = slp_closure(reserved_locs, edges)
errors = results\
.filter(lambda (src_id, loc): holdout_func(src_id))\
.join(locs_known)\
.map(lambda (src_id, (vtx_found, vtx_actual)) :\
(src_id, (haversine(vtx_found.geo_coord, vtx_actual.geo_coord), vtx_found)))
errors_local = errors.map(lambda (src_id, (dist, est_loc)) : dist).collect()
#because cannot easily calculate median in RDDs we will bring deltas local for stats calculations.
#With larger datasets, we may need to do this in the cluster, but for now will leave.
return (errors, {
'median': np.median(errors_local),
'mean': np.mean(errors_local),
'coverage':len(errors_local)/float(total_locs - num_locs),
'reserved_locs': num_locs,
'total_locs':total_locs,
'found_locs': len(errors_local),
'holdout_ratio' : 1 - num_locs/float(total_locs)
})
def predict_country_slp(tweets, bounding_boxes, sc):
'''
Take a set of estimates of user locations and estimate the country that user is in
Args:
tweets (RDD (id_str, LocEstimate))
bounding_boxes (list [(country_code, (min_lat, max_lat, min_lon, max_lon)),...])
Returns:
        Country Codes (list) : Predicted countries represented as their numeric codes
'''
# Convert Bounding boxes to allow for more efficient lookups
bb_lookup_lat = defaultdict(set)
bb_lookup_lon = defaultdict(set)
for i, (cc, (min_lat, max_lat, min_lon, max_lon)) in enumerate(bounding_boxes):
for lon in range(int(math.floor(min_lon)), int(math.ceil(max_lon))):
bb_lookup_lon[lon].add(i)
for lat in range(int(math.floor(min_lat)), int(math.ceil(max_lat))):
bb_lookup_lat[lat].add(i)
# Do country lookups and return an RDD that is (id_str, [country_codes])
return tweets.mapValues(lambda loc_estimate: _predict_country_using_lookup_slp(loc_estimate,\
sc.broadcast(bb_lookup_lat),\
sc.broadcast(bb_lookup_lon),\
sc.broadcast(bounding_boxes)))
def _predict_country_using_lookup_slp(loc_estimate, lat_dict, lon_dict, bounding_boxes):
'''
Internal helper function that uses broadcast lookup tables to take a single location estimate and show
what country bounding boxes include that point
Args:
loc_estimate (LocEstimate) : Estimate location
lat_dict (broadcast dictionary {integer_lat:set([bounding_box_indexes containing this lat])}) :
Indexed lookup dictionary for finding countries that exist at the specified latitude
lon_dict ((broadcast dictionary) {integer_lon:set([bounding_box_indexes containing this lon])})) :
            Index lookup dictionary for finding countries that exist at the specified longitude
bounding_boxes (broadcast list [(country_code, (min_lat, max_lat, min_lon, max_lon)),...]) :
            List of countries and their bounding boxes
'''
lat = loc_estimate.geo_coord.lat
lon = loc_estimate.geo_coord.lon
countries = set()
potential_lats = lat_dict.value[math.floor(lat)]
potential_lons = lon_dict.value[math.floor(lon)]
intersection = potential_lats.intersection(potential_lons)
if len(intersection) == 0:
return []
#raise ValueError('uh oh')
else:
for index in intersection:
cc, (min_lat, max_lat, min_lon, max_lon) = bounding_boxes.value[index]
if min_lon < lon and lon < max_lon and min_lat < lat and lat < max_lat:
countries.add(cc)
return list(countries)
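
# Hedged sketch (not part of the original module) of the integer-degree
# bucketing used above, in plain Python without Spark; boxes are illustrative.
def _example_country_lookup():
    bounding_boxes = [('us', (24.0, 50.0, -125.0, -66.0)),
                      ('fr', (41.0, 51.5, -5.5, 10.0))]
    lat_dict, lon_dict = defaultdict(set), defaultdict(set)
    for i, (cc, (min_lat, max_lat, min_lon, max_lon)) in enumerate(bounding_boxes):
        for lon in range(int(math.floor(min_lon)), int(math.ceil(max_lon))):
            lon_dict[lon].add(i)
        for lat in range(int(math.floor(min_lat)), int(math.ceil(max_lat))):
            lat_dict[lat].add(i)
    lat, lon = 40.7, -74.0  # a point inside the 'us' box only
    candidates = lat_dict[math.floor(lat)] & lon_dict[math.floor(lon)]
    return [bounding_boxes[i][0] for i in candidates
            if bounding_boxes[i][1][0] < lat < bounding_boxes[i][1][1]
            and bounding_boxes[i][1][2] < lon < bounding_boxes[i][1][3]]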
|
apache-2.0
| 4,146,304,402,109,052,000
| 43.804054
| 133
| 0.653748
| false
| 3.80436
| false
| false
| false
|
thammegowda/incubator-joshua
|
scripts/support/query_http.py
|
3
|
2476
|
#!/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Allows a file to be queried against a Joshua HTTP server. The file should be tokenized
and normalized, with one sentence per line. This script takes that file, packages it up
into blocks of size 100 (changeable with -b), and sends it to the server. The JSON output
is dumped to STDOUT. If you wish to only dump the "curl" commands instead of calling them,
add "--dry-run".
Usage:
query_http.py --dry-run -s localhost -p 5674 /path/to/corpus
"""
import sys
import urllib
import argparse
import subprocess
parser = argparse.ArgumentParser(description='Send a (tokenized) test set to a Joshua HTTP server')
parser.add_argument('-s', '--server', dest='server', default='localhost', help='server host')
parser.add_argument('-p', '--port', dest='port', type=int, default=5674, help='server port')
parser.add_argument('-b', '--blocksize', dest='size', type=int, default=100, help='number of sentences at a time')
parser.add_argument('--dry-run', default=None, action='store_true', help='print curl commands only (don\'t run')
parser.add_argument('test_file', help='the (tokenized) test file')
args = parser.parse_args()
sentences = []
def process(sentence = None):
global sentences
if sentence is None or len(sentences) == args.size:
urlstr = '{}:{}/translate?{}'.format(args.server, args.port, urllib.urlencode(sentences))
cmd = 'curl -s "{}"'.format(urlstr)
if args.dry_run:
print cmd
else:
subprocess.call(cmd, shell=True)
sentences = []
if sentence is not None:
sentences.append(('q', sentence.rstrip()))
for line in open(args.test_file):
process(line.rstrip())
process()
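# Illustrative only: with --dry-run and the defaults above, each batch prints a
# command of the form (host and sentences hypothetical):
#   curl -s "localhost:5674/translate?q=el+gato+negro&q=la+casa+azul"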
|
apache-2.0
| -7,411,478,688,813,369,000
| 37.6875
| 114
| 0.712439
| false
| 3.862715
| false
| false
| false
|
wanderer2/pymc3
|
pymc3/examples/arma_example.py
|
1
|
2135
|
from pymc3 import Normal, sample, Model, plots, Potential, variational, HalfCauchy
from theano import scan, shared
import numpy as np
"""
ARMA example
It is interesting to note just how much more compact this is that the original STAN example
The original implementation is in the STAN documentation by Gelman et al and is reproduced below
Example from STAN- slightly altered
data {
int<lower=1> T;
real y[T];
}
parameters {
// assume err[0] == 0
}
nu[t] <- mu + phi * y[t-1] + theta * err[t-1];
err[t] <- y[t] - nu[t];
}
mu ~ normal(0,10);
phi ~ normal(0,2);
theta ~ normal(0,2);
real mu;
real phi;
real theta;
real<lower=0> sigma;
} model {
vector[T] nu;
vector[T] err;
nu[1] <- mu + phi * mu;
err[1] <- y[1] - nu[1];
for (t in 2:T) {
// num observations
// observed outputs
// mean coeff
// autoregression coeff
// moving avg coeff
// noise scale
// prediction for time t
// error for time t
sigma ~ cauchy(0,5);
err ~ normal(0,sigma);
// priors
// likelihood
Ported to PyMC3 by Peadar Coyle and Chris Fonnesbeck (c) 2016.
"""
def build_model():
y = shared(np.array([15, 10, 16, 11, 9, 11, 10, 18], dtype=np.float32))
with Model() as arma_model:
sigma = HalfCauchy('sigma', 5)
theta = Normal('theta', 0, sd=2)
phi = Normal('phi', 0, sd=2)
mu = Normal('mu', 0, sd=10)
err0 = y[0] - (mu + phi * mu)
def calc_next(last_y, this_y, err, mu, phi, theta):
nu_t = mu + phi * last_y + theta * err
return this_y - nu_t
err, _ = scan(fn=calc_next,
sequences=dict(input=y, taps=[-1, 0]),
outputs_info=[err0],
non_sequences=[mu, phi, theta])
Potential('like', Normal.dist(0, sd=sigma).logp(err))
mu, sds, elbo = variational.advi(n=2000)
return arma_model
def run(n_samples=1000):
model = build_model()
with model:
trace = sample(draws=n_samples)
burn = n_samples // 10
plots.traceplot(trace[burn:])
plots.forestplot(trace[burn:])
if __name__ == '__main__':
run()
|
apache-2.0
| 4,759,934,765,427,243,000
| 23.261364
| 96
| 0.578923
| false
| 3.05
| false
| false
| false
|
mheap/ansible
|
lib/ansible/plugins/connection/winrm.py
|
1
|
30176
|
# (c) 2014, Chris Church <chris@ninemoreminutes.com>
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = """
author: Ansible Core Team
connection: winrm
short_description: Run tasks over Microsoft's WinRM
description:
- Run commands or put/fetch on a target via WinRM
- This plugin allows extra arguments to be passed that are supported by the protocol but not explicitly defined here.
They should take the form of variables declared with the following pattern `ansible_winrm_<option>`.
version_added: "2.0"
requirements:
- pywinrm (python library)
options:
# figure out more elegant 'delegation'
remote_addr:
description:
- Address of the windows machine
default: inventory_hostname
vars:
- name: ansible_host
- name: ansible_winrm_host
remote_user:
keywords:
- name: user
- name: remote_user
description:
- The user to log in as to the Windows machine
vars:
- name: ansible_user
- name: ansible_winrm_user
port:
description:
- port for winrm to connect on remote target
        - The default is the https (5986) port, if using http it should be 5985
vars:
- name: ansible_port
- name: ansible_winrm_port
default: 5986
keywords:
- name: port
type: integer
scheme:
description:
- URI scheme to use
- If not set, then will default to C(https) or C(http) if I(port) is
C(5985).
choices: [http, https]
vars:
- name: ansible_winrm_scheme
path:
description: URI path to connect to
default: '/wsman'
vars:
- name: ansible_winrm_path
transport:
description:
        - List of winrm transports to attempt to use (ssl, plaintext, kerberos, etc)
        - If None (the default) the plugin will try to automatically guess the correct list
        - The choices available depend on your version of pywinrm
type: list
vars:
- name: ansible_winrm_transport
kerberos_command:
      description: kerberos command to use to request an authentication ticket
default: kinit
vars:
- name: ansible_winrm_kinit_cmd
kerberos_mode:
description:
- kerberos usage mode.
        - The managed option means Ansible will obtain the kerberos ticket.
        - The manual option means a ticket must already have been obtained by the user.
- If having issues with Ansible freezing when trying to obtain the
Kerberos ticket, you can either set this to C(manual) and obtain
it outside Ansible or install C(pexpect) through pip and try
again.
choices: [managed, manual]
vars:
- name: ansible_winrm_kinit_mode
connection_timeout:
description:
- Sets the operation and read timeout settings for the WinRM
connection.
- Corresponds to the C(operation_timeout_sec) and
C(read_timeout_sec) args in pywinrm so avoid setting these vars
with this one.
- The default value is whatever is set in the installed version of
pywinrm.
vars:
- name: ansible_winrm_connection_timeout
"""
import base64
import inspect
import os
import re
import traceback
import json
import tempfile
import subprocess
HAVE_KERBEROS = False
try:
import kerberos
HAVE_KERBEROS = True
except ImportError:
pass
from ansible.errors import AnsibleError, AnsibleConnectionFailure
from ansible.errors import AnsibleFileNotFound
from ansible.module_utils.parsing.convert_bool import boolean
from ansible.module_utils.six.moves.urllib.parse import urlunsplit
from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.module_utils.six import binary_type
from ansible.plugins.connection import ConnectionBase
from ansible.plugins.shell.powershell import leaf_exec
from ansible.utils.hashing import secure_hash
from ansible.utils.path import makedirs_safe
try:
import winrm
from winrm import Response
from winrm.protocol import Protocol
HAS_WINRM = True
except ImportError as e:
HAS_WINRM = False
WINRM_IMPORT_ERR = e
try:
import xmltodict
HAS_XMLTODICT = True
except ImportError as e:
HAS_XMLTODICT = False
XMLTODICT_IMPORT_ERR = e
try:
import pexpect
HAS_PEXPECT = True
except ImportError as e:
HAS_PEXPECT = False
# used to try and parse the hostname and detect if IPv6 is being used
try:
import ipaddress
HAS_IPADDRESS = True
except ImportError:
HAS_IPADDRESS = False
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
class Connection(ConnectionBase):
'''WinRM connections over HTTP/HTTPS.'''
transport = 'winrm'
module_implementation_preferences = ('.ps1', '.exe', '')
become_methods = ['runas']
allow_executable = False
has_pipelining = True
allow_extras = True
def __init__(self, *args, **kwargs):
self.always_pipeline_modules = True
self.has_native_async = True
self.protocol = None
self.shell_id = None
self.delegate = None
self._shell_type = 'powershell'
super(Connection, self).__init__(*args, **kwargs)
def set_options(self, task_keys=None, var_options=None, direct=None):
if not HAS_WINRM:
return
super(Connection, self).set_options(task_keys=None, var_options=var_options, direct=direct)
self._winrm_host = self.get_option('remote_addr')
self._winrm_user = self.get_option('remote_user')
self._winrm_pass = self._play_context.password
self._become_method = self._play_context.become_method
self._become_user = self._play_context.become_user
self._become_pass = self._play_context.become_pass
self._winrm_port = self.get_option('port')
self._winrm_scheme = self.get_option('scheme')
# old behaviour, scheme should default to http if not set and the port
# is 5985 otherwise https
if self._winrm_scheme is None:
self._winrm_scheme = 'http' if self._winrm_port == 5985 else 'https'
self._winrm_path = self.get_option('path')
self._kinit_cmd = self.get_option('kerberos_command')
self._winrm_transport = self.get_option('transport')
self._winrm_connection_timeout = self.get_option('connection_timeout')
if hasattr(winrm, 'FEATURE_SUPPORTED_AUTHTYPES'):
self._winrm_supported_authtypes = set(winrm.FEATURE_SUPPORTED_AUTHTYPES)
else:
# for legacy versions of pywinrm, use the values we know are supported
self._winrm_supported_authtypes = set(['plaintext', 'ssl', 'kerberos'])
# calculate transport if needed
if self._winrm_transport is None or self._winrm_transport[0] is None:
# TODO: figure out what we want to do with auto-transport selection in the face of NTLM/Kerb/CredSSP/Cert/Basic
transport_selector = ['ssl'] if self._winrm_scheme == 'https' else ['plaintext']
if HAVE_KERBEROS and ((self._winrm_user and '@' in self._winrm_user)):
self._winrm_transport = ['kerberos'] + transport_selector
else:
self._winrm_transport = transport_selector
unsupported_transports = set(self._winrm_transport).difference(self._winrm_supported_authtypes)
if unsupported_transports:
raise AnsibleError('The installed version of WinRM does not support transport(s) %s' %
to_native(list(unsupported_transports), nonstring='simplerepr'))
# if kerberos is among our transports and there's a password specified, we're managing the tickets
kinit_mode = self.get_option('kerberos_mode')
if kinit_mode is None:
# HACK: ideally, remove multi-transport stuff
self._kerb_managed = "kerberos" in self._winrm_transport and (self._winrm_pass is not None and self._winrm_pass != "")
elif kinit_mode == "managed":
self._kerb_managed = True
elif kinit_mode == "manual":
self._kerb_managed = False
# arg names we're going passing directly
internal_kwarg_mask = set(['self', 'endpoint', 'transport', 'username', 'password', 'scheme', 'path', 'kinit_mode', 'kinit_cmd'])
self._winrm_kwargs = dict(username=self._winrm_user, password=self._winrm_pass)
argspec = inspect.getargspec(Protocol.__init__)
supported_winrm_args = set(argspec.args)
supported_winrm_args.update(internal_kwarg_mask)
passed_winrm_args = set([v.replace('ansible_winrm_', '') for v in self.get_option('_extras')])
unsupported_args = passed_winrm_args.difference(supported_winrm_args)
# warn for kwargs unsupported by the installed version of pywinrm
for arg in unsupported_args:
display.warning("ansible_winrm_{0} unsupported by pywinrm (is an up-to-date version of pywinrm installed?)".format(arg))
# pass through matching extras, excluding the list we want to treat specially
for arg in passed_winrm_args.difference(internal_kwarg_mask).intersection(supported_winrm_args):
self._winrm_kwargs[arg] = self.get_option('_extras')['ansible_winrm_%s' % arg]
# Until pykerberos has enough goodies to implement a rudimentary kinit/klist, simplest way is to let each connection
# auth itself with a private CCACHE.
def _kerb_auth(self, principal, password):
if password is None:
password = ""
self._kerb_ccache = tempfile.NamedTemporaryFile()
display.vvvvv("creating Kerberos CC at %s" % self._kerb_ccache.name)
krb5ccname = "FILE:%s" % self._kerb_ccache.name
os.environ["KRB5CCNAME"] = krb5ccname
krb5env = dict(KRB5CCNAME=krb5ccname)
# stores various flags to call with kinit, we currently only use this
# to set -f so we can get a forward-able ticket (cred delegation)
kinit_flags = []
if boolean(self.get_option('_extras').get('ansible_winrm_kerberos_delegation', False)):
kinit_flags.append('-f')
kinit_cmdline = [self._kinit_cmd]
kinit_cmdline.extend(kinit_flags)
kinit_cmdline.append(principal)
# pexpect runs the process in its own pty so it can correctly send
# the password as input even on MacOS which blocks subprocess from
# doing so. Unfortunately it is not available on the built in Python
# so we can only use it if someone has installed it
if HAS_PEXPECT:
proc_mechanism = "pexpect"
command = kinit_cmdline.pop(0)
password = to_text(password, encoding='utf-8',
errors='surrogate_or_strict')
display.vvvv("calling kinit with pexpect for principal %s"
% principal)
try:
child = pexpect.spawn(command, kinit_cmdline, timeout=60,
env=krb5env)
except pexpect.ExceptionPexpect as err:
err_msg = "Kerberos auth failure when calling kinit cmd " \
"'%s': %s" % (command, to_native(err))
raise AnsibleConnectionFailure(err_msg)
try:
child.expect(".*:")
child.sendline(password)
except OSError as err:
# child exited before the pass was sent, Ansible will raise
# error based on the rc below, just display the error here
display.vvvv("kinit with pexpect raised OSError: %s"
% to_native(err))
# technically this is the stdout + stderr but to match the
# subprocess error checking behaviour, we will call it stderr
stderr = child.read()
child.wait()
rc = child.exitstatus
else:
proc_mechanism = "subprocess"
password = to_bytes(password, encoding='utf-8',
errors='surrogate_or_strict')
display.vvvv("calling kinit with subprocess for principal %s"
% principal)
try:
p = subprocess.Popen(kinit_cmdline, stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env=krb5env)
except OSError as err:
err_msg = "Kerberos auth failure when calling kinit cmd " \
"'%s': %s" % (self._kinit_cmd, to_native(err))
raise AnsibleConnectionFailure(err_msg)
stdout, stderr = p.communicate(password + b'\n')
            rc = p.returncode
if rc != 0:
err_msg = "Kerberos auth failure for principal %s with %s: %s" \
% (principal, proc_mechanism, to_native(stderr.strip()))
raise AnsibleConnectionFailure(err_msg)
display.vvvvv("kinit succeeded for principal %s" % principal)
def _winrm_connect(self):
'''
Establish a WinRM connection over HTTP/HTTPS.
'''
display.vvv("ESTABLISH WINRM CONNECTION FOR USER: %s on PORT %s TO %s" %
(self._winrm_user, self._winrm_port, self._winrm_host), host=self._winrm_host)
winrm_host = self._winrm_host
if HAS_IPADDRESS:
display.vvvv("checking if winrm_host %s is an IPv6 address" % winrm_host)
try:
ipaddress.IPv6Address(winrm_host)
except ipaddress.AddressValueError:
pass
else:
winrm_host = "[%s]" % winrm_host
netloc = '%s:%d' % (winrm_host, self._winrm_port)
endpoint = urlunsplit((self._winrm_scheme, netloc, self._winrm_path, '', ''))
errors = []
for transport in self._winrm_transport:
if transport == 'kerberos':
if not HAVE_KERBEROS:
errors.append('kerberos: the python kerberos library is not installed')
continue
if self._kerb_managed:
self._kerb_auth(self._winrm_user, self._winrm_pass)
display.vvvvv('WINRM CONNECT: transport=%s endpoint=%s' % (transport, endpoint), host=self._winrm_host)
try:
winrm_kwargs = self._winrm_kwargs.copy()
if self._winrm_connection_timeout:
winrm_kwargs['operation_timeout_sec'] = self._winrm_connection_timeout
winrm_kwargs['read_timeout_sec'] = self._winrm_connection_timeout + 1
protocol = Protocol(endpoint, transport=transport, **winrm_kwargs)
# open the shell from connect so we know we're able to talk to the server
if not self.shell_id:
self.shell_id = protocol.open_shell(codepage=65001) # UTF-8
display.vvvvv('WINRM OPEN SHELL: %s' % self.shell_id, host=self._winrm_host)
return protocol
except Exception as e:
err_msg = to_text(e).strip()
if re.search(to_text(r'Operation\s+?timed\s+?out'), err_msg, re.I):
raise AnsibleError('the connection attempt timed out')
m = re.search(to_text(r'Code\s+?(\d{3})'), err_msg)
if m:
code = int(m.groups()[0])
if code == 401:
err_msg = 'the specified credentials were rejected by the server'
elif code == 411:
return protocol
errors.append(u'%s: %s' % (transport, err_msg))
display.vvvvv(u'WINRM CONNECTION ERROR: %s\n%s' % (err_msg, to_text(traceback.format_exc())), host=self._winrm_host)
if errors:
raise AnsibleConnectionFailure(', '.join(map(to_native, errors)))
else:
raise AnsibleError('No transport found for WinRM connection')
def _winrm_send_input(self, protocol, shell_id, command_id, stdin, eof=False):
rq = {'env:Envelope': protocol._get_soap_header(
resource_uri='http://schemas.microsoft.com/wbem/wsman/1/windows/shell/cmd',
action='http://schemas.microsoft.com/wbem/wsman/1/windows/shell/Send',
shell_id=shell_id)}
stream = rq['env:Envelope'].setdefault('env:Body', {}).setdefault('rsp:Send', {})\
.setdefault('rsp:Stream', {})
stream['@Name'] = 'stdin'
stream['@CommandId'] = command_id
stream['#text'] = base64.b64encode(to_bytes(stdin))
if eof:
stream['@End'] = 'true'
protocol.send_message(xmltodict.unparse(rq))
def _winrm_exec(self, command, args=(), from_exec=False, stdin_iterator=None):
if not self.protocol:
self.protocol = self._winrm_connect()
self._connected = True
if from_exec:
display.vvvvv("WINRM EXEC %r %r" % (command, args), host=self._winrm_host)
else:
display.vvvvvv("WINRM EXEC %r %r" % (command, args), host=self._winrm_host)
command_id = None
try:
stdin_push_failed = False
command_id = self.protocol.run_command(self.shell_id, to_bytes(command), map(to_bytes, args), console_mode_stdin=(stdin_iterator is None))
try:
if stdin_iterator:
for (data, is_last) in stdin_iterator:
self._winrm_send_input(self.protocol, self.shell_id, command_id, data, eof=is_last)
except Exception as ex:
display.warning("FATAL ERROR DURING FILE TRANSFER: %s" % to_text(ex))
stdin_push_failed = True
# NB: this can hang if the receiver is still running (eg, network failed a Send request but the server's still happy).
# FUTURE: Consider adding pywinrm status check/abort operations to see if the target is still running after a failure.
resptuple = self.protocol.get_command_output(self.shell_id, command_id)
# ensure stdout/stderr are text for py3
# FUTURE: this should probably be done internally by pywinrm
response = Response(tuple(to_text(v) if isinstance(v, binary_type) else v for v in resptuple))
# TODO: check result from response and set stdin_push_failed if we have nonzero
if from_exec:
display.vvvvv('WINRM RESULT %r' % to_text(response), host=self._winrm_host)
else:
display.vvvvvv('WINRM RESULT %r' % to_text(response), host=self._winrm_host)
display.vvvvvv('WINRM STDOUT %s' % to_text(response.std_out), host=self._winrm_host)
display.vvvvvv('WINRM STDERR %s' % to_text(response.std_err), host=self._winrm_host)
if stdin_push_failed:
stderr = to_bytes(response.std_err, encoding='utf-8')
if self.is_clixml(stderr):
stderr = self.parse_clixml_stream(stderr)
raise AnsibleError('winrm send_input failed; \nstdout: %s\nstderr %s' % (to_native(response.std_out), to_native(stderr)))
return response
finally:
if command_id:
self.protocol.cleanup_command(self.shell_id, command_id)
def _connect(self):
if not HAS_WINRM:
raise AnsibleError("winrm or requests is not installed: %s" % to_native(WINRM_IMPORT_ERR))
elif not HAS_XMLTODICT:
raise AnsibleError("xmltodict is not installed: %s" % to_native(XMLTODICT_IMPORT_ERR))
super(Connection, self)._connect()
if not self.protocol:
self.protocol = self._winrm_connect()
self._connected = True
return self
def _reset(self): # used by win_reboot (and any other action that might need to bounce the state)
self.protocol = None
self.shell_id = None
self._connect()
def _create_raw_wrapper_payload(self, cmd, environment=None):
environment = {} if environment is None else environment
payload = {
'module_entry': to_text(base64.b64encode(to_bytes(cmd))),
'powershell_modules': {},
'actions': ['exec'],
'exec': to_text(base64.b64encode(to_bytes(leaf_exec))),
'environment': environment,
'min_ps_version': None,
'min_os_version': None
}
return json.dumps(payload)
def _wrapper_payload_stream(self, payload, buffer_size=200000):
payload_bytes = to_bytes(payload)
byte_count = len(payload_bytes)
for i in range(0, byte_count, buffer_size):
yield payload_bytes[i:i + buffer_size], i + buffer_size >= byte_count
def exec_command(self, cmd, in_data=None, sudoable=True):
super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)
cmd_parts = self._shell._encode_script(cmd, as_list=True, strict_mode=False, preserve_rc=False)
# TODO: display something meaningful here
display.vvv("EXEC (via pipeline wrapper)")
stdin_iterator = None
if in_data:
stdin_iterator = self._wrapper_payload_stream(in_data)
result = self._winrm_exec(cmd_parts[0], cmd_parts[1:], from_exec=True, stdin_iterator=stdin_iterator)
result.std_out = to_bytes(result.std_out)
result.std_err = to_bytes(result.std_err)
# parse just stderr from CLIXML output
if self.is_clixml(result.std_err):
try:
result.std_err = self.parse_clixml_stream(result.std_err)
except Exception:
# unsure if we're guaranteed a valid xml doc- use raw output in case of error
pass
return (result.status_code, result.std_out, result.std_err)
def is_clixml(self, value):
return value.startswith(b"#< CLIXML")
# hacky way to get just stdout- not always sure of doc framing here, so use with care
def parse_clixml_stream(self, clixml_doc, stream_name='Error'):
clear_xml = clixml_doc.replace(b'#< CLIXML\r\n', b'')
doc = xmltodict.parse(clear_xml)
lines = [l.get('#text', '').replace('_x000D__x000A_', '') for l in doc.get('Objs', {}).get('S', {}) if l.get('@S') == stream_name]
return '\r\n'.join(lines)
# FUTURE: determine buffer size at runtime via remote winrm config?
def _put_file_stdin_iterator(self, in_path, out_path, buffer_size=250000):
in_size = os.path.getsize(to_bytes(in_path, errors='surrogate_or_strict'))
offset = 0
with open(to_bytes(in_path, errors='surrogate_or_strict'), 'rb') as in_file:
for out_data in iter((lambda: in_file.read(buffer_size)), b''):
offset += len(out_data)
self._display.vvvvv('WINRM PUT "%s" to "%s" (offset=%d size=%d)' % (in_path, out_path, offset, len(out_data)), host=self._winrm_host)
# yes, we're double-encoding over the wire in this case- we want to ensure that the data shipped to the end PS pipeline is still b64-encoded
b64_data = base64.b64encode(out_data) + b'\r\n'
# cough up the data, as well as an indicator if this is the last chunk so winrm_send knows to set the End signal
yield b64_data, (in_file.tell() == in_size)
if offset == 0: # empty file, return an empty (bytes) buffer + eof to close it
yield b"", True
def put_file(self, in_path, out_path):
super(Connection, self).put_file(in_path, out_path)
out_path = self._shell._unquote(out_path)
display.vvv('PUT "%s" TO "%s"' % (in_path, out_path), host=self._winrm_host)
if not os.path.exists(to_bytes(in_path, errors='surrogate_or_strict')):
raise AnsibleFileNotFound('file or module does not exist: "%s"' % to_native(in_path))
script_template = u'''
begin {{
$path = '{0}'
$DebugPreference = "Continue"
$ErrorActionPreference = "Stop"
Set-StrictMode -Version 2
$fd = [System.IO.File]::Create($path)
$sha1 = [System.Security.Cryptography.SHA1CryptoServiceProvider]::Create()
$bytes = @() #initialize for empty file case
}}
process {{
$bytes = [System.Convert]::FromBase64String($input)
$sha1.TransformBlock($bytes, 0, $bytes.Length, $bytes, 0) | Out-Null
$fd.Write($bytes, 0, $bytes.Length)
}}
end {{
$sha1.TransformFinalBlock($bytes, 0, 0) | Out-Null
$hash = [System.BitConverter]::ToString($sha1.Hash).Replace("-", "").ToLowerInvariant()
$fd.Close()
Write-Output "{{""sha1"":""$hash""}}"
}}
'''
script = script_template.format(self._shell._escape(out_path))
cmd_parts = self._shell._encode_script(script, as_list=True, strict_mode=False, preserve_rc=False)
result = self._winrm_exec(cmd_parts[0], cmd_parts[1:], stdin_iterator=self._put_file_stdin_iterator(in_path, out_path))
# TODO: improve error handling
if result.status_code != 0:
raise AnsibleError(to_native(result.std_err))
put_output = json.loads(result.std_out)
remote_sha1 = put_output.get("sha1")
if not remote_sha1:
raise AnsibleError("Remote sha1 was not returned")
local_sha1 = secure_hash(in_path)
if not remote_sha1 == local_sha1:
raise AnsibleError("Remote sha1 hash {0} does not match local hash {1}".format(to_native(remote_sha1), to_native(local_sha1)))
def fetch_file(self, in_path, out_path):
super(Connection, self).fetch_file(in_path, out_path)
in_path = self._shell._unquote(in_path)
out_path = out_path.replace('\\', '/')
display.vvv('FETCH "%s" TO "%s"' % (in_path, out_path), host=self._winrm_host)
buffer_size = 2**19 # 0.5MB chunks
makedirs_safe(os.path.dirname(out_path))
out_file = None
try:
offset = 0
while True:
try:
script = '''
$path = "%(path)s"
If (Test-Path -Path $path -PathType Leaf)
{
$buffer_size = %(buffer_size)d
$offset = %(offset)d
$stream = New-Object -TypeName IO.FileStream($path, [IO.FileMode]::Open, [IO.FileAccess]::Read, [IO.FileShare]::ReadWrite)
$stream.Seek($offset, [System.IO.SeekOrigin]::Begin) > $null
$buffer = New-Object -TypeName byte[] $buffer_size
$bytes_read = $stream.Read($buffer, 0, $buffer_size)
if ($bytes_read -gt 0) {
$bytes = $buffer[0..($bytes_read - 1)]
[System.Convert]::ToBase64String($bytes)
}
$stream.Close() > $null
}
ElseIf (Test-Path -Path $path -PathType Container)
{
Write-Host "[DIR]";
}
Else
{
Write-Error "$path does not exist";
Exit 1;
}
''' % dict(buffer_size=buffer_size, path=self._shell._escape(in_path), offset=offset)
display.vvvvv('WINRM FETCH "%s" to "%s" (offset=%d)' % (in_path, out_path, offset), host=self._winrm_host)
cmd_parts = self._shell._encode_script(script, as_list=True, preserve_rc=False)
result = self._winrm_exec(cmd_parts[0], cmd_parts[1:])
if result.status_code != 0:
raise IOError(to_native(result.std_err))
if result.std_out.strip() == '[DIR]':
data = None
else:
data = base64.b64decode(result.std_out.strip())
if data is None:
makedirs_safe(out_path)
break
else:
if not out_file:
# If out_path is a directory and we're expecting a file, bail out now.
if os.path.isdir(to_bytes(out_path, errors='surrogate_or_strict')):
break
out_file = open(to_bytes(out_path, errors='surrogate_or_strict'), 'wb')
out_file.write(data)
if len(data) < buffer_size:
break
offset += len(data)
except Exception:
traceback.print_exc()
raise AnsibleError('failed to transfer file to "%s"' % to_native(out_path))
finally:
if out_file:
out_file.close()
def close(self):
if self.protocol and self.shell_id:
display.vvvvv('WINRM CLOSE SHELL: %s' % self.shell_id, host=self._winrm_host)
self.protocol.close_shell(self.shell_id)
self.shell_id = None
self.protocol = None
self._connected = False
|
gpl-3.0
| 3,836,380,334,355,508,000
| 42.733333
| 156
| 0.57728
| false
| 4.070687
| false
| false
| false
|
Aloomaio/googleads-python-lib
|
examples/ad_manager/v201808/user_service/get_all_users.py
|
1
|
1759
|
#!/usr/bin/env python
#
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example gets all users.
"""
# Import appropriate modules from the client library.
from googleads import ad_manager
def main(client):
# Initialize appropriate service.
user_service = client.GetService('UserService', version='v201808')
# Create a statement to select users.
statement = ad_manager.StatementBuilder(version='v201808')
# Retrieve a small amount of users at a time, paging
# through until all users have been retrieved.
while True:
response = user_service.getUsersByStatement(statement.ToStatement())
if 'results' in response and len(response['results']):
for user in response['results']:
# Print out some information for each user.
print('User with ID "%d" and name "%s" was found.\n' % (user['id'],
user['name']))
statement.offset += statement.limit
else:
break
print('\nNumber of results found: %s' % response['totalResultSetSize'])
if __name__ == '__main__':
# Initialize client object.
ad_manager_client = ad_manager.AdManagerClient.LoadFromStorage()
main(ad_manager_client)
|
apache-2.0
| 8,587,936,234,622,361,000
| 34.897959
| 78
| 0.693576
| false
| 4.178147
| false
| false
| false
|
dailab/roomba-python
|
demo.py
|
1
|
3196
|
import create
import time
import io
import os
import sys
import argparse
# define silence
r = 30
# map note names in the lilypad notation to irobot commands
c4 = 60
cis4 = des4 = 61
d4 = 62
dis4 = ees4 = 63
e4 = 64
f4 = 65
fis4 = ges4 = 66
g4 = 67
gis4 = aes4 = 68
a4 = 69
ais4 = bes4 = 70
b4 = 71
c5 = 72
cis5 = des5 = 73
d5 = 74
dis5 = ees5 = 75
e5 = 76
f5 = 77
fis5 = ges5 = 78
g5 = 79
gis5 = aes5 = 80
a5 = 81
ais5 = bes5 = 82
b5 = 83
c6 = 84
cis6 = des6 = 85
d6 = 86
dis6 = ees6 = 87
e6 = 88
f6 = 89
fis6 = ges6 = 90
# define some note lengths
# change the top MEASURE (4/4 time) to get faster/slower speeds
MEASURE = 160
HALF = MEASURE/2
Q = MEASURE/4
E = MEASURE/8
Ed = MEASURE*3/16
S = MEASURE/16
MEASURE_TIME = MEASURE/64.
ROOMBA_PORT = "/dev/rfcomm0"
FIFO_PATH = "/tmp/roombaCommands"
#parser
parser = argparse.ArgumentParser(description="Roomba Voice Command Control Software")
parser.add_argument("-k", dest="keyword", help="Keyword for addressing the roomba", default="")
parser.add_argument("-p", dest="path", help="path where creating the FIFO", default=FIFO_PATH)
parser.add_argument("-r", dest="roomba", help="serial port to the roomba", default=ROOMBA_PORT)
#parsing args
args = parser.parse_args()
keyword = args.keyword.lower()
print("keyword is " + keyword)
FIFO_PATH = args.path
print("created fifo in "+ FIFO_PATH)
ROOMBA_PORT = args.roomba
print("roomba port set to "+ ROOMBA_PORT)
#telekom jingle
telekom = [(c4,S), (c4,S), (c4,S), (e4,S), (c4,Q)]
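# Rough sanity check (assuming the iRobot OI song spec, where note
# durations are in 1/64ths of a second): the jingle above lasts
# 4*S + Q = 4*10 + 40 = 80 ticks, i.e. 80/64 = 1.25 seconds.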
#fifo init
try:
os.mkfifo(FIFO_PATH, 0o766)
except OSError:
# FIFO already exists; recreate it
os.unlink(FIFO_PATH)
os.mkfifo(FIFO_PATH, 0o766)
#robot init
robot = create.Create(ROOMBA_PORT, create.SAFE_MODE)
robot.setSong(1, telekom)
def clean_up():
print("clean up and exit")
os.unlink(FIFO_PATH)
robot.close()
sys.exit(0)
def main():
exit_loop = False
fifo = open(FIFO_PATH, "r")
while exit_loop == False:
line = fifo.readline()
if line != "":
#line = keyword_ignore.sub("", line).strip(" ").strip("\n")
line = line.lower().replace(keyword, "").strip(" ").strip("\n")
print(line)
if line == "clean":
robot.toSafeMode()
time.sleep(.5)
print("starting to clean")
robot._write(create.CLEAN)
if line == "spot":
robot.toSafeMode()
time.sleep(.5)
print("starting to spot clean")
robot._write(create.SPOT)
if line == "stop":
print("stopping")
robot.toSafeMode()
time.sleep(.5)
if line == "dock":
robot.toSafeMode()
time.sleep(.5)
print("seeking dock")
robot._write(create.FORCESEEKINGDOCK)
if line == "jingle":
robot.toSafeMode()
time.sleep(.5)
robot.playSongNumber(1)
if line == "close":
exit_loop = True
try:
main()
except:
print("\nexception -> ")
clean_up()
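# Usage sketch (keyword/paths are the defaults above, not guaranteed):
# start the daemon, then feed commands into the FIFO from a shell:
#   python demo.py -k roomba -r /dev/rfcomm0
#   echo "roomba clean" > /tmp/roombaCommands
#   echo "roomba dock" > /tmp/roombaCommands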
|
artistic-2.0
| 8,155,639,683,856,493,000
| 23.030075
| 95
| 0.556008
| false
| 3.108949
| false
| false
| false
|
sedruk/Red-DiscordBot
|
cogs/russianroulette.py
|
1
|
14234
|
# Roulette.py was created by Redjumpman for Redbot
# This will create a rrgame.JSON file and a data folder
import os
import random
import asyncio
from time import gmtime, strftime
from discord.ext import commands
from .utils.dataIO import dataIO
from .utils import checks
from __main__ import send_cmd_help
kill_message = ["I was really pulling for {0} too. Oh well!",
"I guess {0} really wasn't a pea-brain!",
"Ahhh now that {0} is gone we can quit playing! No? Ok fine!",
("All things considered, I think we can all agree that {0} was a "
"straight shooter."),
"Noooooooo. Not {0}!", "I call dibs on {0}\'s stuff. Too soon?",
"Well I guess {0} and I won't be doing that thing anymore...",
"Here lies {0}. A loser.", "RIP {0}.", "I kinda hated {0} anyway.",
"Hey {0}! I'm back with your snacks! Oh...",
"{0}, you\'re like modern art now!", "Called it!",
"Really guys? {0}\'s dead? Well this server officially blows now.",
"Does this mean I don't have to return the book {0} lent me?",
"Oh come on! Now {0}\'s blood is all over my server!",
"I\'ll always remember {0}...", "Well at least {0} stopped crying.",
"Don\'t look at me. You guys are cleaning up {0}.",
"What I'm not crying. *sniff*", "I TOLD YOU, YOU COULD DO IT!",
"Well I'm sure someone will miss you, {0}.", "Never forget. {0}."
"Yeah. Real smart guys. Just kill off all the fun people.",
"I think I got some splatter on me. Gross",
"I told you it would blow your mind!", "Well this is fun...",
"I go to get popcorn and you all start without me. Rude.",
"Oh God. Just before {0} pulled the trigger they shit their pants.",
"I guess I\'ll dig this hole a little bigger...",
"10/10 would watch {0} blow their brains out again.",
"Well I hope {0} has life insurance...",
"See you in the next life, {0}", "AND THEIR OFF! Oh... wrong game."
"I don\'t know how, but I think {1} cheated.",
"{0} always said they wanted to go out with a bang.",
"So don\'t sing *another one bites the dust* ?",
"I can\'t tell if the smile on {1}\'s face is relief or insanity.",
"Oh stop crying {1}. {0} knew what they were getting into.",
"So that\'s what a human looks like on the inside!",
"My condolences {1}. I know you were *so* close to {0}.",
"GOD NO. PLEASE NO. PLEASE GOD NO. NOOOOOOOOOOOOOOOOOOOOOOO!",
"Time of death {2}. Cause: Stupidity.", "BOOM HEADSHOT! Sorry..."
"Don\'t act like you didn\'t enjoy that, {1}!",
"Is it weird that I wish {1} was dead instead?",
"Oh real great. {0} dies and I\'m still stuck with {1}. Real. Great.",
"Are you eating cheetos? Have some respect {1}! {0} just died!"]
class Russianroulette:
"""Allows 6 players to play Russian Roulette"""
def __init__(self, bot):
self.bot = bot
self.file_path = "data/JumperCogs/roulette/russian.json"
self.system = dataIO.load_json(self.file_path)
self.version = "2.2.01"
@commands.group(pass_context=True, no_pm=True)
async def setrussian(self, ctx):
"""Russian Roulette Settings"""
if ctx.invoked_subcommand is None:
await send_cmd_help(ctx)
@commands.command(name="rrversion", pass_context=True)
@checks.admin_or_permissions(manage_server=True)
async def _version_setrussian(self, ctx):
"""Shows the version of Russian Roulette"""
await self.bot.say("You are currently running Russian Roulette version "
"{}".format(self.version))
@setrussian.command(name="minbet", pass_context=True)
@checks.admin_or_permissions(manage_server=True)
async def _minbet_setrussian(self, ctx, bet: int):
"""Set the minimum starting bet for Russian Roulette games"""
server = ctx.message.server
settings = self.check_server_settings(server)
if bet > 0:
settings["System"]["Min Bet"] = bet
dataIO.save_json(self.file_path, self.system)
msg = "The initial bet to play russian roulette is set to {}".format(bet)
else:
msg = "I need a number higher than 0."
await self.bot.say(msg)
@commands.command(pass_context=True, no_pm=True)
@checks.admin_or_permissions(manage_server=True)
async def resetrr(self, ctx):
"""Reset command if game is stuck."""
server = ctx.message.server
settings = self.check_server_settings(server)
self.reset_game(settings)
await self.bot.say("Russian Roulette system has been reset.")
@commands.command(pass_context=True, no_pm=True, aliases=["rr"])
async def russian(self, ctx, bet: int):
user = ctx.message.author
server = ctx.message.server
settings = self.check_server_settings(server)
bank = self.bot.get_cog("Economy").bank
if await self.logic_checks(settings, user, bet):
if settings["System"]["Roulette Initial"]:
if user.id in settings["Players"]:
msg = "You are already in the circle. Don\'t be so eager to die."
elif len(settings["Players"].keys()) >= 6:
msg = "Sorry. The max amount of players is 6."
else:
if bet == settings["System"]["Start Bet"]:
self.player_add(settings, user, bet)
self.subtract_credits(settings, user, bet)
msg = "{} has joined the roulette circle".format(user.name)
else:
start_bet = settings["System"]["Start Bet"]
msg = "Your bet must be equal to {}.".format(start_bet)
await self.bot.say(msg)
else:
self.initial_set(settings, bet)
self.player_add(settings, user, bet)
self.subtract_credits(settings, user, bet)
await self.bot.say("{} has started a game of roulette with a starting bet of "
"{}\nThe game will start in 30 seconds or when 5 more "
"players join.".format(user.name, bet))
await asyncio.sleep(30)
if len(settings["Players"].keys()) == 1:
await self.bot.say("Sorry I can't let you play by yourself, that's just "
"suicide.\nTry again when you find some 'friends'.")
player = list(settings["Players"].keys())[0]
mobj = server.get_member(player)
initial_bet = settings["Players"][player]["Bet"]
bank.deposit_credits(mobj, initial_bet)
self.reset_game(settings)
else:
settings["System"]["Active"] = not settings["System"]["Active"]
await self.bot.say("Gather around! The game of russian roulette is starting.\n"
"I'm going to load a round into this six shot **revolver**, "
"give it a good spin, and pass it off to someone at random. "
"**If** everyone is lucky enough to have a turn, I\'ll "
"start all over. Good luck!")
await asyncio.sleep(5)
await self.roulette_game(settings, server)
self.reset_game(settings)
async def logic_checks(self, settings, user, bet):
if settings["System"]["Active"]:
await self.bot.say("A game of roulette is already active. Wait for it to end.")
return False
elif bet < settings["System"]["Min Bet"]:
min_bet = settings["System"]["Min Bet"]
await self.bot.say("Your bet must be greater than or equal to {}.".format(min_bet))
return False
elif len(settings["Players"].keys()) >= 6:
await self.bot.say("There are too many players playing at the moment")
return False
elif not self.enough_credits(user, bet):
await self.bot.say("You do not have enough credits or may need to register a bank "
"account")
return False
else:
return True
async def roulette_game(self, settings, server):
pot = settings["System"]["Pot"]
turn = 0
count = len(settings["Players"].keys())
while count > 0:
players = [server.get_member(x) for x in list(settings["Players"].keys())]
if count > 1:
count -= 1
turn += 1
await self.roulette_round(settings, server, players, turn)
else:
winner = players[0]
await self.bot.say("Congratulations {}, you're the only person alive. Enjoy your "
"blood money...\n{} credits were deposited into {}\'s "
"account".format(winner.mention, pot, winner.name))
bank = self.bot.get_cog("Economy").bank
bank.deposit_credits(winner, pot)
break
async def roulette_round(self, settings, server, players, turn):
roulette_circle = players[:]
chamber = 6
await self.bot.say("*{} put one round into the six shot revolver and gave it a good spin. "
"With a flick of the wrist, it locks in place."
"*".format(self.bot.user.name))
await asyncio.sleep(4)
await self.bot.say("Let's begin round {}.".format(turn))
while chamber >= 1:
if not roulette_circle:
roulette_circle = players[:] # Restart the circle when list is exhausted
chance = random.randint(1, chamber)
player = random.choice(roulette_circle)
await self.bot.say("{} presses the revolver to their temple and slowly squeezes the "
"trigger...".format(player.name))
if chance == 1:
await asyncio.sleep(4)
msg = "**BOOM**\n```{} died and was removed from the group.```".format(player.name)
await self.bot.say(msg)
msg2 = random.choice(kill_message)
settings["Players"].pop(player.id)
remaining = [server.get_member(x) for x in list(settings["Players"].keys())]
player2 = random.choice(remaining)
death_time = strftime("%H:%M:%S", gmtime())
await asyncio.sleep(5)
await self.bot.say(msg2.format(player.name, player2.name, death_time))
await asyncio.sleep(5)
break
else:
await asyncio.sleep(4)
await self.bot.say("**CLICK**\n```{} survived and passed the "
"revolver.```".format(player.name))
await asyncio.sleep(3)
roulette_circle.remove(player)
chamber -= 1
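# Why a round is fair (illustrative reasoning, not original code): with
# `chamber` slots left, random.randint(1, chamber) equals 1 with
# probability 1/chamber, so the pulls form the classic 1/6, 1/5, ... 1/1
# sequence and every player has the same overall 1/6 chance per round.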
def reset_game(self, settings):
settings["System"]["Pot"] = 0
settings["System"]["Active"] = False
settings["System"]["Start Bet"] = 0
settings["System"]["Roulette Initial"] = False
settings["Players"] = {}
def player_add(self, settings, user, bet):
settings["System"]["Pot"] += bet
settings["Players"][user.id] = {"Name": user.name,
"Mention": user.mention,
"Bet": bet}
def initial_set(self, settings, bet):
settings["System"]["Start Bet"] = bet
settings["System"]["Roulette Initial"] = True
def subtract_credits(self, settings, user, bet):
bank = self.bot.get_cog('Economy').bank
bank.withdraw_credits(user, bet)
def enough_credits(self, user, amount):
bank = self.bot.get_cog('Economy').bank
if bank.account_exists(user):
if bank.can_spend(user, amount):
return True
else:
return False
else:
return False
def check_server_settings(self, server):
if server.id not in self.system["Servers"]:
default = {"System": {"Pot": 0,
"Active": False,
"Start Bet": 0,
"Roulette Initial": False,
"Min Bet": 50},
"Players": {}
}
self.system["Servers"][server.id] = default
dataIO.save_json(self.file_path, self.system)
print("Creating default russian roulette settings for Server: {}".format(server.name))
path = self.system["Servers"][server.id]
return path
else:
path = self.system["Servers"][server.id]
return path
def check_folders():
if not os.path.exists("data/JumperCogs/roulette"):
print("Creating data/JumperCogs/roulette folder...")
os.makedirs("data/JumperCogs/roulette")
def check_files():
system = {"Servers": {}}
f = "data/JumperCogs/roulette/russian.json"
if not dataIO.is_valid_json(f):
print("Creating default russian.json...")
dataIO.save_json(f, system)
def setup(bot):
check_folders()
check_files()
bot.add_cog(Russianroulette(bot))
|
gpl-3.0
| -5,136,934,830,560,906,000
| 47.252595
| 100
| 0.523535
| false
| 4.076174
| false
| false
| false
|
dirk-thomas/vcstool
|
vcstool/commands/import_.py
|
1
|
8799
|
import argparse
import os
from shutil import which
import sys
import urllib.request as request
from vcstool import __version__ as vcstool_version
from vcstool.clients import vcstool_clients
from vcstool.clients.vcs_base import run_command
from vcstool.executor import ansi
from vcstool.executor import execute_jobs
from vcstool.executor import output_repositories
from vcstool.executor import output_results
from vcstool.streams import set_streams
import yaml
from .command import add_common_arguments
from .command import Command
class ImportCommand(Command):
command = 'import'
help = 'Import the list of repositories'
def __init__(
self, args, url, version=None, recursive=False, shallow=False
):
super(ImportCommand, self).__init__(args)
self.url = url
self.version = version
self.force = args.force
self.retry = args.retry
self.skip_existing = args.skip_existing
self.recursive = recursive
self.shallow = shallow
def get_parser():
parser = argparse.ArgumentParser(
description='Import the list of repositories', prog='vcs import')
group = parser.add_argument_group('"import" command parameters')
group.add_argument(
'--input', type=file_or_url_type, default='-',
help='Where to read YAML from', metavar='FILE_OR_URL')
group.add_argument(
'--force', action='store_true', default=False,
help="Delete existing directories if they don't contain the "
'repository being imported')
group.add_argument(
'--shallow', action='store_true', default=False,
help='Create a shallow clone without a history')
group.add_argument(
'--recursive', action='store_true', default=False,
help='Recurse into submodules')
group.add_argument(
'--retry', type=int, metavar='N', default=2,
help='Retry commands requiring network access N times on failure')
group.add_argument(
'--skip-existing', action='store_true', default=False,
help="Don't overwrite existing directories or change custom checkouts "
'in repos using the same URL (but fetch repos with same URL)')
return parser
def file_or_url_type(value):
if os.path.exists(value) or '://' not in value:
return argparse.FileType('r')(value)
# use another user agent to avoid getting a 403 (forbidden) error,
# since some websites blacklist or block unrecognized user agents
return request.Request(
value, headers={'User-Agent': 'vcstool/' + vcstool_version})
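# Behavior sketch (values invented): a plain path falls through to
# argparse's FileType, while anything containing '://' becomes a urllib
# Request carrying the vcstool User-Agent:
# >>> file_or_url_type('repos.yaml')                      # doctest: +SKIP
# <_io.TextIOWrapper name='repos.yaml' ...>
# >>> file_or_url_type('https://example.com/repos.yaml')  # doctest: +SKIP
# <urllib.request.Request object ...>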
def get_repositories(yaml_file):
try:
root = yaml.safe_load(yaml_file)
except yaml.YAMLError as e:
raise RuntimeError('Input data is not valid yaml format: %s' % e)
try:
repositories = root['repositories']
return get_repos_in_vcstool_format(repositories)
except KeyError as e:
raise RuntimeError('Input data is not valid format: %s' % e)
except TypeError as e:
# try rosinstall file format
try:
return get_repos_in_rosinstall_format(root)
except Exception:
raise RuntimeError('Input data is not valid format: %s' % e)
def get_repos_in_vcstool_format(repositories):
repos = {}
if repositories is None:
print(
ansi('yellowf') + 'List of repositories is empty' + ansi('reset'),
file=sys.stderr)
return repos
for path in repositories:
repo = {}
attributes = repositories[path]
try:
repo['type'] = attributes['type']
repo['url'] = attributes['url']
if 'version' in attributes:
repo['version'] = attributes['version']
except KeyError as e:
print(
ansi('yellowf') + (
"Repository '%s' does not provide the necessary "
'information: %s' % (path, e)) + ansi('reset'),
file=sys.stderr)
continue
repos[path] = repo
return repos
def get_repos_in_rosinstall_format(root):
repos = {}
for i, item in enumerate(root):
if len(item.keys()) != 1:
raise RuntimeError('Input data is not valid format')
repo = {'type': list(item.keys())[0]}
attributes = list(item.values())[0]
try:
path = attributes['local-name']
except KeyError as e:
print(
ansi('yellowf') + (
'Repository #%d does not provide the necessary '
'information: %s' % (i, e)) + ansi('reset'),
file=sys.stderr)
continue
try:
repo['url'] = attributes['uri']
if 'version' in attributes:
repo['version'] = attributes['version']
except KeyError as e:
print(
ansi('yellowf') + (
"Repository '%s' does not provide the necessary "
'information: %s' % (path, e)) + ansi('reset'),
file=sys.stderr)
continue
repos[path] = repo
return repos
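# For reference, minimal sketches of the two accepted input formats
# (repository values invented for illustration):
#
# vcstool format:
#   repositories:
#     src/vcstool:
#       type: git
#       url: https://github.com/dirk-thomas/vcstool.git
#       version: master
#
# rosinstall format:
#   - git:
#       local-name: src/vcstool
#       uri: https://github.com/dirk-thomas/vcstool.git
#       version: master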
def generate_jobs(repos, args):
jobs = []
for path, repo in repos.items():
path = os.path.join(args.path, path)
clients = [c for c in vcstool_clients if c.type == repo['type']]
if not clients:
from vcstool.clients.none import NoneClient
job = {
'client': NoneClient(path),
'command': None,
'cwd': path,
'output':
"Repository type '%s' is not supported" % repo['type'],
'returncode': NotImplemented
}
jobs.append(job)
continue
client = clients[0](path)
command = ImportCommand(
args, repo['url'],
str(repo['version']) if 'version' in repo else None,
recursive=args.recursive, shallow=args.shallow)
job = {'client': client, 'command': command}
jobs.append(job)
return jobs
def add_dependencies(jobs):
paths = [job['client'].path for job in jobs]
for job in jobs:
job['depends'] = set()
path = job['client'].path
while True:
parent_path = os.path.dirname(path)
if parent_path == path:
break
path = parent_path
if path in paths:
job['depends'].add(path)
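# Effect sketch (paths invented): if jobs target 'ws/src' and
# 'ws/src/tool', the nested job gains a dependency on its ancestor, so
# the executor finishes cloning 'ws/src' before starting 'ws/src/tool':
# job_for('ws/src/tool')['depends'] == {'ws/src'}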
def main(args=None, stdout=None, stderr=None):
set_streams(stdout=stdout, stderr=stderr)
parser = get_parser()
add_common_arguments(
parser, skip_hide_empty=True, skip_nested=True, path_nargs='?',
path_help='Base path to clone repositories to')
args = parser.parse_args(args)
try:
input_ = args.input
if isinstance(input_, request.Request):
input_ = request.urlopen(input_)
repos = get_repositories(input_)
except (RuntimeError, request.URLError) as e:
print(ansi('redf') + str(e) + ansi('reset'), file=sys.stderr)
return 1
jobs = generate_jobs(repos, args)
add_dependencies(jobs)
if args.repos:
output_repositories([job['client'] for job in jobs])
workers = args.workers
# for ssh URLs check if the host is known to prevent ssh asking for
# confirmation when using more than one worker
if workers > 1:
ssh_keygen = None
checked_hosts = set()
for job in list(jobs):
if job['command'] is None:
continue
url = job['command'].url
# only check the host from a ssh URL
if not url.startswith('git@') or ':' not in url:
continue
host = url[4:].split(':', 1)[0]
# only check each host name once
if host in checked_hosts:
continue
checked_hosts.add(host)
# get ssh-keygen path once
if ssh_keygen is None:
ssh_keygen = which('ssh-keygen') or False
if not ssh_keygen:
continue
result = run_command([ssh_keygen, '-F', host], '')
if result['returncode']:
print(
'At least one hostname (%s) is unknown, switching to a '
'single worker to allow interactively answering the ssh '
'question to confirm the fingerprint' % host)
workers = 1
break
results = execute_jobs(
jobs, show_progress=True, number_of_workers=workers,
debug_jobs=args.debug)
output_results(results)
any_error = any(r['returncode'] for r in results)
return 1 if any_error else 0
if __name__ == '__main__':
sys.exit(main())
|
apache-2.0
| 6,492,176,253,853,990,000
| 32.712644
| 79
| 0.575406
| false
| 4.240482
| false
| false
| false
|
danielballan/docs
|
source/_cookbook/csv_writer.py
|
1
|
3011
|
# -*- coding: utf-8 -*-
"""
========================================
A Minimal CSV writer for data collection
========================================
Problem
-------
Write (a subset of) the data to a CSV file during data collection.
Approach
--------
Write a callback function that integrates Python's built-in csv module with
bluesky.
Example Solution
----------------
"""
###############################################################################
# Boilerplate imports and configuration
import path
import os
import bluesky as bs
import bluesky.plans as bp
import bluesky.callbacks as bc
import csv
from bluesky.examples import motor, det
import matplotlib.pyplot as plt
# Do this if running the example interactively;
# skip it when building the documentation.
if 'BUILDING_DOCS' not in os.environ:
from bluesky.utils import install_qt_kicker # for notebooks, qt -> nb
install_qt_kicker()
plt.ion()
det.exposure_time = .1 # simulate detector exposure time
RE = bs.RunEngine({})
###############################################################################
# Define a callback class which writes out a CSV file
class CSVWriter(bc.CallbackBase):
def __init__(self, fields, fname_format, fpath):
self._path = path.Path(fpath)
os.makedirs(self._path, exist_ok=True)
self._fname_format = fname_format
self._fields = fields
self._writer = None
self._fout = None
def close(self):
if self._fout is not None:
self._fout.close()
self._fout = None
self._writer = None
def start(self, doc):
self.close()
fname = self._path / self._fname_format.format(**doc)
self._fout = open(fname, 'xt')
self._writer = csv.writer(self._fout)
def descriptor(self, doc):
if self._writer is not None:
self._writer.writerow(self._fields)
def event(self, doc):
data = doc['data']
if self._writer is not None:
self._writer.writerow(data[k] for k in self._fields)
def stop(self, doc):
self.close()
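# Minimal standalone sketch of the callback above (documents invented;
# in a real run the RunEngine supplies them):
# w = CSVWriter(('motor', 'det'), '{user}_{uid:.6s}.csv', '/tmp/csv_demo')
# w.start({'user': 'demo', 'uid': 'abcdef123'})   # opens demo_abcdef.csv
# w.descriptor({})                                # writes the header row
# w.event({'data': {'motor': 0.0, 'det': 1.0}})   # writes one data row
# w.stop({})                                      # closes the file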
###############################################################################
# Set up some callbacks
def create_cbs():
return [bc.LiveTable([motor, det]), bc.LivePlot('det', 'motor')]
fmt = '{user}_{uid:.6s}.csv'
export_path = '/tmp/export_demo'
csv_writer = CSVWriter(('motor', 'det'), fmt, export_path)
# send all documents to the CSV writer
RE.subscribe('all', csv_writer)
###############################################################################
# run the scan
uid, = RE(bp.scan([det], motor, -5, 5, 11),
create_cbs(), user='tcaswell')
###############################################################################
# check file
fname = os.path.join(export_path,
'{user}_{uid:.6s}.csv'.format(user='tcaswell', uid=uid))
print("--- {} ---".format(fname))
with open(fname, 'r') as fin:
for ln in fin:
print(ln.strip())
|
bsd-2-clause
| 6,292,943,657,254,038,000
| 24.091667
| 79
| 0.524078
| false
| 4.05249
| false
| false
| false
|
ajaniv/django-core-models
|
django_core_models/locations/tests/test_validation.py
|
1
|
5553
|
"""
.. module:: django_core_models.location.tests.test_validation
:synopsis: location application validation unit test module.
*location* application validation unit test module.
"""
from __future__ import print_function
from django.core.exceptions import ValidationError
from django_core_utils.tests.test_utils import BaseModelTestCase
from . import factories
from ..validation import (country_validation, language_validation,
post_office_box_validation, postal_code_validation,
province_validation, state_validation)
from .factories import (CountryModelFactory, LanguageModelFactory,
ProvinceModelFactory, StateModelFactory)
class ValidationTestCase(BaseModelTestCase):
"""Base validation unit test class."""
def country_usa(self):
return factories.country_usa()
def country_france(self):
return factories.country_france()
valid_post_office_boxes = (
'PO Box 001', 'P.O. Box 002', 'po b 001', 'po bin 001',
'Post O bin 001', 'P. Office bin 001',
'P.O.Box 003')
invalid_post_office_boxes = ('004 P.O. Box', '005 PO Box', '006', 'abc')
class PostOfficeBoxValidationTestCase(ValidationTestCase):
"""Post office box validation unit test class."""
def test_post_office_box_validation_usa(self):
for pob in valid_post_office_boxes:
post_office_box_validation(self.country_usa(), pob)
def test_post_office_box_validation_exceptions_usa(self):
for pob in invalid_post_office_boxes:
with self.assertRaises(ValidationError):
post_office_box_validation(self.country_usa(), pob)
valid_postal_codes = ('12345', '12345-6789', '12345 - 6789')
invalid_postal_codes = ('1234', '1234A', '12345 A', '12345-6789A')
class PostalCodeValidationTestCase(ValidationTestCase):
"""Postal code validation unit test class."""
def test_postal_code_validation_usa(self):
for postal_box in valid_postal_codes:
postal_code_validation(self.country_usa(), postal_box)
def test_postal_code_validation_exceptions_usa(self):
for pob in invalid_postal_codes:
with self.assertRaises(ValidationError):
postal_code_validation(self.country_usa(), pob)
class CountryValidationTestCase(ValidationTestCase):
"""Country validation unit test class."""
def test_country_validation_usa(self):
country_validation(self.country_usa())
def test_country_validation_exceptions(self):
with self.assertRaises(ValidationError):
country_validation(CountryModelFactory(
name="USA", iso_code="US"))
class LanguageValidationTestCase(ValidationTestCase):
"""Language validation unit test class."""
def test_language_validation_usa(self):
language_validation(LanguageModelFactory(
name=LanguageModelFactory.LANGUAGE_FRENCH,
iso_code=LanguageModelFactory.ISO_639_2_FR))
def test_language_validation_exceptions(self):
with self.assertRaises(ValidationError):
language_validation(LanguageModelFactory(
name="French", iso_code="zz"))
class ProvinceValidationTestCase(ValidationTestCase):
"""Province validation unit test class."""
def test_province_validation(self):
province_validation(ProvinceModelFactory(
name=ProvinceModelFactory.PROVINCE_NORMANDY,
iso_code=ProvinceModelFactory.ISO_3166_2_NORMANDY,
country=self.country_france()))
def test_province_validation_invalid_iso(self):
with self.assertRaises(ValidationError):
province_validation(ProvinceModelFactory(
name=ProvinceModelFactory.PROVINCE_NORMANDY,
iso_code="FR-P",
country=self.country_france()))
def test_province_validation_invalid_name(self):
with self.assertRaises(ValidationError):
province_validation(ProvinceModelFactory(
name="Bad name",
iso_code=ProvinceModelFactory.ISO_3166_2_NORMANDY,
country=self.country_france()))
def test_province_validation_invalid_country(self):
with self.assertRaises(ValidationError):
province_validation(ProvinceModelFactory(
name=ProvinceModelFactory.PROVINCE_NORMANDY,
iso_code=ProvinceModelFactory.ISO_3166_2_NORMANDY,
country=self.country_usa()))
class StateValidationTestCase(ValidationTestCase):
"""State validation unit test class."""
def test_state_validation(self):
state_validation(StateModelFactory(
name="New Jersey", iso_code="US-NJ",
country=self.country_usa()))
def test_state_validation_invalid_iso(self):
with self.assertRaises(ValidationError):
state_validation(StateModelFactory(
name="New Jersey",
iso_code="US-NJT",
country=self.country_usa()))
def test_state_validation_invalid_name(self):
with self.assertRaises(ValidationError):
state_validation(StateModelFactory(
name="Old Jersey",
iso_code="US-NJ",
country=self.country_usa()))
def test_state_validation_invalid_country(self):
with self.assertRaises(ValidationError):
state_validation(StateModelFactory(
name="New Jersey",
iso_code="US-NJ",
country=self.country_france()))
|
mit
| -865,493,100,001,814,000
| 36.02
| 77
| 0.660004
| false
| 4.015184
| true
| false
| false
|
nitely/Spirit
|
spirit/core/utils/decorators.py
|
1
|
1458
|
# -*- coding: utf-8 -*-
from functools import wraps
from django.core.exceptions import PermissionDenied
from django.contrib.auth.views import redirect_to_login
from django.shortcuts import redirect
from spirit.core.conf import settings
def moderator_required(view_func):
@wraps(view_func)
def wrapper(request, *args, **kwargs):
user = request.user
if not user.is_authenticated:
return redirect_to_login(next=request.get_full_path(),
login_url=settings.LOGIN_URL)
if not user.st.is_moderator:
raise PermissionDenied
return view_func(request, *args, **kwargs)
return wrapper
def administrator_required(view_func):
@wraps(view_func)
def wrapper(request, *args, **kwargs):
user = request.user
if not user.is_authenticated:
return redirect_to_login(next=request.get_full_path(),
login_url=settings.LOGIN_URL)
if not user.st.is_administrator:
raise PermissionDenied
return view_func(request, *args, **kwargs)
return wrapper
def guest_only(view_func):
# TODO: test!
@wraps(view_func)
def wrapper(request, *args, **kwargs):
if request.user.is_authenticated:
return redirect(request.GET.get('next', request.user.st.get_absolute_url()))
return view_func(request, *args, **kwargs)
return wrapper
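# Usage sketch (view names invented); each decorator wraps a normal
# Django view function:
#
# @moderator_required
# def topic_moderate(request, topic_id):
#     ...
#
# @guest_only
# def register_view(request):
#     ...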
|
mit
| 2,144,196,783,148,449,000
| 25.509091
| 88
| 0.631001
| false
| 4.05
| false
| false
| false
|
roryyorke/python-control
|
control/margins.py
|
1
|
13996
|
"""margins.py
Functions for computing stability margins and related functions.
Routines in this module:
margins.stability_margins
margins.phase_crossover_frequencies
margins.margin
"""
# Python 3 compatibility (needs to go here)
from __future__ import print_function
"""Copyright (c) 2011 by California Institute of Technology
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of the California Institute of Technology nor
the names of its contributors may be used to endorse or promote
products derived from this software without specific prior
written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL CALTECH
OR THE CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
SUCH DAMAGE.
Author: Richard M. Murray
Date: 14 July 2011
$Id$
"""
import math
import numpy as np
import scipy as sp
from . import xferfcn
from .lti import issiso
from . import frdata
__all__ = ['stability_margins', 'phase_crossover_frequencies', 'margin']
# helper functions for stability_margins
def _polyimsplit(pol):
"""split a polynomial with (iw) applied into a real and an
imaginary part with w applied"""
rpencil = np.zeros_like(pol)
ipencil = np.zeros_like(pol)
rpencil[-1::-4] = 1.
rpencil[-3::-4] = -1.
ipencil[-2::-4] = 1.
ipencil[-4::-4] = -1.
return pol * rpencil, pol*ipencil
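# Worked example (coefficients invented): for pol = [1, 2, 3, 4], i.e.
# s**3 + 2*s**2 + 3*s + 4 evaluated at s = 1j*w,
# P(jw) = -1j*w**3 - 2*w**2 + 3j*w + 4, so
# >>> _polyimsplit([1, 2, 3, 4])  # doctest: +SKIP
# (array([ 0, -2,  0,  4]), array([-1,  0,  3,  0]))
# the first array is the real part -2*w**2 + 4 and the second the
# imaginary part -w**3 + 3*w, both as polynomials in w.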
def _polysqr(pol):
"""return a polynomial squared"""
return np.polymul(pol, pol)
# Took the framework for the old function by
# Sawyer B. Fuller <minster@caltech.edu>, removed a lot of the innards
# and replaced with analytical polynomial functions for LTI systems.
#
# idea for the frequency data solution copied/adapted from
# https://github.com/alchemyst/Skogestad-Python/blob/master/BODE.py
# Rene van Paassen <rene.vanpaassen@gmail.com>
#
# RvP, July 8, 2014, corrected to exclude phase=0 crossing for the gain
# margin polynomial
# RvP, July 8, 2015, augmented to calculate all phase/gain crossings with
# frd data. Correct to return smallest phase
# margin, smallest gain margin and their frequencies
# RvP, Jun 10, 2017, modified the inclusion of roots found for phase
# crossing to include all >= 0, made subsequent calc
# insensitive to div by 0
# also changed the selection of which crossings to
# return on basis of "A note on the Gain and Phase
# Margin Concepts" Journal of Control and Systems
# Engineering, Yazdan Bavafi-Toosi, Dec 2015, vol 3
# issue 1, pp 51-59, closer to Matlab behavior, but
# not completely identical in edge cases, which don't
# cross but touch gain=1
def stability_margins(sysdata, returnall=False, epsw=0.0):
"""Calculate stability margins and associated crossover frequencies.
Parameters
----------
sysdata: LTI system or (mag, phase, omega) sequence
sys : LTI system
Linear SISO system
mag, phase, omega : sequence of array_like
Arrays of magnitudes (absolute values, not dB), phases (degrees),
and corresponding frequencies. Crossover frequencies returned are
in the same units as those in `omega` (e.g., rad/sec or Hz).
returnall: bool, optional
If true, return all margins found. If False (default), return only the
minimum stability margins. For frequency data or FRD systems, only
margins in the given frequency region can be found and returned.
epsw: float, optional
Frequencies below this value (default 0.0) are considered static gain,
and not returned as margin.
Returns
-------
gm: float or array_like
Gain margin
pm: float or array_like
Phase margin
sm: float or array_like
Stability margin, the minimum distance from the Nyquist plot to -1
wg: float or array_like
Frequency for gain margin (at phase crossover, phase = -180 degrees)
wp: float or array_like
Frequency for phase margin (at gain crossover, gain = 1)
ws: float or array_like
Frequency for stability margin (complex gain closest to -1)
"""
try:
if isinstance(sysdata, frdata.FRD):
sys = frdata.FRD(sysdata, smooth=True)
elif isinstance(sysdata, xferfcn.TransferFunction):
sys = sysdata
elif getattr(sysdata, '__iter__', False) and len(sysdata) == 3:
mag, phase, omega = sysdata
sys = frdata.FRD(mag * np.exp(1j * phase * math.pi/180),
omega, smooth=True)
else:
sys = xferfcn._convert_to_transfer_function(sysdata)
except Exception as e:
print(e)
raise ValueError("Margin sysdata must be either a linear system or "
"a 3-sequence of mag, phase, omega.")
# calculate gain of system
if isinstance(sys, xferfcn.TransferFunction):
# check for siso
if not issiso(sys):
raise ValueError("Can only do margins for SISO system")
# real and imaginary part polynomials in omega:
rnum, inum = _polyimsplit(sys.num[0][0])
rden, iden = _polyimsplit(sys.den[0][0])
# test (imaginary part of tf) == 0, for phase crossover/gain margins
test_w_180 = np.polyadd(np.polymul(inum, rden), np.polymul(rnum, -iden))
w_180 = np.roots(test_w_180)
# first remove imaginary and negative frequencies, epsw removes the
# "0" frequency for type-2 systems
w_180 = np.real(w_180[(np.imag(w_180) == 0) * (w_180 >= epsw)])
# evaluate response at remaining frequencies, to test for phase 180 vs 0
with np.errstate(all='ignore'):
resp_w_180 = np.real(
np.polyval(sys.num[0][0], 1.j*w_180) /
np.polyval(sys.den[0][0], 1.j*w_180))
# only keep frequencies where the negative real axis is crossed
w_180 = w_180[np.real(resp_w_180) <= 0.0]
# and sort
w_180.sort()
# test magnitude is 1 for gain crossover/phase margins
test_wc = np.polysub(np.polyadd(_polysqr(rnum), _polysqr(inum)),
np.polyadd(_polysqr(rden), _polysqr(iden)))
wc = np.roots(test_wc)
wc = np.real(wc[(np.imag(wc) == 0) * (wc > epsw)])
wc.sort()
# the stability margin is trickier to derive: it relies on the distance
# from the Nyquist curve to the point -1; set the derivative to zero to
# find candidate frequencies, and require the second derivative > 0 for
# a minimum
test_wstabd = np.polyadd(_polysqr(rden), _polysqr(iden))
test_wstabn = np.polyadd(_polysqr(np.polyadd(rnum,rden)),
_polysqr(np.polyadd(inum,iden)))
test_wstab = np.polysub(
np.polymul(np.polyder(test_wstabn),test_wstabd),
np.polymul(np.polyder(test_wstabd),test_wstabn))
# find the solutions, for positive omega, and only real ones
wstab = np.roots(test_wstab)
wstab = np.real(wstab[(np.imag(wstab) == 0) *
(np.real(wstab) >= 0)])
# and find the value of the 2nd derivative there, needs to be positive
wstabplus = np.polyval(np.polyder(test_wstab), wstab)
wstab = np.real(wstab[(np.imag(wstab) == 0) * (wstab > epsw) *
(wstabplus > 0.)])
wstab.sort()
else:
# a bit coarse, have the interpolated frd evaluated again
def _mod(w):
"""Calculate |G(jw)| - 1"""
return np.abs(sys._evalfr(w)[0][0]) - 1
def _arg(w):
"""Calculate the phase angle at -180 deg"""
return np.angle(-sys._evalfr(w)[0][0])
def _dstab(w):
"""Calculate the distance from -1 point"""
return np.abs(sys._evalfr(w)[0][0] + 1.)
# Find all crossings, note that this depends on omega having
# a correct range
widx = np.where(np.diff(np.sign(_mod(sys.omega))))[0]
wc = np.array(
[sp.optimize.brentq(_mod, sys.omega[i], sys.omega[i+1])
for i in widx])
# find the phase crossings ang(H(jw)) == -180
widx = np.where(np.diff(np.sign(_arg(sys.omega))))[0]
widx = widx[np.real(sys._evalfr(sys.omega[widx])[0][0]) <= 0]
w_180 = np.array(
[sp.optimize.brentq(_arg, sys.omega[i], sys.omega[i+1])
for i in widx])
# find all stab margins?
widx, = np.where(np.diff(np.sign(np.diff(_dstab(sys.omega)))) > 0)
wstab = np.array(
[sp.optimize.minimize_scalar(_dstab,
bracket=(sys.omega[i], sys.omega[i+1])
).x
for i in widx])
wstab = wstab[(wstab >= sys.omega[0]) * (wstab <= sys.omega[-1])]
# margins, as iterables, converted frdata and xferfcn calculations to
# vector for this
with np.errstate(all='ignore'):
gain_w_180 = np.abs(sys._evalfr(w_180)[0][0])
GM = 1.0/gain_w_180
SM = np.abs(sys._evalfr(wstab)[0][0]+1)
PM = np.remainder(np.angle(sys._evalfr(wc)[0][0], deg=True), 360.0) - 180.0
if returnall:
return GM, PM, SM, w_180, wc, wstab
else:
if GM.shape[0] and not np.isinf(GM).all():
with np.errstate(all='ignore'):
gmidx = np.where(np.abs(np.log(GM)) ==
np.min(np.abs(np.log(GM))))
else:
gmidx = -1
if PM.shape[0]:
pmidx = np.where(np.abs(PM) == np.amin(np.abs(PM)))[0]
return (
(not gmidx != -1 and float('inf')) or GM[gmidx][0],
(not PM.shape[0] and float('inf')) or PM[pmidx][0],
(not SM.shape[0] and float('inf')) or np.amin(SM),
(not gmidx != -1 and float('nan')) or w_180[gmidx][0],
(not wc.shape[0] and float('nan')) or wc[pmidx][0],
(not wstab.shape[0] and float('nan')) or wstab[SM==np.amin(SM)][0])
# Contributed by Steffen Waldherr <waldherr@ist.uni-stuttgart.de>
def phase_crossover_frequencies(sys):
"""Compute frequencies and gains at intersections with real axis
in Nyquist plot.
Call as:
omega, gain = phase_crossover_frequencies(sys)
Returns
-------
omega: 1d array of (non-negative) frequencies where Nyquist plot
intersects the real axis
gain: 1d array of corresponding gains
Examples
--------
>>> tf = TransferFunction([1], [1, 2, 3, 4])
>>> phase_crossover_frequencies(tf)
(array([ 1.73205081, 0. ]), array([-0.5 , 0.25]))
"""
# Convert to a transfer function
tf = xferfcn._convert_to_transfer_function(sys)
# if not siso, fall back to (0,0) element
#! TODO: should add a check and warning here
num = tf.num[0][0]
den = tf.den[0][0]
# Compute frequencies that we cross over the real axis
numj = (1.j)**np.arange(len(num)-1,-1,-1)*num
denj = (-1.j)**np.arange(len(den)-1,-1,-1)*den
allfreq = np.roots(np.imag(np.polymul(numj,denj)))
realfreq = np.real(allfreq[np.isreal(allfreq)])
realposfreq = realfreq[realfreq >= 0.]
# using real() to avoid rounding errors and results like 1+0j
# it would be nice to have a vectorized version of self.evalfr here
gain = np.real(np.asarray([tf._evalfr(f)[0][0] for f in realposfreq]))
return realposfreq, gain
def margin(*args):
"""margin(sysdata)
Calculate gain and phase margins and associated crossover frequencies
Parameters
----------
sysdata : LTI system or (mag, phase, omega) sequence
sys : StateSpace or TransferFunction
Linear SISO system
mag, phase, omega : sequence of array_like
Input magnitude, phase (in deg.), and frequencies (rad/sec) from
bode frequency response data
Returns
-------
gm : float
Gain margin
pm : float
Phase margin (in degrees)
wg: float
Frequency for gain margin (at phase crossover, phase = -180 degrees)
wp: float
Frequency for phase margin (at gain crossover, gain = 1)
Margins are calculated for a SISO open-loop system.
If there is more than one gain crossover, the one at the smallest
margin (deviation from gain = 1), in absolute sense, is
returned. Likewise the smallest phase margin (in absolute sense)
is returned.
Examples
--------
>>> sys = tf(1, [1, 2, 1, 0])
>>> gm, pm, wg, wp = margin(sys)
"""
if len(args) == 1:
sys = args[0]
margin = stability_margins(sys)
elif len(args) == 3:
margin = stability_margins(args)
else:
raise ValueError("Margin needs 1 or 3 arguments; received %i."
% len(args))
return margin[0], margin[1], margin[3], margin[4]
|
bsd-3-clause
| 1,176,016,653,817,876,700
| 37.032609
| 80
| 0.61539
| false
| 3.556798
| true
| false
| false
|
wrenoud/blueberry-bush
|
RepositoryState.py
|
1
|
1808
|
import os
from FileState import FileStateLocal
class Remote(object):
def __init__(self): pass
def create(self): pass
def update(self): pass
def modified(self): pass
def delete(self): pass
class RepositoryState(object):
"""Manages the sync information, this includes the local root, ingnores, files, and update que"""
def __init__(self, rootpath):
self.root = os.path.abspath(rootpath)
self.local_files = {}
def create(self, src_path):
local_file = FileStateLocal(self.root, src_path)
if local_file.check_exists():
local_file.check_modified()
local_file.check_size()
local_file.check_hash()
self.local_files[local_file.key] = local_file
return local_file.key
def modified(self, src_path):
src_key = FileStateLocal.as_key(self.root,src_path)
self.local_files[src_key].check_size()
self.local_files[src_key].check_modified()
self.local_files[src_key].check_hash()
return src_key
def move(self, src_path, dest_path):
src_key = FileStateLocal.as_key(self.root,src_path)
self.local_files[src_key].local_path = dest_path
dest_key = self.local_files[src_key].key
self.local_files[dest_key] = self.local_files.pop(src_key)
return (src_key,dest_key)
def delete(self, src_path):
src_key = FileStateLocal.as_key(self.root,src_path)
del self.local_files[src_key]
return src_key
def ignore(self, src_path):
dir, name = os.path.split(src_path)
if name.startswith(".~lock"): return True
if name.endswith("~"): return True
if name == STATE_FILE: return True
return False
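# Usage sketch (paths invented; FileStateLocal semantics assumed from
# its use above):
# repo = RepositoryState('/tmp/sync_root')
# key = repo.create('/tmp/sync_root/a.txt')  # hash/size/mtime captured
# src_key, dest_key = repo.move('/tmp/sync_root/a.txt', '/tmp/sync_root/b.txt')
# removed_key = repo.delete('/tmp/sync_root/b.txt')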
|
gpl-2.0
| -8,155,816,514,990,178,000
| 30.719298
| 101
| 0.601217
| false
| 3.637827
| false
| false
| false
|
texnokrates/electroballz
|
electroballz/single_coeff.py
|
1
|
1461
|
from scipy import *
from scipy.special import sph_jn, sph_yn
# The following is an entirely computationally inefficient draft, intended for basic orientation.
def jl(l,z):
"""Wrapper for sph_jn (discards the unnecessary data)"""
return sph_jn(l, z)[0][l]
def yl(l,z):
"""Wrapper for sph_yn (discards the unnecessary data)"""
return sph_yn(l, z)[0][l]
def h1l(l,z):
"""First spherical Hankel function"""
return jl(l,z) + 1j*yl(l,z)
def h2l(l,z):
"""Second spherical Hankel function"""
return jl(l,z) - 1j*yl(l,z)
def bf_coeff(l, km, k0, etam, eta0, r):
"""Ratios between (b1lm,f1lm) and a1lm. See the single_spherical_wave_scatter.nb file"""
sph_j_kmr = sph_jn(l, km*r)
sph_j_k0r = sph_jn(l, k0*r)
sph_y_k0r = sph_yn(l, k0*r)
jm = sph_j_kmr[0][l]
h01 = sph_j_k0r[0][l] + 1j * sph_y_k0r[0][l]
h02 = sph_j_k0r[0][l] - 1j * sph_y_k0r[0][l]
Jm = jm + km * r * sph_j_kmr[1][l]
H01 = h01 + k0 * r * (sph_j_k0r[1][l] + 1j * sph_y_k0r[1][l])
H02 = h02 + k0 * r * (sph_j_k0r[1][l] - 1j * sph_y_k0r[1][l])
denom1 = h01*Jm*k0*eta0 - H01*jm*km*etam
b1_a1 = - (h02*Jm*k0*eta0 - H02*jm*km*etam) / denom1
f1_a1 = - k0 * sqrt(eta0*etam) * (H01*h02 - h01*H02) / denom1
denom2 = (H01*jm*km*eta0 - h01*Jm*k0*etam)
b2_a2 = - (H02*jm*km*eta0 - h02*Jm*k0*etam) / denom2
f2_a2 = - k0 * sqrt(eta0*etam) * (-H01*h02 + h01*H02) / denom2
return (b1_a1, f1_a1, b2_a2, f2_a2)
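# Illustrative call (all physical values invented): for mode l=1 and a
# sphere of radius r with interior (km, etam) and exterior (k0, eta0)
# parameters, the returned tuple holds the b/a and f/a ratios for the
# two polarizations:
# b1_a1, f1_a1, b2_a2, f2_a2 = bf_coeff(1, km=2.0, k0=1.0, etam=377.0, eta0=377.0, r=0.5)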
|
gpl-3.0
| -3,926,437,970,418,607,000
| 30.76087
| 97
| 0.577002
| false
| 1.987755
| false
| false
| false
|
firebitsbr/termineter
|
framework/modules/dump_tables.py
|
1
|
3439
|
# framework/modules/dump_tables.py
#
# Copyright 2011 Spencer J. McIntyre <SMcIntyre [at] SecureState [dot] net>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
import os
from time import sleep
from c1218.errors import C1218ReadTableError
from c1219.data import C1219_TABLES
from framework.templates import TermineterModuleOptical
class Module(TermineterModuleOptical):
def __init__(self, *args, **kwargs):
TermineterModuleOptical.__init__(self, *args, **kwargs)
self.version = 2
self.author = ['Spencer McIntyre']
self.description = 'Dump Readable C12.19 Tables From The Device To A CSV File'
self.detailed_description = 'This module will enumerate the readable tables on the smart meter and write them out to a CSV formated file for analysis. The format is table id, table name, table data length, table data. The table data is represented in hex.'
self.options.add_integer('LOWER', 'table id to start reading from', default=0)
self.options.add_integer('UPPER', 'table id to stop reading from', default=256)
self.options.add_string('FILE', 'file to write the csv data into', default='smart_meter_tables.csv')
def run(self):
conn = self.frmwk.serial_connection
logger = self.logger
lower_boundary = self.options['LOWER']
upper_boundary = self.options['UPPER']
out_file = open(self.options['FILE'], 'w', 1)
if not self.frmwk.serial_login():
logger.warning('meter login failed, some tables may not be accessible')
number_of_tables = 0
self.frmwk.print_status('Starting Dump. Writing table data to: ' + self.options.get_option_value('FILE'))
for tableid in xrange(lower_boundary, (upper_boundary + 1)):
try:
data = conn.get_table_data(tableid)
except C1218ReadTableError as error:
data = None
if error.code == 10: # ISSS
conn.stop()
logger.warning('received ISSS error, connection stopped, will sleep before retrying')
sleep(0.5)
if not self.frmwk.serial_login():
logger.warning('meter login failed, some tables may not be accessible')
try:
data = conn.get_table_data(tableid)
except C1218ReadTableError as error:
data = None
if error.code == 10:
raise error # tried to re-sync communications but failed, you should reconnect and rerun the module
if data:
self.frmwk.print_status('Found readable table, ID: ' + str(tableid) + ' Name: ' + (C1219_TABLES.get(tableid) or 'UNKNOWN'))
# format is: table id, table name, table data length, table data
out_file.write(','.join([str(tableid), (C1219_TABLES.get(tableid) or 'UNKNOWN'), str(len(data)), data.encode('hex')]) + os.linesep)
number_of_tables += 1
out_file.close()
self.frmwk.print_status('Successfully copied ' + str(number_of_tables) + ' tables to disk.')
return
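# Sample of the resulting CSV rows (ids/names/data invented for
# illustration; format is table id, table name, data length, hex data):
# 0,GEN_CONFIG_TBL,19,0a1b2c...
# 1,GENERAL_MFG_ID_TBL,24,4d5e6f...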
|
gpl-3.0
| 2,490,079,865,939,088,000
| 45.472973
| 259
| 0.719104
| false
| 3.361681
| false
| false
| false
|
skeezix/compo4all
|
spaghetti-server/singlescore_handler.py
|
1
|
12920
|
# update_hi - receive binary and i) parse it, ii) update json tally as needed, iii) store .hi file for later
# get_hi -- fetch a bin for the emu
# get_json_tally - dump highscore table as json (for fancy frontend to display, say)
# get_html_tally - dump highscore in vaguely readable html table (for web browser quickies)
# get_last_modify_epoch - get epoch-time of last tally modify
import logging
import json
import array
import os
import pprint
import time
import traceback
import profile
from paths import _basepath
import modulemap
import activity_log
SCOREBOARD_MAX=500
logging.info ( "LOADING: singlescore_handler" )
# "score" should not be supplied, unless its multiscore sending its shit here
def update_hi ( req, score_int=None ):
#pp = pprint.PrettyPrinter ( indent=4 )
# base game path
writepath = _basepath ( req )
try:
logging.debug ( "Attempt to create dirs %s" % ( writepath ) )
os.makedirs ( writepath )
except:
pass
# pull up existing tally file
#
tally = _read_tally ( req )
sb = tally [ 'scoreboard' ]
# parse new hi buffer
#
if score_int:
hi = score_int
else:
hi = parse_hi_bin ( req, req [ '_bindata' ] )
# is any of this new buffer better than existing tally?
# if so, update tally file and record it
# if not, we're done
# new tally update? great ..
# .. store hi-file
# .. store new tally file
# -------
# does this score factor into the high score table, or too low to count?
if False and hi < sb [ SCOREBOARD_MAX - 1 ][ 'score' ]:
logging.info ( "hidb - %s - submitter score of %d is NOT sufficient to enter scoreboard (lowest %d, highest %d)" % ( req [ 'gamename' ], hi, sb [ SCOREBOARD_MAX - 1 ][ 'score' ], sb [ 0 ][ 'score' ] ) )
return
# is score same as existing top .. if so, its just resubmitting the score they pulled down, likely, so.. discard
if False and hi == sb [ 0 ][ 'score' ]:
logging.info ( "hidb - %s - submitter score of %d is same as highest score .. probably just looping. (lowest %d, highest %d)" % ( req [ 'gamename' ], hi, sb [ SCOREBOARD_MAX - 1 ][ 'score' ], sb [ 0 ][ 'score' ] ) )
return
# okay, so this score beats at least one existing entry.. start at the top, pushing the way down
if False:
logging.info ( "hidb - %s - submitter score of %d IS sufficient to enter scoreboard (lowest %d, highest %d)" % ( req [ 'gamename' ], hi, sb [ SCOREBOARD_MAX - 1 ][ 'score' ], sb [ 0 ][ 'score' ] ) )
# determine desired sort order
order = 'highest-first'
try:
_order = modulemap.gamemap [ req [ 'gamename' ] ] [ '_general'] [ 'ordering' ]
logging.info ( 'hidb - %s - ordering from conf is %s' % ( req [ 'gamename' ], _order ) )
if _order in ( 'highest-first' ,'lowest-first' ):
order = _order
else:
order = 'highest-first'
except:
pp = pprint.PrettyPrinter(indent=4)
pp.pprint ( modulemap.gamemap [ req [ 'gamename' ] ] )
print modulemap.gamemap [ req [ 'gamename' ] ]
traceback.print_exc()
logging.info ( 'hidb - %s - ordering -> exception .. assuming highest-first' % ( req [ 'gamename' ] ) )
order = 'highest-first'
logging.info ( 'hidb - %s - ordering to use is %s' % ( req [ 'gamename' ], order ) )
# create new score entry
d = dict()
d [ 'prid' ] = req [ 'prid' ]
d [ 'score' ] = hi
d [ 'time' ] = int ( time.time() )
# old: insert with manual assumed-ascending sort order
if False:
for i in range ( SCOREBOARD_MAX ):
if hi > sb [ i ][ 'score' ]:
# log the activity
activity_log.log_entry ( req, d, i )
# insert
sb.insert ( i, d )
# drop off last guy
sb.pop()
# if we updated the first entry, the very highest score, spit out a new .hi file
# (mspacman only has a single high score, so we only update it for the highest score.. not a whole table)
if i == 0 and score_int == None:
f = open ( writepath + req [ 'gamename' ] + ".hi", "w" )
f.write ( build_hi_bin ( req, sb [ 0 ][ 'score' ] ) )
f.close()
break
# insert at first, assuming a post-sort; we can drop the 'worst' entry after sort
if True:
sb.insert ( 0, d )
# update activity log.. try to find the entry match and publish it
if True:
for i in range ( SCOREBOARD_MAX ):
if d [ 'prid' ] == sb [ i ] [ 'prid' ] and d [ 'score' ] == sb [ i ] [ 'score' ] and d [ 'time' ] == sb [ i ] [ 'time' ]:
activity_log.log_entry ( req, d, i )
break
# post-sort to games desired sort order
# reverse=False -> ascending (lowest first), lowest is best
# reverse=True -> descending (highest first), highest is best -> most typical case
def _sortvalue ( entry ):
if entry [ 'score' ] == 0:
if order == 'lowest-first':
return 999999999999
else:
return -1
else:
return entry [ 'score' ]
if True:
reversify = True
if order == 'lowest-first':
reversify = False
try:
sb.sort ( key=_sortvalue, reverse=reversify )
except:
traceback.print_exc()
# drop 'worst' (last, since we sorted) entry
if True:
sb.pop()
#logging.info ( 'hidb - %s - sorted ' % ( req [ 'gamename' ] ) )
# update stats and write out the updated tally file
tally [ 'hi' ] = sb [ 0 ][ 'score' ]
tally [ 'prid' ] = sb [ 0 ][ 'prid' ]
tallyfile = json.dumps ( tally )
f = open ( writepath + req [ 'gamename' ] + ".json", "w" )
f.write ( tallyfile )
f.close()
#logging.debug ( "received len %d" % ( req [ '_binlen' ] ) )
return
def get_hi ( req ):
req [ '_bindata' ] = build_hi_bin ( req, 0 )
req [ '_binlen' ] = len ( req [ '_bindata' ] )
logging.info ( "%s - pulled generated zero-score hi file (len %s)" % ( req [ 'gamename' ], req [ '_binlen' ] ) )
'''
writepath = _basepath ( req )
try:
f = open ( writepath + req [ 'gamename' ] + ".hi", "r" )
bindata = f.read()
f.close()
req [ '_bindata' ] = bindata
req [ '_binlen' ] = len ( bindata )
logging.info ( "%s - pulled existant hi file (len %s)" % ( req [ 'gamename' ], req [ '_binlen' ] ) )
except:
req [ '_bindata' ] = build_hi_bin ( req, 270 )
req [ '_binlen' ] = len ( req [ '_bindata' ] )
logging.info ( "%s - pulled generated zero-score hi file (len %s)" % ( req [ 'gamename' ], req [ '_binlen' ] ) )
'''
return
def get_json_tally ( req, raw=False ):
tally = _read_tally ( req )
for ent in tally [ 'scoreboard' ]:
prident = profile.fetch_pridfile_as_dict ( ent [ 'prid' ] )
if prident == None:
prident = profile.NULL_PROFILE
ent [ 'shortname' ] = prident [ 'shortname' ]
ent [ 'longname' ] = prident [ 'longname' ]
if '_general' in modulemap.gamemap [ req [ 'gamename' ] ]:
if 'dispunit' in modulemap.gamemap [ req [ 'gamename' ] ][ '_general' ]:
ent [ 'dispunit' ] = modulemap.gamemap [ req [ 'gamename' ] ] [ '_general' ][ 'dispunit' ]
del ent [ 'prid' ]
if raw:
req [ '_bindata' ] = tally
else:
req [ '_bindata' ] = json.dumps ( tally )
req [ '_binlen' ] = len ( req [ '_bindata' ] )
return
def get_html_tally ( req ):
tally = _read_tally ( req )
if '_backdate' in req:
if req [ '_backdate' ].isdigit():
timeframe = 'Specific Month: ' + req [ '_backdate' ]
else:
timeframe = 'All Time'
else:
timeframe = 'Current Month'
html = ''
html += "<h2>" + req [ 'gamename' ] + "</h2>\n"
html += "<h3>" + timeframe + "</h3>\n"
html += "<table>\n"
html += '<tr>\n'
html += ' <td style="padding:0 15px 0 15px;"><b>Rank</b></td>\n'
html += ' <td style="padding:0 15px 0 15px;"><b>Initial</b></td>\n'
html += ' <td style="padding:0 15px 0 15px;"><b>Name</b></td>\n'
html += ' <td style="padding:0 15px 0 15px;"><b>Score</b></td>\n'
html += ' <td style="padding:0 15px 0 15px;"><b>When</b></td>\n'
html += '</tr>\n'
i = 1
pridcache = dict()
lastprident = None
lastrun = 0 # for an RLE-like run count
for ent in tally [ 'scoreboard' ]:
prident = None
if ent [ 'prid' ]:
try:
prident = pridcache [ ent [ 'prid' ] ]
except:
prident = profile.fetch_pridfile_as_dict ( ent [ 'prid' ] )
pridcache [ ent [ 'prid' ] ] = prident
if prident == None:
prident = profile.NULL_PROFILE
tlocal = time.localtime ( ent [ 'time' ] )
tdisplay = time.strftime ( '%d-%b-%Y', tlocal )
# units
unit = ''
if '_general' in modulemap.gamemap [ req [ 'gamename' ] ]:
if 'dispunit' in modulemap.gamemap [ req [ 'gamename' ] ][ '_general' ]:
unit = ' ' + str ( modulemap.gamemap [ req [ 'gamename' ] ][ '_general' ][ 'dispunit' ] )
showrow = 1 # 0 no, 1 yes, 2 ellipses
if False: # True -> force to full length display
lastprident = None # if uncommented, forces full display .. no ellipses hidden entries
if lastprident == prident:
showrow = 0
lastrun += 1
else:
# if not first row, and the RLE is significant .. show an ellipses
if lastprident != None and lastrun > 0:
showrow = 2
else:
showrow = 1
# last and current are not the same, so RLE is back to zero
lastrun = 0
if showrow == 0:
pass # suppress
else:
if showrow == 2:
# so our last row is not same as this row, and last guy was not also the first
# row.. so show "..."
html += '<tr>\n'
html += ' <td style="padding:0 15px 0 15px;">' + "" + "</td>\n"
html += ' <td style="padding:0 15px 0 15px;">' + "" + "</td>\n"
html += ' <td style="padding:0 15px 0 15px;">' + "..." + "</td>\n"
html += ' <td style="padding:0 15px 0 15px;"></td>\n'
html += ' <td style="padding:0 15px 0 15px;"></td>\n'
html += '</tr>\n'
# showrow == 1, or showrow == 2 .. show this line
html += '<tr>\n'
html += ' <td style="padding:0 15px 0 15px;">' + str ( i ) + "</td>\n"
html += ' <td style="padding:0 15px 0 15px;">' + prident [ 'shortname' ] + "</td>\n"
html += ' <td style="padding:0 15px 0 15px;">' + prident [ 'longname' ] + "</td>\n"
if ent [ 'score' ] > 0:
html += ' <td style="padding:0 15px 0 15px;">' + str ( ent [ 'score' ] ) + unit + "</td>\n"
else:
html += ' <td style="padding:0 15px 0 15px;">-</td>\n'
if ent [ 'time' ] > 0:
html += ' <td style="padding:0 15px 0 15px;">' + tdisplay + "</td>\n"
else:
html += ' <td style="padding:0 15px 0 15px;"></td>\n'
html += '</tr>\n'
lastprident = prident
i += 1
html += "</table>\n"
html += "<p>%d unique profiles in the leaderboard</p>\n" % ( len ( pridcache ) )
req [ '_bindata' ] = html
req [ '_binlen' ] = len ( req [ '_bindata' ] )
return
def get_last_modify_epoch ( req ):
try:
filename = _basepath ( req ) + req [ 'gamename' ] + ".json"
return int ( os.path.getmtime ( filename ) )
except:
return 0
# ---------------
def _read_tally ( req ):
writepath = _basepath ( req )
try:
f = open ( writepath + req [ 'gamename' ] + ".json", "r" )
tallyfile = f.read()
f.close()
tally = json.loads ( tallyfile )
except:
logging.warning ( "%s - assuming new score file (all zeroes)" % ( req [ 'gamename' ] ) )
tally = dict()
tally [ 'hi' ] = 0
tally [ 'prid' ] = '_default_'
scoreboard = list()
for i in range ( SCOREBOARD_MAX ):
scoreboard.append ( { 'prid': '_default_', 'score': 0, 'time': 0 } )
tally [ 'scoreboard' ] = scoreboard
return tally
def parse_hi_bin ( req, bindata ):
return modulemap.gamemap [ req [ 'gamename' ] ][ 'module' ].parse_hi_bin ( req, bindata )
def build_hi_bin ( req, hiscore ):
return modulemap.gamemap [ req [ 'gamename' ] ][ 'module' ].build_hi_bin ( req, hiscore )
def done ( req ):
pass
|
gpl-2.0
| 5,346,319,761,198,488,000
| 33.453333
| 223
| 0.516254
| false
| 3.379545
| false
| false
| false
|
wpjesus/codematch
|
ietf/submit/models.py
|
1
|
3263
|
import re
import datetime
from django.db import models
from ietf.doc.models import Document
from ietf.person.models import Person
from ietf.group.models import Group
from ietf.name.models import DraftSubmissionStateName
from ietf.utils.accesstoken import generate_random_key, generate_access_token
def parse_email_line(line):
"""Split line on the form 'Some Name <email@example.com>'"""
m = re.match("([^<]+) <([^>]+)>$", line)
if m:
return dict(name=m.group(1), email=m.group(2))
else:
return dict(name=line, email="")
class Submission(models.Model):
state = models.ForeignKey(DraftSubmissionStateName)
remote_ip = models.CharField(max_length=100, blank=True)
access_key = models.CharField(max_length=255, default=generate_random_key)
auth_key = models.CharField(max_length=255, blank=True)
# draft metadata
name = models.CharField(max_length=255, db_index=True)
group = models.ForeignKey(Group, null=True, blank=True)
title = models.CharField(max_length=255, blank=True)
abstract = models.TextField(blank=True)
rev = models.CharField(max_length=3, blank=True)
pages = models.IntegerField(null=True, blank=True)
authors = models.TextField(blank=True, help_text="List of author names and emails, one author per line, e.g. \"John Doe <john@example.org>\".")
note = models.TextField(blank=True)
replaces = models.CharField(max_length=1000, blank=True)
first_two_pages = models.TextField(blank=True)
file_types = models.CharField(max_length=50, blank=True)
file_size = models.IntegerField(null=True, blank=True)
document_date = models.DateField(null=True, blank=True)
submission_date = models.DateField(default=datetime.date.today)
submitter = models.CharField(max_length=255, blank=True, help_text="Name and email of submitter, e.g. \"John Doe <john@example.org>\".")
idnits_message = models.TextField(blank=True)
def __unicode__(self):
return u"%s-%s" % (self.name, self.rev)
def authors_parsed(self):
res = []
for line in self.authors.replace("\r", "").split("\n"):
line = line.strip()
if line:
res.append(parse_email_line(line))
return res
def submitter_parsed(self):
return parse_email_line(self.submitter)
def access_token(self):
return generate_access_token(self.access_key)
def existing_document(self):
return Document.objects.filter(name=self.name).first()
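# Example (illustrative, not part of the original model): with the authors
# field set to "John Doe <john@example.org>\nJane Roe <jane@example.org>",
# authors_parsed() returns
#   [{'name': 'John Doe', 'email': 'john@example.org'},
#    {'name': 'Jane Roe', 'email': 'jane@example.org'}]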
class SubmissionEvent(models.Model):
submission = models.ForeignKey(Submission)
time = models.DateTimeField(default=datetime.datetime.now)
by = models.ForeignKey(Person, null=True, blank=True)
desc = models.TextField()
def __unicode__(self):
return u"%s %s by %s at %s" % (self.submission.name, self.desc, self.by.plain_name() if self.by else "(unknown)", self.time)
class Meta:
ordering = ("-time", "-id")
class Preapproval(models.Model):
"""Pre-approved draft submission name."""
name = models.CharField(max_length=255, db_index=True)
by = models.ForeignKey(Person)
time = models.DateTimeField(default=datetime.datetime.now)
def __unicode__(self):
return self.name
|
bsd-3-clause
| 8,802,075,955,322,732,000
| 35.662921
| 153
| 0.680968
| false
| 3.519957
| false
| false
| false
|
esben/setuptools_scm
|
setuptools_scm/version.py
|
1
|
4137
|
from __future__ import print_function
import datetime
import re
from .utils import trace
from pkg_resources import iter_entry_points
from distutils import log
try:
from pkg_resources import parse_version, SetuptoolsVersion
except ImportError as e:
parse_version = SetuptoolsVersion = None
def _warn_if_setuptools_outdated():
if parse_version is None:
log.warn("your setuptools is too old (<12)")
log.warn("setuptools_scm functionality is degraded")
def callable_or_entrypoint(group, callable_or_name):
trace('ep', (group, callable_or_name))
if isinstance(callable_or_name, str):
for ep in iter_entry_points(group, callable_or_name):
return ep.load()
else:
return callable_or_name
def tag_to_version(tag):
trace('tag', tag)
# lstrip the v because of py2/py3 differences in setuptools
# also required for old versions of setuptools
version = tag.rsplit('-', 1)[-1].lstrip('v')
if parse_version is None:
return version
version = parse_version(version)
trace('version', repr(version))
if isinstance(version, SetuptoolsVersion):
return version
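# Example (illustrative, not part of the original module): with a modern
# setuptools installed, tag_to_version('v1.2.3') and tag_to_version('foo-1.2.3')
# both yield the parsed version 1.2.3; when the parsed result is not a
# SetuptoolsVersion, the function falls through and returns None.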
def tags_to_versions(tags):
versions = map(tag_to_version, tags)
return [v for v in versions if v is not None]
class ScmVersion(object):
def __init__(self, tag_version,
distance=None, node=None, dirty=False,
**kw):
self.tag = tag_version
if dirty and distance is None:
distance = 0
self.distance = distance
self.node = node
self.time = datetime.datetime.now()
self.extra = kw
self.dirty = dirty
@property
def exact(self):
return self.distance is None
def __repr__(self):
return self.format_with(
'<ScmVersion {tag} d={distance}'
' n={node} d={dirty} x={extra}>')
def format_with(self, fmt):
return fmt.format(
time=self.time,
tag=self.tag, distance=self.distance,
node=self.node, dirty=self.dirty, extra=self.extra)
def format_choice(self, clean_format, dirty_format):
return self.format_with(dirty_format if self.dirty else clean_format)
def meta(tag, distance=None, dirty=False, node=None, **kw):
if SetuptoolsVersion is None or not isinstance(tag, SetuptoolsVersion):
tag = tag_to_version(tag)
trace('version', tag)
    assert tag is not None, 'cannot parse version from tag %s' % tag
return ScmVersion(tag, distance, node, dirty, **kw)
def guess_next_version(tag_version, distance):
version = str(tag_version)
if '.dev' in version:
prefix, tail = version.rsplit('.dev', 1)
assert tail == '0', 'own dev numbers are unsupported'
return '%s.dev%s' % (prefix, distance)
else:
        prefix, tail = re.match(r'(.*?)(\d+)$', version).groups()
return '%s%d.dev%s' % (prefix, int(tail) + 1, distance)
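# Example (illustrative, not part of the original module):
#   guess_next_version('1.2', distance=3)      -> '1.3.dev3'
#   guess_next_version('1.2.dev0', distance=3) -> '1.2.dev3'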
def guess_next_dev_version(version):
if version.exact:
return version.format_with("{tag}")
else:
return guess_next_version(version.tag, version.distance)
def get_local_node_and_date(version):
if version.exact:
return version.format_choice("", "+d{time:%Y%m%d}")
else:
return version.format_choice("+n{node}", "+n{node}.d{time:%Y%m%d}")
def get_local_dirty_tag(version):
return version.format_choice('', '+dirty')
def postrelease_version(version):
if version.exact:
return version.format_with('{tag}')
else:
return version.format_with('{tag}.post{distance}')
def format_version(version, **config):
trace('scm version', version)
trace('config', config)
version_scheme = callable_or_entrypoint(
'setuptools_scm.version_scheme', config['version_scheme'])
local_scheme = callable_or_entrypoint(
'setuptools_scm.local_scheme', config['local_scheme'])
main_version = version_scheme(version)
trace('version', main_version)
local_version = local_scheme(version)
trace('local_version', local_version)
    return main_version + local_version
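# Example (illustrative; assumes this package is installed so that its
# 'guess-next-dev' and 'node-and-date' entry points resolve):
#   format_version(meta('1.2', distance=3, node='g1234abc'),
#                  version_scheme='guess-next-dev',
#                  local_scheme='node-and-date')
#   -> '1.3.dev3+ng1234abc'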
|
mit
| 6,615,941,135,986,023,000
| 29.19708
| 77
| 0.639594
| false
| 3.723672
| false
| false
| false
|
firtek/flap.py
|
flap.py
|
1
|
4358
|
#!/usr/bin/env python3
from copy import copy
import pyglet
from pyglet import gl
import settings
from sprites import Bird, Background, Floor, Pipe
from utils import get_sprite, check_collision
def main(callback=None):
    # global score starts at -1 because it is incremented when the first pipe appears
global score
score = -1
# Initialize window
window = pyglet.window.Window(width=settings.window_width * settings.scale,
height=settings.window_height * settings.scale,
resizable=False)
window.clear()
scoreLabel = pyglet.text.Label("0", font_size=40, x=window.width//2, y=window.height, anchor_x='center', anchor_y='top')
# To pass to the callback
def click():
window.dispatch_event('on_mouse_press')
# Set up sprites
bird = Bird(window=window)
background = Background()
floor = Floor()
pipes = []
tap_to_start = get_sprite('tap.png')
gameover = get_sprite('gameover.png')
# Set up game state, which indicates whether the game has started and how long
# we have to wait until the next pipe appears.
class GameState(object):
def __init__(self):
self.reset()
def reset(self):
self.started = False
self.t_to_next_pipe = 2
# reset score label
scoreLabel._set_text("0")
state = GameState()
def update(dt):
global score
if not state.started:
return
if bird.alive:
state.t_to_next_pipe -= dt
if state.t_to_next_pipe < 0:
pipe = Pipe(space=75 * settings.scale, window=window)
pipes.append(pipe)
state.t_to_next_pipe += 2
                # update the score; it starts at -1, so the first pipe
                # brings it to 0
                score += 1
                # set the label text via the public text property
                scoreLabel.text = str(score)
for pipe in copy(pipes):
if not pipe.visible:
pipes.remove(pipe)
# Move everything
background.update(dt)
for pipe in pipes:
pipe.update(dt)
floor.update(dt)
# Check for collisions
collision = check_collision(bird, floor) or any([check_collision(bird, pipe) for pipe in pipes])
if collision or bird.y > window.height:
bird.die()
if not bird.dead:
bird.update(dt)
if bird.dying and bird.y < -100:
bird.stop()
# reset the score on death
score = -1
# function to be used in key & mouse events
def still_playing():
if bird.alive:
bird.flap()
elif not state.started:
state.started = True
bird.start()
bird.flap()
elif bird.dead:
bird.reset()
pipes.clear()
state.reset()
@window.event
def on_mouse_press(*args):
still_playing()
@window.event
def on_key_press(*args):
still_playing()
@window.event
def on_draw():
window.clear()
background.blit()
for pipe in pipes:
pipe.blit()
floor.blit()
bird.blit()
if not state.started:
tap_to_start.blit(0.5 * (window.width - tap_to_start.width * 0.37), 0.43 * window.height)
if bird.dying or bird.dead:
gameover.blit(0.5 * (window.width - gameover.width), 0.5 * window.height)
if callback is not None:
import numpy as np
buf = (gl.GLubyte * (3 * window.width * window.height))(0)
gl.glReadPixels(0, 0, window.width, window.height,
gl.GL_RGB,
gl.GL_UNSIGNED_BYTE, buf)
array = np.frombuffer(buf, dtype='<u1')
array = array.reshape(window.height, window.width, 3)
array = array[::settings.scale, ::settings.scale]
callback(array, click, alive=bird.alive)
# draw score
scoreLabel.draw()
gl.glEnable(gl.GL_BLEND)
gl.glBlendFunc(gl.GL_SRC_ALPHA, gl.GL_ONE_MINUS_SRC_ALPHA)
pyglet.clock.schedule_interval(update, 0.01)
pyglet.app.run()
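# Illustrative only (not part of the original script): main() can drive a bot
# through its callback, which receives the downscaled RGB frame as a numpy
# array, a click() function that simulates a tap, and the bird's alive flag.
#   def bot(frame, click, alive=True):
#       if alive and frame[..., 2].mean() > 100:   # toy heuristic only
#           click()
#   main(callback=bot)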
if __name__ == "__main__":
main()
|
bsd-2-clause
| 8,000,193,271,814,344,000
| 24.786982
| 124
| 0.539468
| false
| 3.969035
| false
| false
| false
|
mauriceling/dose
|
dose/copads/statisticsdistribution.py
|
1
|
134396
|
"""
Classes for Various Statistical Distributions.
References:
- Regress+ A compendium of common probability distributions (version 2.3)
by Michael P. McLaughlin (mpmcl@mitre.org)
http://www.causascientia.org/math_stat/Dists/Compendium.pdf
- Hand-book on statistical distributions for experimentalists
Internal report SUF-PFY/96-01. University of Stockholms
by Christian Walck (walck@physto.se)
Distributions:
- BetaDistribution(location, scale, p, q)
- PowerFunctionDistribution(shape)
- BinomialDistribution(success, trial)
- BernoulliDistribution(success)
- BradfordDistribution
- BurrDistribution
- CauchyDistribution(location=0.0, scale=1.0)
- LorentzDistribution (alias of CauchyDistribution)
- ChiDistribution
- HalfNormalDistribution(location, scale)
- MaxwellDistribution(scale)
- RayleighDistribution(scale)
- CosineDistribution(location=0.0, scale=1.0)
- DoubleGammaDistribution
- DoubleWeibullDistribution
- ExponentialDistribution(location=0.0, scale=1.0)
- NegativeExponentialDistribution (alias of ExponentialDistribution)
- ExtremeLBDistribution
- FDistribution
- FiskDistribution
- LogLogisticDistribution (alias of FiskDistribution)
- FoldedNormalDistribution
- GammaDistribution
- ChiSquareDistribution(df)
- ErlangDistribution(shape)
- FurryDistribution (alias of GammaDistribution)
- GenLogisticDistribution
- GeometricDistribution(success=0.5)
- GumbelDistribution(location, scale)
- FisherTippettDistribution (alias of GumbelDistribution)
- GompertzDistribution (alias of GumbelDistribution)
- LogWeibullDistribution (alias of GumbelDistribution)
- HyperbolicSecantDistribution
- HypergeometricDistribution
- InverseNormalDistribution
- WaldDistribution (alias of InverseNormalDistribution)
- LaplaceDistribution
- BilateralExponentialDistribution (alias of LaplaceDistribution)
- DoubleExponentialDistribution (alias of LaplaceDistribution)
- LogarithmicDistribution(shape)
- LogisticDistribution
- SechSquaredDistribution (alias of LogisticDistribution)
- LogNormalDistribution
- AntiLogNormalDistribution (alias of LogNormalDistribution)
- CobbDouglasDistribution (alias of LogNormalDistribution)
- NakagamiDistribution
- NegativeBinomialDistribution(success, target)
- PascalDistribution(success, target)
- PolyaDistribution (alias of NegativeBinomialDistribution)
- NormalDistribution()
- ParetoDistribution(location=1.0, shape=1.0)
- PoissonDistribution(expectation)
- RademacherDistribution()
- ReciprocalDistribution
- SemicircularDistribution(location=0.0, scale=1.0)
- TDistribution(location=0.0, scale=1.0, shape=2)
- TriangularDistribution
- UniformDistribution(location, scale)
- RectangularDistribution (alias of UniformDistribution)
- WeibullDistribution
- FrechetDistribution (alias of WeibullDistribution)
Copyright (c) Maurice H.T. Ling <mauriceling@acm.org>
Date created: 17th August 2005
"""
import math
import random
from .copadsexceptions import DistributionParameterError
from .copadsexceptions import DistributionFunctionError
from .copadsexceptions import NormalDistributionTypeError
from . import nrpy
from . import constants
# Constants used throughout this module; defined here from the standard
# library so that the bare names PI, PI2 and SQRT2 used below resolve.
PI = math.pi
PI2 = 2 * math.pi
SQRT2 = math.sqrt(2)
class Distribution:
"""
Abstract class for all statistical distributions.
Due to the large variations of parameters for each distribution, it is
unlikely to be able to standardize a parameter list for each method that
is meaningful for all distributions. Instead, the parameters to construct
each distribution is to be given as keyword arguments.
@see: Ling, MHT. 2009. Compendium of Distributions, I: Beta, Binomial, Chi-
Square, F, Gamma, Geometric, Poisson, Student's t, and Uniform. The Python
Papers Source Codes 1:4
"""
def __init__(self, **parameters):
"""
Constructor method. The parameters are used to construct the
probability distribution.
"""
raise NotImplementedError
def CDF(self, x):
"""
Cummulative Distribution Function, which gives the cummulative
probability (area under the probability curve) from -infinity or 0 to
a give x-value on the x-axis where y-axis is the probability. CDF is
also known as density function.
"""
raise NotImplementedError
def PDF(self, x):
"""
Partial Distribution Function, which gives the probability for the
particular value of x, or the area under probability distribution from
x-h to x+h for continuous distribution.
"""
raise NotImplementedError
def inverseCDF(self, probability, start=0.0, step=0.01):
"""
It does the reverse of CDF() method, it takes a probability value and
returns the corresponding value on the x-axis.
"""
raise NotImplementedError
def mean(self):
"""
Gives the arithmetic mean of the sample.
"""
raise NotImplementedError
def mode(self):
"""
Gives the mode of the sample, if closed-form is available.
"""
raise NotImplementedError
def kurtosis(self):
"""
Gives the kurtosis of the sample.
"""
raise NotImplementedError
def skew(self):
"""
Gives the skew of the sample.
"""
raise NotImplementedError
def variance(self):
"""
Gives the variance of the sample.
"""
raise NotImplementedError
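# Illustrative usage of the common API defined above (not part of the
# original module): every concrete distribution below exposes the same
# CDF/PDF/inverseCDF trio, e.g.
#   b = BetaDistribution(0.0, 1.0, 2.0, 3.0)  # location, scale, p, q
#   area = b.CDF(0.5)                         # P(X <= 0.5)
#   x, cprob = b.inverseCDF(0.5)              # x whose CDF is ~0.5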
# ----------------------------------------------------------
# Tested Distributions
# ----------------------------------------------------------
class BetaDistribution(Distribution):
"""
Class for Beta Distribution.
@see: Ling, MHT. 2009. Compendium of Distributions, I: Beta, Binomial, Chi-
Square, F, Gamma, Geometric, Poisson, Student's t, and Uniform. The Python
Papers Source Codes 1:4
@status: Tested method
@since: version 0.2
"""
def __init__(self, location, scale, p, q):
"""
Constructor method. The parameters are used to construct the
probability distribution.
@param location:
@param scale: upper bound
@param p: shape parameter. Although no upper bound but seldom exceed 10.
@param q: shape parameter. Although no upper bound but seldom exceed 10.
"""
self.location = float(location)
self.scale = float(scale)
self.p = float(p)
self.q = float(q)
def CDF(self, x):
"""
Cummulative Distribution Function, which gives the cummulative
probability (area under the probability curve) from -infinity or 0 to
a give x-value on the x-axis where y-axis is the probability.
"""
return nrpy.betai(self.p, self.q, (x - self.location)/
(self.scale - self.location))
def PDF(self, x):
"""
Partial Distribution Function, which gives the probability
for particular value of x, or the area under probability
distribution from x-h to x+h for continuous distribution.
"""
        n = (self.scale - self.location) ** (self.p + self.q - 1)
        # nrpy.gammln returns log(Gamma(x)); exponentiate to recover the
        # Gamma values required by the Beta density
        n = math.exp(nrpy.gammln(self.p) + nrpy.gammln(self.q)) * n
        n = math.exp(nrpy.gammln(self.p + self.q)) / n
p = (x - self.location) ** (self.p - 1)
q = (self.scale - x) ** (self.q - 1)
return n * p * q
def inverseCDF(self, probability, start=0.0, step=0.01):
"""
It does the reverse of CDF() method, it takes a probability
value and returns the corresponding value on the x-axis.
"""
cprob = self.CDF(start)
if probability < cprob:
return (start, cprob)
while probability > cprob:
start = start + step
cprob = self.CDF(start)
return (start, cprob)
def mean(self):
"""Gives the arithmetic mean of the sample."""
n = (self.location * self.q) + (self.scale * self.p)
return n / (self.p + self.q)
def mode(self):
"""Gives the mode of the sample."""
n = (self.location * (self.q - 1)) + (self.scale * \
(self.p - 1))
return n / (self.p + self.q - 2)
def kurtosis(self):
"""Gives the kurtosis of the sample."""
n = (self.p ** 2) * (self.q + 2) + \
(2 * (self.q ** 2)) + \
((self.p * self.q) * (self.q - 2))
n = n * (self.p + self.q + 1)
d = self.p * self.q * (self.p + self.q + 2) * \
(self.p + self.q + 3)
return 3 * ((n / d) - 1)
def skew(self):
"""Gives the skew of the sample."""
d = (self.p + self.q) ** 3
d = d * (self.p + self.q + 1) * (self.p + self.q + 2)
e = ((self.p + self.q) ** 2) * (self.p + self.q + 1)
e = (self.p * self.q) / e
e = e ** 1.5
        return ((2 * self.p * self.q) * (self.q - self.p)) / (d * e)
def variance(self):
"""Gives the variance of the sample."""
n = self.p * self.q * ((self.scale - self.location) ** 2)
d = (self.p + self.q + 1) * ((self.p + self.q) ** 2)
return n / d
def moment(self, r):
"""Gives the r-th moment of the sample."""
return nrpy.beta(self.p + r,
self.q)/nrpy.beta(self.p, self.q)
def random(self):
"""Gives a random number based on the distribution."""
return random.betavariate(self.p, self.q)
class BinomialDistribution(Distribution):
"""
Class for Binomial Distribution.
@see: Ling, MHT. 2009. Compendium of Distributions, I: Beta, Binomial, Chi-
Square, F, Gamma, Geometric, Poisson, Student's t, and Uniform. The Python
Papers Source Codes 1:4
@status: Tested method
@since: version 0.2
"""
def __init__(self, success=0.5, trial=1000):
"""
Constructor method. The parameters are used to construct
the probability distribution.
@param success: probability of success; 0 <= success <= 1
@param trial: number of Bernoulli trials
"""
self.success = float(success)
self.trial = int(trial)
def CDF(self, x):
"""
Cummulative Distribution Function, which gives the cummulative
probability (area under the probability curve) from -infinity or 0 to
a give x-value on the x-axis where y-axis is the probability.
"""
return nrpy.cdf_binomial(x, self.trial, self.success)
def PDF(self, x):
"""
Partial Distribution Function, which gives the probability for the
particular value of x, or the area under probability distribution from
x-h to x+h for continuous distribution.
"""
x = int(x)
return nrpy.bico(self.trial, x) * \
(self.success ** x) * \
((1 - self.success) ** (self.trial - x))
def inverseCDF(self, probability, start=0, step=1):
"""
It does the reverse of CDF() method, it takes a probability
value and returns the corresponding value on the x-axis.
"""
cprob = self.CDF(start)
if probability < cprob:
return (start, cprob)
while probability > cprob:
start = start + step
cprob = self.CDF(start)
return (start, cprob)
def mean(self):
"""Gives the arithmetic mean of the sample."""
return self.success * self.trial
def mode(self):
"""Gives the mode of the sample."""
return int(self.success * (self.trial + 1))
def kurtosis(self):
"""Gives the kurtosis of the sample."""
        return (1 - (6 * self.success * (1 - self.success))) / \
               (self.trial * self.success * (1 - self.success))
def skew(self):
"""Gives the skew of the sample."""
return (1 - self.success - self.success)/ \
((self.trial * self.success * (1 - self.success)) ** 0.5)
def variance(self):
"""Gives the variance of the sample."""
return self.mean() * (1 - self.success)
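# Example (illustrative, not part of the original module): a fair coin
# flipped 10 times gives PDF(5) = C(10,5) * 0.5**10 = 252/1024 ~ 0.2461
# and mean() = 5.0:
#   BinomialDistribution(success=0.5, trial=10).PDF(5)   # 0.24609375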
class CauchyDistribution(Distribution):
"""
Class for Cauchy Distribution.
@see: Chen, KFQ, Ling, MHT. 2013. COPADS III (Compendium of
Distributions II): Cauchy, Cosine, Exponential, Hypergeometric,
Logarithmic, Semicircular, Triangular, and Weibull. The Python
Papers Source Codes 5: 2.
@status: Tested method
@since: version 0.4
"""
def __init__(self, location=0.0, scale=1.0):
"""
Constructor method. The parameters are used to construct the
probability distribution.
@param location: the mean; default = 0.0
@param scale: spread of the distribution, S{lambda}; default = 1.0
"""
self.location = location
self.scale = scale
def CDF(self, x):
"""
Cummulative Distribution Function, which gives the cummulative
probability (area under the probability curve) from -infinity or 0 to
a give x-value on the x-axis where y-axis is the probability."""
return 0.5 + 1 / PI * math.atan((x - self.location) / self.scale)
def PDF(self, x):
"""
Partial Distribution Function, which gives the probability for the
particular value of x, or the area under probability distribution from
x-h to x+h for continuous distribution."""
return 1 / (PI * self.scale * \
(1 + (((x - self.location) / self.scale) ** 2)))
def inverseCDF(self, probability, start=0.0, step=0.01):
"""
It does the reverse of CDF() method, it takes a probability value and
returns the corresponding value on the x-axis."""
cprob = self.CDF(start)
if probability < cprob: return (start, cprob)
while (probability > cprob):
start = start + step
cprob = self.CDF(start)
# print start, cprob
return (start, cprob)
def mean(self):
"""Gives the arithmetic mean of the sample."""
        raise DistributionFunctionError('Mean for Cauchy Distribution is undefined')
def mode(self):
"""Gives the mode of the sample."""
return self.location
def median(self):
"""Gives the median of the sample."""
return self.location
def quantile1(self):
"""Gives the 1st quantile of the sample."""
return self.location - self.scale
def quantile3(self):
"""Gives the 3rd quantile of the sample."""
return self.location + self.scale
def qmode(self):
"""Gives the quantile of the mode of the sample."""
return 0.5
def random(self, seed):
"""Gives a random number based on the distribution."""
while 1:
            seed = self.location + (self.scale * math.tan(PI * (seed - 0.5)))
yield seed
class CosineDistribution(Distribution):
"""
Cosine distribution is sometimes used as a simple approximation to
Normal distribution.
@see: Chen, KFQ, Ling, MHT. 2013. COPADS III (Compendium of
Distributions II): Cauchy, Cosine, Exponential, Hypergeometric,
Logarithmic, Semicircular, Triangular, and Weibull. The Python
Papers Source Codes 5: 2.
@status: Tested method
@since: version 0.4
"""
def __init__(self, location=0.0, scale=1.0):
"""
Constructor method. The parameters are used to construct the
probability distribution.
@param location: the mean; default = 0.0
@param scale: spread of the distribution, S{lambda}; default = 1.0
"""
self.location = location
self.scale = scale
def CDF(self, x):
"""
Cummulative Distribution Function, which gives the cummulative
probability (area under the probability curve) from -infinity or 0 to
a give x-value on the x-axis where y-axis is the probability."""
n = PI + (x - self.location) / self.scale + \
math.sin((x - self.location) / self.scale)
return n / PI2
def PDF(self, x):
"""
Partial Distribution Function, which gives the probability for the
particular value of x, or the area under probability distribution from
x-h to x+h for continuous distribution."""
return (1 / (PI2 * self.scale)) * \
(1 + math.cos((x - self.location) / self.scale))
def inverseCDF(self, probability, start=0.0, step=0.01):
"""
It does the reverse of CDF() method, it takes a probability value and
returns the corresponding value on the x-axis."""
cprob = self.CDF(start)
if probability < cprob: return (start, cprob)
while (probability > cprob):
start = start + step
cprob = self.CDF(start)
# print start, cprob
return (start, cprob)
def mean(self):
"""Gives the arithmetic mean of the sample."""
return self.location
def mode(self):
"""Gives the mode of the sample."""
return self.location
def median(self):
"""Gives the median of the sample."""
return self.location
def kurtosis(self):
"""Gives the kurtosis of the sample."""
return -0.5938
def skew(self):
"""Gives the skew of the sample."""
return 0.0
def variance(self):
"""Gives the variance of the sample."""
return (((PI * PI)/3) - 2) * (self.scale ** 2)
def quantile1(self):
"""Gives the 1st quantile of the sample."""
return self.location - (0.8317 * self.scale)
def quantile3(self):
"""Gives the 13rd quantile of the sample."""
return self.location + (0.8317 * self.scale)
def qmean(self):
"""Gives the quantile of the arithmetic mean of the sample."""
return 0.5
def qmode(self):
"""Gives the quantile of the mode of the sample."""
return 0.5
# def random(self):
# """Gives a random number based on the distribution."""
# raise DistributionFunctionError
class ExponentialDistribution(Distribution):
"""
Exponential distribution is the continuous version of Geometric
distribution. It is also a special case of Gamma distribution where
shape = 1
@see: Chen, KFQ, Ling, MHT. 2013. COPADS III (Compendium of
Distributions II): Cauchy, Cosine, Exponential, Hypergeometric,
Logarithmic, Semicircular, Triangular, and Weibull. The Python
Papers Source Codes 5: 2.
@status: Tested method
@since: version 0.4
"""
def __init__(self, location=0.0, scale=1.0):
"""
Constructor method. The parameters are used to construct the
probability distribution.
@param location: position of the distribution, default = 0.0
@param scale: spread of the distribution, S{lambda}; default = 1.0"""
self.location = location
self.scale = scale
def CDF(self, x):
"""
Cummulative Distribution Function, which gives the cummulative
probability (area under the probability curve) from -infinity or 0 to
a give x-value on the x-axis where y-axis is the probability."""
return 1 - math.exp((self.location - x) / self.scale)
def PDF(self, x):
"""
Partial Distribution Function, which gives the probability for the
particular value of x, or the area under probability distribution from
x-h to x+h for continuous distribution."""
return (1/self.scale) * math.exp((self.location - x)/self.scale)
def inverseCDF(self, probability, start=0.0, step=0.01):
"""
It does the reverse of CDF() method, it takes a probability value and
returns the corresponding value on the x-axis."""
cprob = self.CDF(start)
if probability < cprob: return (start, cprob)
while (probability > cprob):
start = start + step
cprob = self.CDF(start)
# print start, cprob
return (start, cprob)
def mean(self):
"""Gives the arithmetic mean of the sample."""
return self.location + self.scale
def mode(self):
"""Gives the mode of the sample."""
return self.location
    def median(self):
        """Gives the median of the sample."""
        # natural log: the median of an exponential is location + scale*ln(2)
        return self.location + (self.scale * math.log(2))
def kurtosis(self):
"""Gives the kurtosis of the sample."""
return 6.0
def skew(self):
"""Gives the skew of the sample."""
return 2.0
def variance(self):
"""Gives the variance of the sample."""
return self.scale * self.scale
    def quantile1(self):
        """Gives the 1st quantile of the sample."""
        # natural log: Q1 of an exponential is location + scale*ln(4/3)
        return self.location + (self.scale * math.log(4.0 / 3.0))
    def quantile3(self):
        """Gives the 3rd quantile of the sample."""
        # natural log: Q3 of an exponential is location + scale*ln(4)
        return self.location + (self.scale * math.log(4))
def qmean(self):
"""Gives the quantile of the arithmetic mean of the sample."""
return 0.6321
def qmode(self):
"""Gives the quantile of the mode of the sample."""
return 0.0
    def random(self):
        """Gives a random number based on the distribution."""
        # the rate is 1/scale (the original divided by location, which is
        # 0.0 by default); shift by location
        return self.location + random.expovariate(1.0 / self.scale)
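# Example (illustrative, not part of the original module): with the default
# location 0.0 and scale 1.0, CDF(1.0) = 1 - e**-1 ~ 0.6321, which matches
# the qmean() quantile above:
#   ExponentialDistribution().CDF(1.0)   # ~0.6321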
class FDistribution(Distribution):
"""
Class for F Distribution.
@see: Ling, MHT. 2009. Compendium of Distributions, I: Beta, Binomial, Chi-
Square, F, Gamma, Geometric, Poisson, Student's t, and Uniform. The Python
Papers Source Codes 1:4
@status: Tested method
@since: version 0.2
"""
def __init__(self, df1=1, df2=1):
"""
Constructor method. The parameters are used to construct the
probability distribution.
@param df1: degrees of freedom for numerator
@param df2: degrees of freedom for denorminator
"""
self.df1 = float(df1)
self.df2 = float(df2)
def CDF(self, x):
"""
Cummulative Distribution Function, which gives the cummulative
probability (area under the probability curve) from -infinity or 0 to
a give x-value on the x-axis where y-axis is the probability.
"""
sub_x = (self.df1 * x) / (self.df1 * x + self.df2)
return nrpy.betai(self.df1 / 2.0, self.df2 / 2.0, sub_x)
def PDF(self, x):
"""
Partial Distribution Function, which gives the probability
for particular value of x, or the area under probability
distribution from x-h to x+h for continuous distribution.
"""
x = float(x)
n1 = ((x * self.df1) ** self.df1) * (self.df2 ** self.df2)
n2 = (x * self.df1 + self.df2) ** (self.df1 + self.df2)
d = x * nrpy.beta(self.df1 / 2.0, self.df2 / 2.0)
return math.sqrt(n1 / n2) / d
def inverseCDF(self, probability, start=0.0, step=0.01):
"""
It does the reverse of CDF() method, it takes a probability value and
the corresponding value on the x-axis.
"""
cprob = self.CDF(start)
if probability < cprob:
return (start, cprob)
while probability > cprob:
start = start + step
cprob = self.CDF(start)
return (start, cprob)
def mean(self):
"""Gives the arithmetic mean of the sample."""
return float(self.df2 / (self.df2 - 2))
class GammaDistribution(Distribution):
"""
Class for Gamma Distribution.
@see: Ling, MHT. 2009. Compendium of Distributions, I: Beta, Binomial, Chi-
Square, F, Gamma, Geometric, Poisson, Student's t, and Uniform. The Python
Papers Source Codes 1:4
@status: Tested method
@since: version 0.2
"""
def __init__(self, location, scale, shape):
"""
Constructor method. The parameters are used to construct the
probability distribution.
@param location:
@param scale:
@param shape:"""
self.location = float(location)
self.scale = float(scale)
self.shape = float(shape)
def CDF(self, x):
"""
Cummulative Distribution Function, which gives the cummulative
probability (area under the probability curve) from -infinity or 0 to
a give x-value on the x-axis where y-axis is the probability.
"""
return nrpy.gammp(self.shape, (x - self.location) / self.scale)
def inverseCDF(self, probability, start=0.0, step=0.01):
"""
It does the reverse of CDF() method, it takes a probability value and
the corresponding value on the x-axis.
"""
cprob = self.CDF(start)
if probability < cprob:
return (start, cprob)
while probability > cprob:
start = start + step
cprob = self.CDF(start)
return (start, cprob)
def mean(self):
"""Gives the arithmetic mean of the sample."""
return self.location + (self.scale * self.shape)
def mode(self):
"""Gives the mode of the sample."""
return self.location + (self.scale * (self.shape - 1))
def kurtosis(self):
"""Gives the kurtosis of the sample."""
return 6 / self.shape
def skew(self):
"""Gives the skew of the sample."""
return 2 / math.sqrt(self.shape)
def variance(self):
"""Gives the variance of the sample."""
return self.scale * self.scale * self.shape
def qmean(self):
"""Gives the quantile of the arithmetic mean of the sample."""
return nrpy.gammp(self.shape, self.shape)
def qmode(self):
"""Gives the quantile of the mode of the sample."""
return nrpy.gammp(self.shape, self.shape - 1)
def ErlangDistribution(location, scale, shape):
"""
Erlang distribution is an alias of Gamma distribution where the shape
parameter is an integer.
@param location:
@param scale:
@param shape:
@status: Tested method
@since: version 0.2
"""
return GammaDistribution(location, scale, int(shape))
def FurryDistribution(location, scale, shape):
"""
Furry distribution is an alias of Gamma distribution.
@param location:
@param scale:
@param shape:
@status: Tested method
@since: version 0.2
"""
return GammaDistribution(location, scale, shape)
class ChiSquareDistribution(GammaDistribution):
"""
Chi-square distribution is a special case of Gamma distribution where
location = 0, scale = 2 and shape is twice that of the degrees of freedom.
@see: Ling, MHT. 2009. Compendium of Distributions, I: Beta, Binomial, Chi-
Square, F, Gamma, Geometric, Poisson, Student's t, and Uniform. The Python
Papers Source Codes 1:4
@status: Tested method
@since: version 0.2
"""
def __init__(self, df=2):
"""
Constructor method. The parameters are used to construct
the probability distribution.
@param df: degrees of freedom"""
GammaDistribution.__init__(self, 0, 2, float(df) / 2.0)
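# Example (illustrative, not part of the original module): a chi-square
# variable with df degrees of freedom inherits mean = df and variance = 2*df
# from the Gamma parameters above (location=0, scale=2, shape=df/2):
#   c = ChiSquareDistribution(df=4)
#   c.mean()       # 4.0
#   c.variance()   # 8.0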
class GeometricDistribution(Distribution):
"""
Geometric distribution is the discrete version of Exponential
distribution.
@see: Ling, MHT. 2009. Compendium of Distributions, I: Beta, Binomial, Chi-
Square, F, Gamma, Geometric, Poisson, Student's t, and Uniform. The Python
Papers Source Codes 1:4
@status: Tested method
@since: version 0.2
"""
def __init__(self, success=0.5):
"""
Constructor method. The parameters are used to construct the
probability distribution.
@param success: probability of success; 0 <= success <= 1;
default = 0.5
"""
self.prob = float(success)
def CDF(self, x):
"""
Cummulative Distribution Function, which gives the cummulative
probability (area under the probability curve) from -infinity or 0 to
a give x-value on the x-axis where y-axis is the probability.
"""
total = self.PDF(1)
for i in range(2, int(x) + 1):
total += self.PDF(i)
return total
def PDF(self, x):
"""
Partial Distribution Function, which gives the probability
for particular value of x, or the area under probability
distribution from x-h to x+h for continuous distribution.
"""
return self.prob * ((1 - self.prob) ** (x - 1))
def inverseCDF(self, probability, start=1, step=1):
"""
It does the reverse of CDF() method, it takes a probability value and
the corresponding value on the x-axis.
"""
cprob = self.CDF(start)
if probability < cprob:
return (start, cprob)
while probability > cprob:
start = start + step
cprob = self.CDF(start)
return (start, cprob)
def mean(self):
"""Gives the arithmetic mean of the sample."""
return 1/self.prob
def mode(self):
"""Gives the mode of the sample."""
return 1.0
def variance(self):
"""Gives the variance of the sample."""
return (1 - self.prob) / (self.prob ** 2)
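# Example (illustrative, not part of the original module): with success
# probability 0.5, the chance that the first success lands on trial 3 is
# PDF(3) = 0.5 * 0.5**2 = 0.125, and the expected waiting time is mean() = 2:
#   GeometricDistribution(success=0.5).PDF(3)   # 0.125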
class HypergeometricDistribution(Distribution):
"""
Class for Hypergeometric distribution
@see: Chen, KFQ, Ling, MHT. 2013. COPADS III (Compendium of
Distributions II): Cauchy, Cosine, Exponential, Hypergeometric,
Logarithmic, Semicircular, Triangular, and Weibull. The Python
Papers Source Codes 5: 2.
@status: Tested method
@since: version 0.4
"""
def __init__(self, sample_size,
population_size=100,
population_success=50):
"""
Constructor method. The parameters are used to construct the
probability distribution.
@param sample_size: sample size (not more than population size)
@type sample_size: integer
@param population_size: population size; default = 100
@type population_size: integer
@param population_success: number of successes in the population
        (cannot be more than population size); default = 50
@type population_success: integer"""
        if population_success > population_size:
            raise AttributeError('population_success cannot be more than population_size')
        elif sample_size > population_size:
            raise AttributeError('sample_size cannot be more than population_size')
else:
self.psize = int(population_size)
self.psuccess = int(population_success)
self.ssize = int(sample_size)
def CDF(self, sample_success):
"""
Cummulative Distribution Function, which gives the cummulative
probability (area under the probability curve) from -infinity or 0 to
a give x-value (sample_success, an integer that is not more than sample
size) on the x-axis where y-axis is the probability."""
        if sample_success > self.ssize:
            raise AttributeError('sample_success cannot be more than sample_size')
        else:
            # sum from 0: zero successes in the sample has nonzero probability
            return sum([self.PDF(n) for n in range(0, sample_success + 1)])
def PDF(self, sample_success):
"""
Partial Distribution Function, which gives the probability for the
particular value of x (sample_success, an integer that is not more
than sample size), or the area under probability distribution from
x-h to x+h for continuous distribution."""
        if sample_success > self.ssize:
            raise AttributeError('sample_success cannot be more than sample_size')
else:
sample_success = int(sample_success)
numerator = nrpy.bico(self.psuccess, sample_success)
numerator = numerator * nrpy.bico(self.psize-self.psuccess,
self.ssize-sample_success)
denominator = nrpy.bico(self.psize, self.ssize)
return float(numerator)/float(denominator)
def inverseCDF(self, probability, start=1, step=1):
"""
It does the reverse of CDF() method, it takes a probability value
and returns the corresponding value on the x-axis."""
cprob = self.CDF(start)
if probability < cprob: return (start, cprob)
while (probability > cprob):
start = start + step
cprob = self.CDF(start)
# print start, cprob
return (int(start), cprob)
def mean(self):
"""Gives the arithmetic mean of the sample."""
return self.ssize * (float(self.psuccess)/float(self.psize))
def mode(self):
"""Gives the mode of the sample."""
temp = (self.ssize + 1) * (self.psuccess + 1)
return float(temp)/float(self.psize + 2)
# def kurtosis(self):
# """Gives the kurtosis of the sample."""
# raise DistributionFunctionError
# def skew(self):
# """Gives the skew of the sample."""
# raise DistributionFunctionError
def variance(self):
"""Gives the variance of the sample."""
t1 = float(self.psize-self.psuccess)/float(self.psize)
t2 = float(self.psize-self.ssize)/float(self.psize-1)
return self.mean() * t1 * t2
# def quantile1(self):
# """Gives the 1st quantile of the sample."""
# raise DistributionFunctionError
# def quantile3(self):
# """Gives the 3rd quantile of the sample."""
# raise DistributionFunctionError
# def qmean(self):
# """Gives the quantile of the arithmetic mean of the sample."""
# raise DistributionFunctionError
# def qmode(self):
# """Gives the quantile of the mode of the sample."""
# raise DistributionFunctionError
## def random(self, seed):
## """Gives a random number based on the distribution."""
## while 1:
## func
## yield seed
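# Example (illustrative, not part of the original module): drawing 10 items
# from a population of 100 containing 50 successes gives
#   h = HypergeometricDistribution(10, population_size=100,
#                                  population_success=50)
#   h.mean()   # 10 * (50/100) = 5.0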
class LogarithmicDistribution(Distribution):
"""
Class for Logarithmic Distribution.
@see: Chen, KFQ, Ling, MHT. 2013. COPADS III (Compendium of
Distributions II): Cauchy, Cosine, Exponential, Hypergeometric,
Logarithmic, Semicircular, Triangular, and Weibull. The Python
Papers Source Codes 5: 2.
@status: Tested method
@since: version 0.4
"""
def __init__(self, shape):
"""Constructor method. The parameters are used to construct the
probability distribution.
@param shape: the spread of the distribution"""
self.shape = shape
def CDF(self, x):
"""
Cummulative Distribution Function, which gives the cummulative
probability (area under the probability curve) from -infinity or 0 to
a give x-value on the x-axis where y-axis is the probability."""
summation = 0.0
        # the log-series distribution is supported on x >= 1 (PDF(0) would
        # divide by zero)
        for i in range(1, int(x) + 1):
            summation = summation + self.PDF(i)
return summation
def PDF(self, x):
"""
Partial Distribution Function, which gives the probability for the
particular value of x, or the area under probability distribution from
x-h to x+h for continuous distribution."""
        # natural log: the log-series density is -p**x / (x * ln(1 - p))
        return (-1 * (self.shape ** x)) / (math.log(1 - self.shape) * x)
def inverseCDF(self, probability, start=0.0, step=0.01):
"""
It does the reverse of CDF() method, it takes a probability value and
returns the corresponding value on the x-axis."""
cprob = self.CDF(start)
if probability < cprob: return (start, cprob)
while (probability > cprob):
start = start + step
cprob = self.CDF(start)
# print start, cprob
return (start, cprob)
def mean(self):
"""Gives the arithmetic mean of the sample."""
        return (-1 * self.shape) / ((1 - self.shape) * \
                                    math.log(1 - self.shape))
def mode(self):
"""Gives the mode of the sample."""
return 1.0
def variance(self):
"""Gives the variance of the sample."""
        n = (-1 * self.shape) * (self.shape + math.log(1 - self.shape))
        d = ((1 - self.shape) ** 2) * math.log(1 - self.shape) * \
            math.log(1 - self.shape)
        return n / d
# def random(self):
# """Gives a random number based on the distribution."""
# raise DistributionFunctionError
class NormalDistribution(Distribution):
"""
Class for standardized normal distribution (area under the curve = 1)
@see: Ling, MHT. 2009. Ten Z-Test Routines from Gopal Kanji's 100
Statistical Tests. The Python Papers Source Codes 1:5
@status: Tested method
@since: version 0.1
"""
def __init__(self):
        # stored as _mean so the attribute does not shadow the mean() method
        self._mean = 0.0
        self.stdev = 1.0
def CDF(self, x):
"""
Cummulative Distribution Function, which gives the cummulative
probability (area under the probability curve) from -infinity or 0 to
a give x-value on the x-axis where y-axis is the probability.
"""
return 1.0 - 0.5 * nrpy.erfcc(x/SQRT2)
def PDF(self, x):
"""
Calculates the density (probability) at x by the formula
        f(x) = 1/(sqrt(2 pi) sigma) e^(-x^2/(2 sigma^2))
        where sigma is the standard deviation; the mean of this
        standardized distribution is 0.
@param x: probability at x
"""
return (1/(math.sqrt(PI2) * self.stdev)) * \
math.exp(-(x ** 2/(2 * self.stdev**2)))
def inverseCDF(self, probability, start = -10.0,
end = 10.0, error = 10e-8):
"""
It does the reverse of CDF() method, it takes a probability value and
returns the corresponding value on the x-axis, together with the
cumulative probability.
@param probability: probability under the curve from -infinity
@param start: lower boundary of calculation (default = -10)
@param end: upper boundary of calculation (default = 10)
@param error: error between the given and calculated probabilities
(default = 10e-8)
@return: Returns a tuple (start, cprob) where 'start' is the standard
deviation for the area under the curve from -infinity to the given
'probability' (+/- step). 'cprob' is the calculated area under the
curve from -infinity to the returned 'start'.
"""
# check for tolerance
if abs(self.CDF(start)-probability) < error:
return (start, self.CDF(start))
# case 1: lower than -10 standard deviations
if probability < self.CDF(start):
return self.inverseCDF(probability, start-5, start, error)
# case 2: between -10 to 10 standard deviations (bisection method)
if probability > self.CDF(start) and \
probability < self.CDF((start+end)/2):
return self.inverseCDF(probability, start, (start+end)/2, error)
if probability > self.CDF((start+end)/2) and \
probability < self.CDF(end):
return self.inverseCDF(probability, (start+end)/2, end, error)
# case 3: higher than 10 standard deviations
if probability > self.CDF(end):
return self.inverseCDF(probability, end, end+5, error)
    def mean(self):
        return self._mean
    def mode(self):
        return self._mean
def kurtosis(self):
return 0.0
def skew(self):
return 0.0
def variance(self):
return self.stdev * self.stdev
    def random(self):
        """Gives a random number based on the distribution."""
        return random.gauss(self._mean, self.stdev)
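# Example (illustrative, not part of the original module): the bisection in
# inverseCDF() recovers the familiar z-values, e.g.
#   NormalDistribution().inverseCDF(0.975)[0]   # ~1.96
#   NormalDistribution().CDF(0.0)               # 0.5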
class PoissonDistribution(Distribution):
"""
Class for Poisson Distribution. Poisson distribution is binomial
distribution with very low success - that is, for rare events.
@see: Ling, MHT. 2009. Compendium of Distributions, I: Beta, Binomial, Chi-
Square, F, Gamma, Geometric, Poisson, Student's t, and Uniform. The Python
Papers Source Codes 1:4
@status: Tested method
@since: version 0.2
"""
def __init__(self, expectation=0.001):
"""
Constructor method. The parameters are used to construct the
probability distribution.
@param expectation: mean success probability; S{lambda}
"""
self._mean = float(expectation)
def CDF(self, x):
"""
Cummulative Distribution Function, which gives the cummulative
probability (area under the probability curve) from -infinity or 0 to
a give x-value on the x-axis where y-axis is the probability.
"""
return nrpy.cdf_poisson(x + 1, self._mean)
def PDF(self, x):
"""
Partial Distribution Function, which gives the probability
for particular value of x, or the area under probability
distribution from x-h to x+h for continuous distribution.
"""
        # note: -1 ** mean parses as -(1 ** mean); the Poisson density
        # needs exp(-mean)
        return (math.exp(-self._mean) *
                (self._mean ** x)) / nrpy.factrl(x)
def inverseCDF(self, probability, start=0.001, step=1):
"""
It does the reverse of CDF() method, it takes a probability value and
the corresponding value on the x-axis.
"""
cprob = self.CDF(start)
if probability < cprob:
return (start, cprob)
while probability > cprob:
start = start + step
cprob = self.CDF(start)
return (start, cprob)
def mean(self):
"""Gives the arithmetic mean of the sample."""
return self._mean
def mode(self):
"""Gives the mode of the sample."""
return int(self._mean)
def variance(self):
"""Gives the variance of the sample."""
return self._mean
class SemicircularDistribution(Distribution):
"""
Class for Semicircular Distribution.
@see: Chen, KFQ, Ling, MHT. 2013. COPADS III (Compendium of
Distributions II): Cauchy, Cosine, Exponential, Hypergeometric,
Logarithmic, Semicircular, Triangular, and Weibull. The Python
Papers Source Codes 5: 2.
@status: Tested method
@since: version 0.4
"""
def __init__(self, location=0.0, scale=1.0):
"""
Constructor method. The parameters are used to construct the
probability distribution.
@param location: mean of the distribution, default = 0.0
@param scale: spread of the distribution, default = 1.0"""
self.location = location
self.scale = scale
def CDF(self, x):
"""
Cummulative Distribution Function, which gives the cummulative
probability (area under the probability curve) from -infinity or 0 to
a give x-value on the x-axis where y-axis is the probability."""
t = (x - self.location) / self.scale
return 0.5 + (1 / PI) * (t * math.sqrt(1 - (t ** 2)) + math.asin(t))
def PDF(self, x):
"""
Partial Distribution Function, which gives the probability for the
particular value of x, or the area under probability distribution from
x-h to x+h for continuous distribution."""
return (2 / (self.scale * PI)) * \
math.sqrt(1 - ((x - self.location) / self.scale) ** 2)
def inverseCDF(self, probability, start=-10.0, step=0.01):
"""
It does the reverse of CDF() method, it takes a probability value and
returns the corresponding value on the x-axis."""
if start < -1 * self.scale:
start = -1 * self.scale
cprob = self.CDF(start)
if probability < cprob: return (start, cprob)
while (probability > cprob):
start = start + step
cprob = self.CDF(start)
# print start, cprob
return (start, cprob)
def mean(self):
"""Gives the arithmetic mean of the sample."""
return self.location
def mode(self):
"""Gives the mode of the sample."""
return self.location
def kurtosis(self):
"""Gives the kurtosis of the sample."""
return -1.0
def skew(self):
"""Gives the skew of the sample."""
return 0.0
def variance(self):
"""Gives the variance of the sample."""
return 0.25 * (self.scale ** 2)
def quantile1(self):
"""Gives the 1st quantile of the sample."""
return self.location - (0.404 * self.scale)
def quantile3(self):
"""Gives the 3rd quantile of the sample."""
return self.location + (0.404 * self.scale)
def qmean(self):
"""Gives the quantile of the arithmetic mean of the sample."""
return 0.5
def qmode(self):
"""Gives the quantile of the mode of the sample."""
return 0.5
# def random(self):
# """Gives a random number based on the distribution."""
# raise DistributionFunctionError
class TDistribution(Distribution):
"""
Class for Student's t-distribution.
@see: Ling, MHT. 2009. Compendium of Distributions, I: Beta, Binomial, Chi-
Square, F, Gamma, Geometric, Poisson, Student's t, and Uniform. The Python
Papers Source Codes 1:4
@status: Tested method
@since: version 0.2
"""
def __init__(self, location=0.0, scale=1.0, shape=2):
"""Constructor method. The parameters are used to construct
the probability distribution.
@param location: default = 0.0
@param scale: default = 1.0
@param shape: degrees of freedom; default = 2"""
self._mean = float(location)
self.stdev = float(scale)
self.df = float(shape)
def CDF(self, x):
"""
Cummulative Distribution Function, which gives the cummulative
probability (area under the probability curve) from -infinity or 0 to
a give x-value on the x-axis where y-axis is the probability.
"""
t = (x - self._mean) / self.stdev
a = nrpy.betai(self.df / 2.0, 0.5, self.df / (self.df + (t * t)))
if t > 0:
return 1 - 0.5 * a
else:
return 0.5 * a
def PDF(self, x):
"""
Calculates the density (probability) at x with n-th degrees of freedom
as
M{f(x) = S{Gamma}((n+1)/2) /
(sqrt(n * pi) S{Gamma}(n/2)) (1 + x^2/n)^-((n+1)/2)}
for all real x. It has mean 0 (for n > 1) and variance n/(n-2)
(for n > 2)."""
        # nrpy.gammln returns log(Gamma(x)); exponentiate to recover Gamma
        a = math.exp(nrpy.gammln((self.df + 1) / 2))
        b = math.sqrt(math.pi * self.df) * \
            math.exp(nrpy.gammln(self.df / 2)) * self.stdev
c = 1 + ((((x - self._mean) / self.stdev) ** 2) / self.df)
return (a / b) * (c ** ((-1 - self.df) / 2))
def inverseCDF(self, probability, start = -10.0,
end = 10.0, error = 10e-8):
"""
It does the reverse of CDF() method, it takes a probability value and
returns the corresponding value on the x-axis, together with the
cumulative probability.
@param probability: probability under the curve from -infinity
@param start: lower boundary of calculation (default = -10)
@param end: upper boundary of calculation (default = 10)
@param error: error between the given and calculated probabilities
(default = 10e-8)
@return: Returns a tuple (start, cprob) where 'start' is the standard
deviation for the area under the curve from -infinity to the given
'probability' (+/- step). 'cprob' is the calculated area under the
curve from -infinity to the returned 'start'.
"""
# check for tolerance
if abs(self.CDF(start)-probability) < error:
return (start, self.CDF(start))
# case 1: lower than -10 standard deviations
if probability < self.CDF(start):
return self.inverseCDF(probability, start-10, start, error)
# case 2: between -10 to 10 standard deviations (bisection method)
if probability > self.CDF(start) and \
probability < self.CDF((start+end)/2):
return self.inverseCDF(probability, start, (start+end)/2, error)
if probability > self.CDF((start+end)/2) and \
probability < self.CDF(end):
return self.inverseCDF(probability, (start+end)/2, end, error)
# case 3: higher than 10 standard deviations
if probability > self.CDF(end):
return self.inverseCDF(probability, end, end+10, error)
# cprob = self.CDF(start)
# if probability < cprob:
# return (start, cprob)
# while probability > cprob:
# start = start + step
# cprob = self.CDF(start)
# return (start, cprob)
def mean(self):
"""Gives the arithmetic mean of the sample."""
return self._mean
def mode(self):
"""Gives the mode of the sample."""
return self._mean
def kurtosis(self):
"""Gives the kurtosis of the sample."""
        # excess kurtosis of Student's t; defined only for df > 4
        return 6.0 / (self.df - 4)
def skew(self):
"""Gives the skew of the sample."""
return 0.0
def variance(self):
"""Gives the variance of the sample."""
return (self.df / (self.df - 2)) * self.stdev * self.stdev
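# Usage sketch (illustrative only, not part of the library API):
#   t = TDistribution(location=0.0, scale=1.0, shape=5)
#   t.CDF(0.0)            # 0.5 by symmetry
#   t.inverseCDF(0.975)   # x is roughly 2.57 for 5 degrees of freedom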
class TriangularDistribution(Distribution):
"""
Class for Triangular Distribution.
@see: Chen, KFQ, Ling, MHT. 2013. COPADS III (Compendium of
Distributions II): Cauchy, Cosine, Exponential, Hypergeometric,
Logarithmic, Semicircular, Triangular, and Weibull. The Python
Papers Source Codes 5: 2.
@status: Tested method
@since: version 0.4
"""
def __init__(self, upper_limit, peak, lower_limit=0):
"""
Constructor method. The parameters are used to construct the
probability distribution.
@param upper_limit: upper limit of the distrbution
@type upper_limit: float
@param peak: peak of the distrbution, which has to be between
the lower and upper limits of the distribution
@type peak: float
@param lower_limit: lower limit of the distrbution,
default = 0
@type lower_limit: float"""
        self.lower_limit = float(lower_limit)
        if upper_limit < self.lower_limit:
            raise AttributeError
        else:
            self.upper_limit = float(upper_limit)
        if peak > upper_limit:
            raise AttributeError
        if peak < lower_limit + 0.001:
            raise AttributeError
        else:
            # stored as 'peak' so that the attribute does not shadow
            # the mode() method
            self.peak = float(peak)
    def CDF(self, x):
        """
        Cummulative Distribution Function, which gives the cummulative
        probability (area under the probability curve) from -infinity or 0 to
        a give x-value on the x-axis where y-axis is the probability."""
        if x < self.lower_limit:
            raise AttributeError
        if x > self.upper_limit:
            raise AttributeError
        if x <= self.peak:
            return ((x - self.lower_limit) ** 2) / \
                ((self.upper_limit - self.lower_limit) * \
                (self.peak - self.lower_limit))
        else:
            return 1 - (((self.upper_limit - x) ** 2) / \
                ((self.upper_limit - self.lower_limit) * \
                (self.upper_limit - self.peak)))
    def PDF(self, x):
        """
        Partial Distribution Function, which gives the probability for the
        particular value of x, or the area under probability distribution
        from x-h to x+h for continuous distribution."""
        if x < self.lower_limit:
            raise AttributeError
        if x > self.upper_limit:
            raise AttributeError
        if x <= self.peak:
            return (2 * (x - self.lower_limit)) / \
                ((self.upper_limit - self.lower_limit) * \
                (self.peak - self.lower_limit))
        else:
            return (2 * (self.upper_limit - x)) / \
                ((self.upper_limit - self.lower_limit) * \
                (self.upper_limit - self.peak))
def inverseCDF(self, probability, start=0, step=0.01):
"""
It does the reverse of CDF() method, it takes a probability value
and returns the corresponding value on the x-axis."""
start = self.lower_limit
cprob = self.CDF(start)
if probability < cprob: return (start, cprob)
while (probability > cprob):
start = start + step
cprob = self.CDF(start)
# print start, cprob
return (start, cprob)
    def mean(self):
        """Gives the arithmetic mean of the sample."""
        return (self.lower_limit + self.upper_limit + self.peak) / 3.0
    def mode(self):
        """Gives the mode of the sample."""
        return self.peak
    def kurtosis(self):
        """Gives the kurtosis of the sample."""
        return -3.0 / 5.0
    def skew(self):
        """Gives the skew of the sample."""
        n = math.sqrt(2) * \
            (self.lower_limit + self.upper_limit - (2 * self.peak)) * \
            ((2 * self.lower_limit) - self.upper_limit - self.peak) * \
            (self.lower_limit - (2 * self.upper_limit) + self.peak)
        d = 5 * ((self.lower_limit ** 2 + self.upper_limit ** 2 + \
            self.peak ** 2 - (self.lower_limit * self.upper_limit) - \
            (self.lower_limit * self.peak) - \
            (self.upper_limit * self.peak)) ** 1.5)
        return n / d
    def variance(self):
        """Gives the variance of the sample."""
        return (self.lower_limit ** 2 + self.upper_limit ** 2 + \
            self.peak ** 2 - (self.lower_limit * self.upper_limit) - \
            (self.lower_limit * self.peak) - \
            (self.upper_limit * self.peak)) / 18.0
    def quantile1(self):
        """Gives the 1st quantile of the sample."""
        if ((self.peak - self.lower_limit) / \
            (self.upper_limit - self.lower_limit)) > 0.25:
            return self.lower_limit + (0.5 * math.sqrt((self.upper_limit - \
                self.lower_limit) * (self.peak - self.lower_limit)))
        else:
            return self.upper_limit - (0.5 * math.sqrt(3 * (self.upper_limit - \
                self.lower_limit) * (self.upper_limit - self.peak)))
    def quantile3(self):
        """Gives the 3rd quantile of the sample."""
        if ((self.peak - self.lower_limit) / \
            (self.upper_limit - self.lower_limit)) > 0.75:
            return self.lower_limit + (0.5 * math.sqrt(3 * (self.upper_limit - \
                self.lower_limit) * (self.peak - self.lower_limit)))
        else:
            return self.upper_limit - (0.5 * math.sqrt((self.upper_limit - \
                self.lower_limit) * (self.upper_limit - self.peak)))
    def qmean(self):
        """Gives the quantile of the arithmetic mean of the sample."""
        if self.peak > ((self.lower_limit + self.upper_limit) * 0.5):
            return ((self.upper_limit + self.peak - \
                (2 * self.lower_limit)) ** 2) / \
                (9 * (self.upper_limit - self.lower_limit) * \
                (self.peak - self.lower_limit))
        else:
            return 1 - ((((2 * self.upper_limit) - self.lower_limit - \
                self.peak) ** 2) / \
                (9 * (self.upper_limit - self.lower_limit) * \
                (self.upper_limit - self.peak)))
    def qmode(self):
        """Gives the quantile of the mode of the sample."""
        return (self.peak - self.lower_limit) / \
            (self.upper_limit - self.lower_limit)
# def random(self):
# """Gives a random number based on the distribution."""
# raise DistributionFunctionError
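# Usage sketch (illustrative only, not part of the library API):
#   t = TriangularDistribution(upper_limit=10.0, peak=5.0, lower_limit=0.0)
#   t.mean()     # 5.0
#   t.CDF(5.0)   # 0.5 for a symmetric triangle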
class UniformDistribution(Distribution):
"""
Class for Uniform distribution.
@see: Ling, MHT. 2009. Compendium of Distributions, I: Beta, Binomial, Chi-
Square, F, Gamma, Geometric, Poisson, Student's t, and Uniform. The Python
Papers Source Codes 1:4
@status: Tested method
@since: version 0.2
"""
def __init__(self, location, scale):
"""
Constructor method. The parameters are used to construct the
probability distribution.
@param location:
@param scale:
"""
self.location = float(location)
self.scale = float(scale)
def CDF(self, x):
"""
Cummulative Distribution Function, which gives the cummulative
probability (area under the probability curve) from -infinity or 0 to
a give x-value on the x-axis where y-axis is the probability.
"""
return (x - self.location) / (self.scale - self.location)
    def PDF(self, x):
"""
Partial Distribution Function, which gives the probability
for particular value of x, or the area under probability
distribution from x-h to x+h for continuous distribution.
"""
return 1.0 / (self.scale - self.location)
def inverseCDF(self, probability, start=0.0, step=0.01):
"""
        It does the reverse of CDF() method, it takes a probability value and
        returns the corresponding value on the x-axis.
        """
cprob = self.CDF(start)
if probability < cprob:
return (start, cprob)
while probability > cprob:
start = start + step
cprob = self.CDF(start)
return (start, cprob)
def mean(self):
"""Gives the arithmetic mean of the sample."""
return (self.location + self.scale) / 2.0
def median(self):
"""Gives the median of the sample."""
return (self.location + self.scale) / 2
def kurtosis(self):
"""Gives the kurtosis of the sample."""
return -1.2
def skew(self):
"""Gives the skew of the sample."""
return 0.0
def variance(self):
"""Gives the variance of the sample."""
return ((self.scale - self.location) ** 2) / 12
def quantile1(self):
"""Gives the 1st quantile of the sample."""
return ((3 * self.location) + self.scale) / 4
def quantile3(self):
"""Gives the 3rd quantile of the sample."""
return (self.location + (3 * self.scale)) / 4
def qmean(self):
"""Gives the quantile of the arithmetic mean of the sample."""
return 0.5
    def random(self):
        """Gives a random number based on the distribution."""
        return random.uniform(self.location, self.scale)
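# Usage sketch (illustrative only, not part of the library API):
#   u = UniformDistribution(0.0, 10.0)
#   u.CDF(2.5)     # 0.25
#   u.variance()   # 100/12, roughly 8.33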
class WeiBullDistribution(Distribution):
"""
Class for Weibull distribution.
@see: Chen, KFQ, Ling, MHT. 2013. COPADS III (Compendium of
Distributions II): Cauchy, Cosine, Exponential, Hypergeometric,
Logarithmic, Semicircular, Triangular, and Weibull. The Python
Papers Source Codes 5: 2.
@status: Tested method
@since: version 0.4
"""
    def __init__(self, location=1.0, scale=1.0):
        """Constructor method. The parameters are used to construct the
        probability distribution.
        @param location: scale of the distribution, default = 1.0
        @param scale: shape of the distribution, default = 1.0"""
        self.location = float(location)
        self.scale = float(scale)
def CDF(self, x):
"""
Cummulative Distribution Function, which gives the cummulative
probability (area under the probability curve) from -infinity or 0
to a give x-value on the x-axis where y-axis is the probability."""
power = -1 * ((float(x) / self.location) ** self.scale)
return 1 - (math.e ** power)
def PDF(self, x):
"""
Partial Distribution Function, which gives the probability for the
particular value of x, or the area under trobability distribution
from x-h to x+h for continuous distribution."""
if x < 0:
return 0
else:
power = -1 * ((float(x) / self.location) ** self.scale)
t3 = math.e ** power
t2 = (float(x) / self.location) ** (self.scale - 1)
t1 = self.scale / self.location
return t1 * t2 * t3
def inverseCDF(self, probability, start=0.0, step=0.01):
"""
It does the reverse of CDF() method, it takes a probability value and
returns the corresponding value on the x-axis."""
cprob = self.CDF(start)
if probability < cprob: return (start, cprob)
while (probability > cprob):
start = start + step
cprob = self.CDF(start)
# print start, cprob
return (start, cprob)
# def mean(self):
# """Gives the arithmetic mean of the sample."""
# return self.location * nrpy.gammln(1 + 1/self.scale)
def median(self):
"""Gives the median of the sample."""
return self.location * (math.log(2, math.e) ** (1/float(self.scale)))
def mode(self):
"""Gives the mode of the sample."""
if self.scale > 1:
t = ((self.scale - 1) / float(self.scale))
return self.location * (t ** (1/float(self.scale)))
else:
return 0
# def kurtosis(self):
# """Gives the kurtosis of the sample."""
# raise DistributionFunctionError
# def skew(self):
# """Gives the skew of the sample."""
# raise DistributionFunctionError
# def variance(self):
# """Gives the variance of the sample."""
# raise DistributionFunctionError
    def random(self):
        """Gives a random number based on the distribution."""
        # random.weibullvariate(alpha, beta) takes the scale (alpha) then the
        # shape (beta); here location plays the scale role and scale the shape
        return random.weibullvariate(self.location, self.scale)
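# Usage sketch (illustrative only, using this class's parameterisation
# where CDF(x) = 1 - exp(-(x/location)**scale)):
#   w = WeiBullDistribution(location=1.0, scale=2.0)
#   w.CDF(1.0)    # 1 - 1/e, roughly 0.632
#   w.median()    # sqrt(ln 2), roughly 0.833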
def FrechetDistribution(**parameters):
    """
    Frechet distribution is an alias of Weibull distribution."""
    return WeiBullDistribution(**parameters)
# ----------------------------------------------------------
# Untested Distributions
# ----------------------------------------------------------
def AntiLogNormalDistribution(**parameters):
"""
Anti-Lognormal distribution is an alias of Lognormal distribution."""
return LogNormalDistribution(**parameters)
class BernoulliDistribution(Distribution):
"""
Bernoulli distribution is a special case of Binomial distribution where
where number of trials = 1
"""
def __init__(self, success):
"""Constructor method. The parameters are used to construct the
probability distribution.
@param success: probability of success; 0 <= success <= 1"""
self.distribution = BinomialDistribution(success, trial = 1)
def CDF(self, x):
"""Cummulative Distribution Function, which gives the cummulative
probability (area under the probability curve) from -infinity or 0 to
a give x-value on the x-axis where y-axis is the probability."""
return self.distribution.CDF(x)
def PDF(self, x):
"""Partial Distribution Function, which gives the probability for the
particular value of x, or the area under probability distribution from
x-h to x+h for continuous distribution."""
return self.distribution.PDF(x)
def inverseCDF(self, probability, start = 0, step = 1):
"""It does the reverse of CDF() method, it takes a probability value and
returns the corresponding value on the x-axis."""
return self.distribution.inverseCDF(probability, start, step)
def mean(self):
"""Gives the arithmetic mean of the sample."""
return self.distribution.mean()
def mode(self):
"""Gives the mode of the sample."""
return self.distribution.mode()
def kurtosis(self):
"""Gives the kurtosis of the sample."""
return self.distribution.kurtosis()
def skew(self):
"""Gives the skew of the sample."""
return self.distribution.skew()
def variance(self):
"""Gives the variance of the sample."""
return self.distribution.variance()
# def random(self):
# """Gives a random number based on the distribution."""
# return self.distribution.random()
def BilateralExponentialDistribution(**parameters):
"""
Bilateral Exponential distribution is an alias of Laplace distribution."""
return LaplaceDistribution(**parameters)
class BradfordDistribution(Distribution):
"""Class for Bradford distribution."""
def __init__(self, location, scale, shape):
"""
Constructor method. The parameters are used to construct the
probability distribution.
@param location:
@param scale: upper bound
@param shape:"""
        self.location = float(location)
        self.scale = float(scale)
        self.shape = float(shape)
        # natural log, so that PDF and the moment formulae below are
        # consistent (the CDF is base-independent as a ratio of logs)
        self.k = math.log(self.shape + 1)
def CDF(self, x):
"""
Cummulative Distribution Function, which gives the cummulative
probability (area under the probability curve) from -infinity or 0 to
a give x-value on the x-axis where y-axis is the probability."""
r = ((self.shape * (x - self.location)) / (self.scale - self.location))
        return math.log(1 + r) / self.k
def PDF(self, x):
"""
Partial Distribution Function, which gives the probability for the
particular value of x, or the area under probability distribution
from x-h to x+h for continuous distribution."""
r = (self.shape * (x - self.location)) + self.scale - self.location
return self.shape / (self.k * r)
def inverseCDF(self, probability, start=0.0, step=0.01):
"""
It does the reverse of CDF() method, it takes a probability value
and returns the corresponding value on the x-axis."""
cprob = self.CDF(start)
if probability < cprob: return (start, cprob)
while (probability > cprob):
start = start + step
cprob = self.CDF(start)
# print start, cprob
return (start, cprob)
def mean(self):
"""Gives the arithmetic mean of the sample."""
r = self.shape * (self.scale - self.location)
r = r + (((self.shape + 1) * self.location - self.scale) * self.k)
return r / (self.shape * self.k)
def mode(self):
"""Gives the mode of the sample."""
return self.location
def kurtosis(self):
"""Gives the kurtosis of the sample."""
d = ((self.shape * (self.k - 2)) + (2 * self.k)) ** 2
d = 3 * self.shape * d
n = ((self.k * ((3 * self.k) - 16)) + 24)
n = (self.shape ** 3) * (self.k - 3) * n
        n = n + ((self.k - 4) * (self.k - 3) * (12 * self.k * (self.shape ** 2)))
        n = n + ((6 * self.shape * (self.k ** 2)) * ((3 * self.k) - 14))
return (n + (12 * (self.k ** 3))) / d
def skew(self):
"""Gives the skew of the sample."""
r = 12 * (self.shape ** 2)
r = r - (9 * self.k * self.shape * (self.shape + 2))
r = r + ((2 * self.k * self.k) * ((self.shape * (self.shape + 3)) + 3))
d = self.shape * (((self.k - 2) * self.shape) + (2 * self.k))
d = math.sqrt(d)
d = d * ((3 * self.shape * (self.k - 2)) + (6 * self.k))
        return (math.sqrt(2) * r) / d
def variance(self):
"""Gives the variance of the sample."""
r = (self.scale - self.location) ** 2
r = r * (self.shape * (self.k - 2) + (2 * self.k))
return r / (2 * self.shape * self.k * self.k)
def quantile1(self):
"""Gives the 1st quantile of the sample."""
r = (self.location * (self.shape + 1)) - self.scale
r = r + ((self.scale - self.location) * ((self.shape + 1)** 0.25))
return r / self.shape
def quantile3(self):
"""Gives the 3rd quantile of the sample."""
r = (self.location * (self.shape + 1)) - self.scale
r = r + ((self.scale - self.location) * ((self.shape + 1)** 0.75))
return r / self.shape
    def qmean(self):
        """Gives the quantile of the arithmetic mean of the sample."""
        r = math.log(self.shape / math.log(self.shape + 1))
        return r / math.log(self.shape + 1)
def qmode(self):
"""Gives the quantile of the mode of the sample."""
return 0.0
def random(self, seed):
"""Gives a random number based on the distribution."""
while 1:
r = self.location * (self.shape + 1) - self.scale
r = r + ((self.scale - self.location)*((self.shape + 1) ** seed))
seed = r / self.shape
yield seed
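# Usage sketch (illustrative only; assumes the natural-log form of k above):
#   b = BradfordDistribution(location=0.0, scale=1.0, shape=5.0)
#   b.CDF(1.0)   # 1.0 at the upper bound
#   b.qmode()    # 0.0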
class BurrDistribution(Distribution):
"""
Burr distribution is the generalization of Fisk distribution. Burr
distribution with D = 1 becomes Fisk distribution.
"""
def __init__(self, location, scale, C, D):
"""
Constructor method. The parameters are used to construct the
probability distribution.
@param location:
@param scale:
@param C: shape
@param D: shape"""
        self.location = float(location)
        self.scale = float(scale)
        self.C = float(C)
        self.D = float(D)
        # nrpy.gammln returns log(Gamma(x)); exponentiate to recover Gamma
        self.k = (math.exp(nrpy.gammln(self.D)) * \
                  math.exp(nrpy.gammln(1 - (2/self.C))) * \
                  math.exp(nrpy.gammln((2/self.C) + self.D))) - \
                 ((math.exp(nrpy.gammln(1 - (1/self.C))) ** 2) * \
                  (math.exp(nrpy.gammln((1/self.C) + self.D)) ** 2))
def CDF(self, x):
"""
Cummulative Distribution Function, which gives the cummulative
probability (area under the probability curve) from -infinity or 0 to
a give x-value on the x-axis where y-axis is the probability."""
return (1+(((x - self.location)/self.scale)**(-self.C)))**(-self.D)
def PDF(self, x):
"""
Partial Distribution Function, which gives the probability for the
particular value of x, or the area under probability distribution
from x-h to x+h for continuous distribution."""
r = (1+(((x - self.location)/self.scale)**(-self.C)))**(-self.D - 1)
r = r * ((self.C * self.D)/self.scale)
return r * (((x - self.location)/self.scale)**(-self.C - 1))
def inverseCDF(self, probability, start=0.0, step=0.01):
"""
It does the reverse of CDF() method, it takes a probability value
and returns the corresponding value on the x-axis."""
cprob = self.CDF(start)
if probability < cprob: return (start, cprob)
while (probability > cprob):
start = start + step
cprob = self.CDF(start)
# print start, cprob
return (start, cprob)
    def mean(self):
        """Gives the arithmetic mean of the sample."""
        r = math.exp(nrpy.gammln(1 - (1/self.C))) * \
            math.exp(nrpy.gammln((1/self.C) + self.D))
        return self.location + ((r * self.scale) / \
            math.exp(nrpy.gammln(self.D)))
def mode(self):
"""Gives the mode of the sample."""
if ((self.C * self.D) < 1): return self.location
else:
r = (((self.C * self.D)-1)/(self.C + 1)) ** (1/self.C)
return self.location + (self.scale * r)
# def kurtosis(self):
# """Gives the kurtosis of the sample."""
# raise DistributionFunctionError
# def skew(self):
# """Gives the skew of the sample."""
# raise DistributionFunctionError
    def variance(self):
        """Gives the variance of the sample."""
        return (self.k * (self.scale ** 2)) / \
            (math.exp(nrpy.gammln(self.D)) ** 2)
# def quantile1(self):
# """Gives the 1st quantile of the sample."""
# raise DistributionFunctionError
# def quantile3(self):
# """Gives the 3rd quantile of the sample."""
# raise DistributionFunctionError
# def qmean(self):
# """Gives the quantile of the arithmetic mean of the sample."""
# raise DistributionFunctionError
def qmode(self):
"""Gives the quantile of the mode of the sample."""
if ((self.C * self.D) < 1): return 0.0
else:
return (1 + ((self.C+1)/((self.C*self.D) - 1))) ** (-1*self.D)
def random(self, seed):
"""Gives a random number based on the distribution."""
while 1:
r = ((1/(seed ** (1/self.D))) - 1) ** (-1/self.C)
seed = self.location + self.scale * r
yield seed
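# Usage sketch (illustrative only, not part of the library API):
#   b = BurrDistribution(location=0.0, scale=1.0, C=2.0, D=3.0)
#   b.CDF(1.0)   # (1 + 1)**-3 = 0.125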
class ChiDistribution(Distribution):
"""Class for Chi distribution."""
# def __init__(self, **parameters):
# """Constructor method. The parameters are used to construct the
# probability distribution."""
# raise DistributionFunctionError
# def CDF(self, x):
# """
# Cummulative Distribution Function, which gives the cummulative
# probability (area under the probability curve) from -infinity or 0 to
# a give x-value on the x-axis where y-axis is the probability."""
# raise DistributionFunctionError
# def PDF(self, x):
# """
# Partial Distribution Function, which gives the probability for the
# particular value of x, or the area under probability distribution
# from x-h to x+h for continuous distribution."""
# raise DistributionFunctionError
def inverseCDF(self, probability, start=0.0, step=0.01):
"""
It does the reverse of CDF() method, it takes a probability value
and returns the corresponding value on the x-axis."""
cprob = self.CDF(start)
if probability < cprob: return (start, cprob)
while (probability > cprob):
start = start + step
cprob = self.CDF(start)
# print start, cprob
return (start, cprob)
# def mean(self):
# """Gives the arithmetic mean of the sample."""
# raise DistributionFunctionError
# def mode(self):
# """Gives the mode of the sample."""
# raise DistributionFunctionError
# def kurtosis(self):
# """Gives the kurtosis of the sample."""
# raise DistributionFunctionError
# def skew(self):
# """Gives the skew of the sample."""
# raise DistributionFunctionError
# def variance(self):
# """Gives the variance of the sample."""
# raise DistributionFunctionError
# def quantile1(self):
# """Gives the 1st quantile of the sample."""
# raise DistributionFunctionError
# def quantile3(self):
# """Gives the 3rd quantile of the sample."""
# raise DistributionFunctionError
# def qmean(self):
# """Gives the quantile of the arithmetic mean of the sample."""
# raise DistributionFunctionError
# def qmode(self):
# """Gives the quantile of the mode of the sample."""
# raise DistributionFunctionError
# def random(self):
# """Gives a random number based on the distribution."""
# raise DistributionFunctionError
def CobbDouglasDistribution(**parameters):
"""
Cobb-Douglas distribution is an alias of Lognormal distribution."""
return LogNormalDistribution(**parameters)
def DoubleExponentialDistribution(**parameters):
"""
Double Exponential distribution is an alias of Laplace distribution."""
return LaplaceDistribution(**parameters)
class DoubleGammaDistribution(Distribution):
"""
Double Gamma distribution is the signed version of Gamma distribution.
"""
def __init__(self, location, scale, shape):
"""
Constructor method. The parameters are used to construct the
probability distribution.
@param location:
@param scale:
@param shape:"""
        self.location = float(location)
        self.scale = float(scale)
        self.shape = float(shape)
def CDF(self, x):
"""
Cummulative Distribution Function, which gives the cummulative
probability (area under the probability curve) from -infinity or 0 to
a give x-value on the x-axis where y-axis is the probability."""
        r = nrpy.gammp(self.shape, abs((x - self.location)/self.scale))
if x > self.location: return 0.5 + (0.5 * r)
else: return 0.5 - (0.5 * r)
def PDF(self, x):
"""
Partial Distribution Function, which gives the probability for the
particular value of x, or the area under probability distribution
from x-h to x+h for continuous distribution."""
r = math.exp(-1 * abs((x - self.location)/self.scale))
r = r * (abs((x - self.location)/self.scale) ** (self.shape -1))
        # nrpy.gammln returns log(Gamma(x)); exponentiate to recover Gamma
        return r / (2 * self.scale * math.exp(nrpy.gammln(self.shape)))
def inverseCDF(self, probability, start=0.0, step=0.01):
"""
It does the reverse of CDF() method, it takes a probability value
and returns the corresponding value on the x-axis."""
cprob = self.CDF(start)
if probability < cprob: return (start, cprob)
while (probability > cprob):
start = start + step
cprob = self.CDF(start)
# print start, cprob
return (start, cprob)
def mean(self):
"""Gives the arithmetic mean of the sample."""
return self.location
def skew(self):
"""Gives the skew of the sample."""
return 0.0
def variance(self):
"""Gives the variance of the sample."""
return self.shape * (self.shape + 1) * (self.scale ** 2)
def qmean(self):
"""Gives the quantile of the arithmetic mean of the sample."""
return 0.5
# def random(self):
# """Gives a random number based on the distribution."""
# raise DistributionFunctionError
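# Usage sketch (illustrative only, not part of the library API):
#   d = DoubleGammaDistribution(location=0.0, scale=1.0, shape=2.0)
#   d.CDF(0.0)   # 0.5 by symmetry about the location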
class DoubleWeibullDistribution(Distribution):
"""
Double Weibull distribution is the signed version of Weibull distribution.
"""
# def __init__(self, **parameters):
# """Constructor method. The parameters are used to construct the
# probability distribution."""
# raise DistributionFunctionError
# def CDF(self, x):
# """
# Cummulative Distribution Function, which gives the cummulative
# probability (area under the probability curve) from -infinity or 0 to
# a give x-value on the x-axis where y-axis is the probability."""
# raise DistributionFunctionError
# def PDF(self, x):
# """
# Partial Distribution Function, which gives the probability for the
# particular value of x, or the area under probability distribution
# from x-h to x+h for continuous distribution."""
# raise DistributionFunctionError
def inverseCDF(self, probability, start=0.0, step=0.01):
"""
It does the reverse of CDF() method, it takes a probability value
and returns the corresponding value on the x-axis."""
cprob = self.CDF(start)
if probability < cprob: return (start, cprob)
while (probability > cprob):
start = start + step
cprob = self.CDF(start)
# print start, cprob
return (start, cprob)
# def mean(self):
# """Gives the arithmetic mean of the sample."""
# raise DistributionFunctionError
# def mode(self):
# """Gives the mode of the sample."""
# raise DistributionFunctionError
# def kurtosis(self):
# """Gives the kurtosis of the sample."""
# raise DistributionFunctionError
# def skew(self):
# """Gives the skew of the sample."""
# raise DistributionFunctionError
# def variance(self):
# """Gives the variance of the sample."""
# raise DistributionFunctionError
# def quantile1(self):
# """Gives the 1st quantile of the sample."""
# raise DistributionFunctionError
# def quantile3(self):
# """Gives the 3rd quantile of the sample."""
# raise DistributionFunctionError
# def qmean(self):
# """Gives the quantile of the arithmetic mean of the sample."""
# raise DistributionFunctionError
# def qmode(self):
# """Gives the quantile of the mode of the sample."""
# raise DistributionFunctionError
# def random(self):
# """Gives a random number based on the distribution."""
# raise DistributionFunctionError
class ExtremeLBDistribution(Distribution):
"""Class for Extreme LB distribution."""
# def __init__(self, **parameters):
# """Constructor method. The parameters are used to construct the
# probability distribution."""
# raise DistributionFunctionError
# def CDF(self, x):
# """
# Cummulative Distribution Function, which gives the cummulative
# probability (area under the probability curve) from -infinity or 0 to
# a give x-value on the x-axis where y-axis is the probability."""
# raise DistributionFunctionError
# def PDF(self, x):
# """
# Partial Distribution Function, which gives the probability for the
# particular value of x, or the area under probability distribution
# from x-h to x+h for continuous distribution."""
# raise DistributionFunctionError
def inverseCDF(self, probability, start=0.0, step=0.01):
"""
It does the reverse of CDF() method, it takes a probability value
and returns the corresponding value on the x-axis."""
cprob = self.CDF(start)
if probability < cprob: return (start, cprob)
while (probability > cprob):
start = start + step
cprob = self.CDF(start)
# print start, cprob
return (start, cprob)
# def mean(self):
# """Gives the arithmetic mean of the sample."""
# raise DistributionFunctionError
# def mode(self):
# """Gives the mode of the sample."""
# raise DistributionFunctionError
# def kurtosis(self):
# """Gives the kurtosis of the sample."""
# raise DistributionFunctionError
# def skew(self):
# """Gives the skew of the sample."""
# raise DistributionFunctionError
# def variance(self):
# """Gives the variance of the sample."""
# raise DistributionFunctionError
# def quantile1(self):
# """Gives the 1st quantile of the sample."""
# raise DistributionFunctionError
# def quantile3(self):
# """Gives the 3rd quantile of the sample."""
# raise DistributionFunctionError
# def qmean(self):
# """Gives the quantile of the arithmetic mean of the sample."""
# raise DistributionFunctionError
# def qmode(self):
# """Gives the quantile of the mode of the sample."""
# raise DistributionFunctionError
# def random(self):
# """Gives a random number based on the distribution."""
# raise DistributionFunctionError
class FiskDistribution(Distribution):
"""Class for Fisk distribution."""
# def __init__(self, **parameters):
# """Constructor method. The parameters are used to construct the
# probability distribution."""
# raise DistributionFunctionError
# def CDF(self, x):
# """
# Cummulative Distribution Function, which gives the cummulative
# probability (area under the probability curve) from -infinity or 0 to
# a give x-value on the x-axis where y-axis is the probability."""
# raise DistributionFunctionError
# def PDF(self, x):
# """
# Partial Distribution Function, which gives the probability for the
# particular value of x, or the area under probability distribution
# from x-h to x+h for continuous distribution."""
# raise DistributionFunctionError
def inverseCDF(self, probability, start=0.0, step=0.01):
"""
It does the reverse of CDF() method, it takes a probability value
and returns the corresponding value on the x-axis."""
cprob = self.CDF(start)
if probability < cprob: return (start, cprob)
while (probability > cprob):
start = start + step
cprob = self.CDF(start)
# print start, cprob
return (start, cprob)
# def mean(self):
# """Gives the arithmetic mean of the sample."""
# raise DistributionFunctionError
# def mode(self):
# """Gives the mode of the sample."""
# raise DistributionFunctionError
# def kurtosis(self):
# """Gives the kurtosis of the sample."""
# raise DistributionFunctionError
# def skew(self):
# """Gives the skew of the sample."""
# raise DistributionFunctionError
# def variance(self):
# """Gives the variance of the sample."""
# raise DistributionFunctionError
# def quantile1(self):
# """Gives the 1st quantile of the sample."""
# raise DistributionFunctionError
# def quantile3(self):
# """Gives the 3rd quantile of the sample."""
# raise DistributionFunctionError
# def qmean(self):
# """Gives the quantile of the arithmetic mean of the sample."""
# raise DistributionFunctionError
# def qmode(self):
# """Gives the quantile of the mode of the sample."""
# raise DistributionFunctionError
# def random(self):
# """Gives a random number based on the distribution."""
# raise DistributionFunctionError
def FisherTippettDistribution(location, scale):
"""
Fisher-Tippett distribution is an alias of Gumbel distribution.
@param location: S{eta}
@param scale: S{theta}"""
return GumbelDistribution(location, scale)
class FoldedNormalDistribution(Distribution):
"""Class for Folded Normal distribution."""
# def __init__(self, **parameters):
# """Constructor method. The parameters are used to construct the
# probability distribution."""
# raise DistributionFunctionError
# def CDF(self, x):
# """
# Cummulative Distribution Function, which gives the cummulative
# probability (area under the probability curve) from -infinity or 0 to
# a give x-value on the x-axis where y-axis is the probability."""
# raise DistributionFunctionError
# def PDF(self, x):
# """
# Partial Distribution Function, which gives the probability for the
# particular value of x, or the area under probability distribution
# from x-h to x+h for continuous distribution."""
# raise DistributionFunctionError
def inverseCDF(self, probability, start=0.0, step=0.01):
"""
It does the reverse of CDF() method, it takes a probability value
and returns the corresponding value on the x-axis."""
cprob = self.CDF(start)
if probability < cprob: return (start, cprob)
while (probability > cprob):
start = start + step
cprob = self.CDF(start)
# print start, cprob
return (start, cprob)
# def mean(self):
# """Gives the arithmetic mean of the sample."""
# raise DistributionFunctionError
# def mode(self):
# """Gives the mode of the sample."""
# raise DistributionFunctionError
# def kurtosis(self):
# """Gives the kurtosis of the sample."""
# raise DistributionFunctionError
# def skew(self):
# """Gives the skew of the sample."""
# raise DistributionFunctionError
# def variance(self):
# """Gives the variance of the sample."""
# raise DistributionFunctionError
# def quantile1(self):
# """Gives the 1st quantile of the sample."""
# raise DistributionFunctionError
# def quantile3(self):
# """Gives the 3rd quantile of the sample."""
# raise DistributionFunctionError
# def qmean(self):
# """Gives the quantile of the arithmetic mean of the sample."""
# raise DistributionFunctionError
# def qmode(self):
# """Gives the quantile of the mode of the sample."""
# raise DistributionFunctionError
# def random(self):
# """Gives a random number based on the distribution."""
# raise DistributionFunctionError
class GenLogisticDistribution(Distribution):
"""
Generalized Logistic distribution is a generalization of Logistic
distribution. It becomes Logistic distribution when shape = 1
"""
# def __init__(self, **parameters):
# """Constructor method. The parameters are used to construct the
# probability distribution."""
# raise DistributionFunctionError
# def CDF(self, x):
# """
# Cummulative Distribution Function, which gives the cummulative
# probability (area under the probability curve) from -infinity or 0 to
# a give x-value on the x-axis where y-axis is the probability."""
# raise DistributionFunctionError
# def PDF(self, x):
# """
# Partial Distribution Function, which gives the probability for the
# particular value of x, or the area under probability distribution
# from x-h to x+h for continuous distribution."""
# raise DistributionFunctionError
def inverseCDF(self, probability, start=0.0, step=0.01):
"""
It does the reverse of CDF() method, it takes a probability value
and returns the corresponding value on the x-axis."""
cprob = self.CDF(start)
if probability < cprob: return (start, cprob)
while (probability > cprob):
start = start + step
cprob = self.CDF(start)
# print start, cprob
return (start, cprob)
# def mean(self):
# """Gives the arithmetic mean of the sample."""
# raise DistributionFunctionError
# def mode(self):
# """Gives the mode of the sample."""
# raise DistributionFunctionError
# def kurtosis(self):
# """Gives the kurtosis of the sample."""
# raise DistributionFunctionError
# def skew(self):
# """Gives the skew of the sample."""
# raise DistributionFunctionError
# def variance(self):
# """Gives the variance of the sample."""
# raise DistributionFunctionError
# def quantile1(self):
# """Gives the 1st quantile of the sample."""
# raise DistributionFunctionError
# def quantile3(self):
# """Gives the 3rd quantile of the sample."""
# raise DistributionFunctionError
# def qmean(self):
# """Gives the quantile of the arithmetic mean of the sample."""
# raise DistributionFunctionError
# def qmode(self):
# """Gives the quantile of the mode of the sample."""
# raise DistributionFunctionError
# def random(self):
# """Gives a random number based on the distribution."""
# raise DistributionFunctionError
def GompertzDistribution(location, scale):
"""
Gompertz distribution is an alias of Gumbel distribution.
@param location: S{eta}
@param scale: S{theta}"""
return GumbelDistribution(location, scale)
class GumbelDistribution(Distribution):
"""Class for Gumbel Distribution."""
def __init__(self, location, scale):
"""Constructor method. The parameters are used to construct the
probability distribution.
@param location: S{eta}
@param scale: S{theta}"""
        self.location = float(location)
        self.scale = float(scale)
def CDF(self, x):
"""
Cummulative Distribution Function, which gives the cummulative
probability (area under the probability curve) from -infinity or 0 to
a give x-value on the x-axis where y-axis is the probability."""
return math.exp(-1 * math.exp((self.location - x) / self.scale))
def PDF(self, x):
"""
Partial Distribution Function, which gives the probability for the
particular value of x, or the area under probability distribution from
x-h to x+h for continuous distribution."""
return (1/self.scale) * math.exp((self.location - x) / self.scale) * \
self.CDF(x)
def inverseCDF(self, probability, start=0.0, step=0.01):
"""
It does the reverse of CDF() method, it takes a probability value and
returns the corresponding value on the x-axis."""
cprob = self.CDF(start)
if probability < cprob: return (start, cprob)
while (probability > cprob):
start = start + step
cprob = self.CDF(start)
# print start, cprob
return (start, cprob)
def mean(self):
"""Gives the arithmetic mean of the sample."""
return self.location + (GAMMA * self.scale)
def mode(self):
"""Gives the mode of the sample."""
return self.location
    def median(self):
        """Gives the median of the sample."""
        # natural log: the inverse of CDF(x) = exp(-exp((location - x)/scale))
        return self.location - self.scale * math.log(math.log(2))
def kurtosis(self):
"""Gives the kurtosis of the sample."""
return 2.4
def skew(self):
"""Gives the skew of the sample."""
return 1.1395
    def variance(self):
        """Gives the variance of the sample."""
        return ((PI * self.scale) ** 2) / 6.0
    def quantile1(self):
        """Gives the 1st quantile of the sample."""
        return self.location - self.scale * math.log(math.log(4))
    def quantile3(self):
        """Gives the 3rd quantile of the sample."""
        return self.location - self.scale * math.log(math.log(4.0/3.0))
def qmean(self):
"""Gives the quantile of the arithmetic mean of the sample."""
return 0.5704
def qmode(self):
"""Gives the quantile of the mode of the sample."""
return 0.3679
    def random(self, seed):
        """Gives a random number based on the distribution."""
        while 1:
            seed = self.location - \
                   (self.scale * math.log(-1 * math.log(seed)))
            yield seed
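# Usage sketch (illustrative only, not part of the library API):
#   g = GumbelDistribution(location=0.0, scale=1.0)
#   g.CDF(0.0)    # exp(-1), roughly 0.368 - the quantile of the mode
#   g.median()    # -ln(ln 2), roughly 0.3665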
class HalfNormalDistribution(Distribution):
"""
Half Normal distribution is a special case of Chi distribution where
shape (also degrees of freedom) = 1, and Folded Normal distribution
where location = 0
"""
def __init__(self, **parameters):
"""Constructor method. The parameters are used to construct the
probability distribution."""
try: self.distribution = ChiDistribution(location =
parameters['location'],
scale = parameters['scale'],
shape = 1)
except KeyError:
raise DistributionParameterError('Halfnormal distribution \
requires location and scale parameters')
def CDF(self, x):
"""
Cummulative Distribution Function, which gives the cummulative
probability (area under the probability curve) from -infinity or 0 to
a give x-value on the x-axis where y-axis is the probability."""
return self.distribution.CDF(x)
def PDF(self, x):
"""
Partial Distribution Function, which gives the probability for the
particular value of x, or the area under probability distribution from
x-h to x+h for continuous distribution."""
return self.distribution.PDF(x)
def inverseCDF(self, probability, start=0.0, step=0.01):
"""
It does the reverse of CDF() method, it takes a probability value and
returns the corresponding value on the x-axis."""
return self.distribution.inverseCDF(probability, start, step)
def mean(self):
"""Gives the arithmetic mean of the sample."""
return self.distribution.mean()
def mode(self):
"""Gives the mode of the sample."""
return self.distribution.mode()
def kurtosis(self):
"""Gives the kurtosis of the sample."""
return self.distribution.kurtosis()
def skew(self):
"""Gives the skew of the sample."""
return self.distribution.skew()
def variance(self):
"""Gives the variance of the sample."""
return self.distribution.variance()
class HyperbolicSecantDistribution(Distribution):
"""Class for Hyperbolic Secant Distribution."""
def __init__(self, location, scale):
"""
Constructor method. The parameters are used to construct the
probability distribution.
@param location:
@param scale:"""
        self.location = float(location)
        self.scale = float(scale)
def CDF(self, x):
"""
Cummulative Distribution Function, which gives the cummulative
probability (area under the probability curve) from -infinity or 0 to
a give x-value on the x-axis where y-axis is the probability."""
        return (2 / PI) * \
            math.atan(math.exp((x - self.location) / self.scale))
def PDF(self, x):
"""
Partial Distribution Function, which gives the probability for the
particular value of x, or the area under probability distribution
from x-h to x+h for continuous distribution."""
        return (1 / math.cosh((x - self.location) / self.scale)) / \
            (PI * self.scale)
def inverseCDF(self, probability, start=0.0, step=0.01):
"""
It does the reverse of CDF() method, it takes a probability value
and returns the corresponding value on the x-axis."""
cprob = self.CDF(start)
if probability < cprob: return (start, cprob)
while (probability > cprob):
start = start + step
cprob = self.CDF(start)
# print start, cprob
return (start, cprob)
def mean(self):
"""Gives the arithmetic mean of the sample."""
return self.location
def mode(self):
"""Gives the mode of the sample."""
return self.location
def kurtosis(self):
"""Gives the kurtosis of the sample."""
return 2.0
def skew(self):
"""Gives the skew of the sample."""
return 0.0
def variance(self):
"""Gives the variance of the sample."""
return 0.25 * ((PI * self.scale) ** 2)
# def quantile1(self):
# """Gives the 1st quantile of the sample."""
# raise DistributionFunctionError
# def quantile3(self):
# """Gives the 3rd quantile of the sample."""
# raise DistributionFunctionError
def qmean(self):
"""Gives the quantile of the arithmetic mean of the sample."""
return 0.5
def qmode(self):
"""Gives the quantile of the mode of the sample."""
return 0.5
# def random(self):
# """Gives a random number based on the distribution."""
# raise DistributionFunctionError
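# Usage sketch (illustrative only, not part of the library API):
#   h = HyperbolicSecantDistribution(location=0.0, scale=1.0)
#   h.CDF(0.0)   # (2/pi) * atan(1) = 0.5
#   h.PDF(0.0)   # 1/pi, roughly 0.318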
class LaplaceDistribution(Distribution):
    """Class for Laplace distribution."""
# def __init__(self, **parameters):
# """Constructor method. The parameters are used to construct the
# probability distribution."""
# raise DistributionFunctionError
# def CDF(self, x):
# """
# Cummulative Distribution Function, which gives the cummulative
# probability (area under the probability curve) from -infinity or 0 to
# a give x-value on the x-axis where y-axis is the probability."""
# raise DistributionFunctionError
# def PDF(self, x):
# """
# Partial Distribution Function, which gives the probability for the
# particular value of x, or the area under probability distribution
# from x-h to x+h for continuous distribution."""
# raise DistributionFunctionError
def inverseCDF(self, probability, start=0.0, step=0.01):
"""
It does the reverse of CDF() method, it takes a probability value
and returns the corresponding value on the x-axis."""
cprob = self.CDF(start)
if probability < cprob: return (start, cprob)
while (probability > cprob):
start = start + step
cprob = self.CDF(start)
# print start, cprob
return (start, cprob)
# def mean(self):
# """Gives the arithmetic mean of the sample."""
# raise DistributionFunctionError
# def mode(self):
# """Gives the mode of the sample."""
# raise DistributionFunctionError
# def kurtosis(self):
# """Gives the kurtosis of the sample."""
# raise DistributionFunctionError
# def skew(self):
# """Gives the skew of the sample."""
# raise DistributionFunctionError
# def variance(self):
# """Gives the variance of the sample."""
# raise DistributionFunctionError
# def quantile1(self):
# """Gives the 1st quantile of the sample."""
# raise DistributionFunctionError
# def quantile3(self):
# """Gives the 3rd quantile of the sample."""
# raise DistributionFunctionError
# def qmean(self):
# """Gives the quantile of the arithmetic mean of the sample."""
# raise DistributionFunctionError
# def qmode(self):
# """Gives the quantile of the mode of the sample."""
# raise DistributionFunctionError
# def random(self):
# """Gives a random number based on the distribution."""
# raise DistributionFunctionError
class LogisticDistribution(Distribution):
    """Class for Logistic distribution."""
# def __init__(self, **parameters):
# """Constructor method. The parameters are used to construct the
# probability distribution."""
# raise DistributionFunctionError
# def CDF(self, x):
# """
# Cummulative Distribution Function, which gives the cummulative
# probability (area under the probability curve) from -infinity or 0 to
# a give x-value on the x-axis where y-axis is the probability."""
# raise DistributionFunctionError
# def PDF(self, x):
# """
# Partial Distribution Function, which gives the probability for the
# particular value of x, or the area under probability distribution
# from x-h to x+h for continuous distribution."""
# raise DistributionFunctionError
def inverseCDF(self, probability, start=0.0, step=0.01):
"""
It does the reverse of CDF() method, it takes a probability value
and returns the corresponding value on the x-axis."""
cprob = self.CDF(start)
if probability < cprob: return (start, cprob)
while (probability > cprob):
start = start + step
cprob = self.CDF(start)
# print start, cprob
return (start, cprob)
# def mean(self):
# """Gives the arithmetic mean of the sample."""
# raise DistributionFunctionError
# def mode(self):
# """Gives the mode of the sample."""
# raise DistributionFunctionError
# def kurtosis(self):
# """Gives the kurtosis of the sample."""
# raise DistributionFunctionError
# def skew(self):
# """Gives the skew of the sample."""
# raise DistributionFunctionError
# def variance(self):
# """Gives the variance of the sample."""
# raise DistributionFunctionError
# def quantile1(self):
# """Gives the 1st quantile of the sample."""
# raise DistributionFunctionError
# def quantile3(self):
# """Gives the 3rd quantile of the sample."""
# raise DistributionFunctionError
# def qmean(self):
# """Gives the quantile of the arithmetic mean of the sample."""
# raise DistributionFunctionError
# def qmode(self):
# """Gives the quantile of the mode of the sample."""
# raise DistributionFunctionError
# def random(self):
# """Gives a random number based on the distribution."""
# raise DistributionFunctionError
def LogLogisticDistribution(**parameters):
"""
Log-Logistic distribution is an alias of Fisk distribution."""
return FiskDistribution(**parameters)
class LogNormalDistribution(Distribution):
    """Class for Lognormal distribution."""
    def __init__(self, a, b):
        """Constructor method. The parameters are used to construct the
        probability distribution.
        @param a: location (mean of the underlying normal distribution)
        @param b: scale (standard deviation of the underlying normal
        distribution); must be positive"""
        self.location = float(a)
        self.scale = float(b)
        if b <= 0:
            raise AttributeError
# def CDF(self, x):
# """
# Cummulative Distribution Function, which gives the cummulative
# probability (area under the probability curve) from -infinity or 0
# to a give x-value on the x-axis where y-axis is the probability."""
# raise DistributionFunctionError
# def PDF(self, x):
# """
# Partial Distribution Function, which gives the probability for the
# particular value of x, or the area under probability distribution
# from x-h to x+h for continuous distribution."""
# raise DistributionFunctionError
def inverseCDF(self, probability, start=0.0, step=0.01):
"""
It does the reverse of CDF() method, it takes a probability value and
returns the corresponding value on the x-axis."""
cprob = self.CDF(start)
if probability < cprob: return (start, cprob)
while (probability > cprob):
start = start + step
cprob = self.CDF(start)
# print start, cprob
return (start, cprob)
    def mean(self):
        """Gives the arithmetic mean of the sample."""
        return math.exp(self.location + ((self.scale ** 2) / 2.0))
# def mode(self):
# """Gives the mode of the sample."""
# raise DistributionFunctionError
# def kurtosis(self):
# """Gives the kurtosis of the sample."""
# raise DistributionFunctionError
# def skew(self):
# """Gives the skew of the sample."""
# raise DistributionFunctionError
# def variance(self):
# """Gives the variance of the sample."""
# raise DistributionFunctionError
    def random(self):
        """Gives a random number based on the distribution."""
        return random.lognormvariate(self.location, self.scale)
def LogWeibullDistribution(location, scale):
"""
Log-Weibull distribution is an alias of Gumbel distribution.
@param location: S{eta}
@param scale: S{theta}"""
return GumbelDistribution(location, scale)
def LorentzDistribution(**parameters):
"""
Lorentz distribution is an alias of Cauchy distribution."""
return CauchyDistribution(**parameters)
class MaxwellDistribution(Distribution):
"""
Maxwell distribution is a special case of Chi distribution where
location = 0 and shape (degrees of freedom) = 3
"""
def __init__(self, scale):
"""
Constructor method.
@param scale:"""
self.distribution = ChiDistribution(0, scale, 3)
def CDF(self, x):
"""
Cummulative Distribution Function, which gives the cummulative
probability (area under the probability curve) from -infinity or 0 to
a give x-value on the x-axis where y-axis is the probability."""
return self.distribution.CDF(x)
def PDF(self, x):
"""
Partial Distribution Function, which gives the probability for the
particular value of x, or the area under probability distribution from
x-h to x+h for continuous distribution."""
return self.distribution.PDF(x)
def inverseCDF(self, probability, start=0.0, step=0.01):
"""
        It does the reverse of CDF() method, it takes a probability value and
        returns the corresponding value on the x-axis."""
return self.distribution.inverseCDF(probability, start, step)
def mean(self):
"""Gives the arithmetic mean of the sample."""
return self.distribution.mean()
def mode(self):
"""Gives the mode of the sample."""
return self.distribution.mode()
def kurtosis(self):
"""Gives the kurtosis of the sample."""
return self.distribution.kurtosis()
def skew(self):
"""Gives the skew of the sample."""
return self.distribution.skew()
def variance(self):
"""Gives the variance of the sample."""
return self.distribution.variance()
# def random(self):
# """Gives a random number based on the distribution."""
# return self.distribution.random()
class NakagamiDistribution(Distribution):
    """Class for Nakagami distribution."""
# def __init__(self, **parameters):
# """Constructor method. The parameters are used to construct the
# probability distribution."""
# raise DistributionFunctionError
# def CDF(self, x):
# """
# Cummulative Distribution Function, which gives the cummulative
# probability (area under the probability curve) from -infinity or 0 to
# a give x-value on the x-axis where y-axis is the probability."""
# raise DistributionFunctionError
# def PDF(self, x):
# """
# Partial Distribution Function, which gives the probability for the
# particular value of x, or the area under probability distribution
# from x-h to x+h for continuous distribution."""
# raise DistributionFunctionError
def inverseCDF(self, probability, start=0.0, step=0.01):
"""
It does the reverse of CDF() method, it takes a probability value
and returns the corresponding value on the x-axis."""
cprob = self.CDF(start)
if probability < cprob: return (start, cprob)
while (probability > cprob):
start = start + step
cprob = self.CDF(start)
# print start, cprob
return (start, cprob)
# def mean(self):
# """Gives the arithmetic mean of the sample."""
# raise DistributionFunctionError
# def mode(self):
# """Gives the mode of the sample."""
# raise DistributionFunctionError
# def kurtosis(self):
# """Gives the kurtosis of the sample."""
# raise DistributionFunctionError
# def skew(self):
# """Gives the skew of the sample."""
# raise DistributionFunctionError
# def variance(self):
# """Gives the variance of the sample."""
# raise DistributionFunctionError
# def quantile1(self):
# """Gives the 1st quantile of the sample."""
# raise DistributionFunctionError
# def quantile3(self):
# """Gives the 3rd quantile of the sample."""
# raise DistributionFunctionError
# def qmean(self):
# """Gives the quantile of the arithmetic mean of the sample."""
# raise DistributionFunctionError
# def qmode(self):
# """Gives the quantile of the mode of the sample."""
# raise DistributionFunctionError
# def random(self):
# """Gives a random number based on the distribution."""
# raise DistributionFunctionError
class NegativeBinomialDistribution(Distribution):
"""Class for Negative Binomial Distribution."""
def __init__(self, success, target):
"""Constructor method. The parameters are used to construct the
probability distribution.
@param success: probability of success; 0 <= success <= 1
@param target: a constant, target number of successes"""
        self.success = float(success)
self.target = target
def CDF(self, x):
"""
Cummulative Distribution Function, which gives the cummulative
probability (area under the probability curve) from -infinity or 0 to
a give x-value on the x-axis where y-axis is the probability."""
        summation = 0.0
        # the support starts at the target number of successes
        for i in range(self.target, int(x) + 1):
            summation = summation + self.PDF(i)
return summation
def PDF(self, x):
"""
Partial Distribution Function, which gives the probability for the
particular value of x, or the area under probability distribution from
x-h to x+h for continuous distribution."""
return nrpy.bico(x - 1, self.target - 1) * \
(self.success ** self.target) * \
((1 - self.success) ** (x - self.target))
def inverseCDF(self, probability, start = 0, step = 1):
"""
It does the reverse of CDF() method, it takes a probability value and
returns the corresponding value on the x-axis."""
cprob = self.CDF(start)
if probability < cprob: return (start, cprob)
while (probability > cprob):
start = start + step
cprob = self.CDF(start)
# print start, cprob
return (start, cprob)
def mean(self):
"""Gives the arithmetic mean of the sample."""
return self.target / self.success
def mode(self):
"""Gives the mode of the sample."""
return int((self.success + self.target - 1)/self.success)
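    # The following moments are a sketch using the standard formulae for
    # this (number-of-trials) parameterisation; added so that the
    # delegating methods of PascalDistribution resolve.
    def kurtosis(self):
        """Gives the kurtosis of the sample."""
        return (6.0 / self.target) + \
            ((self.success ** 2) / (self.target * (1 - self.success)))
    def skew(self):
        """Gives the skew of the sample."""
        return (2 - self.success) / \
            math.sqrt(self.target * (1 - self.success))
    def variance(self):
        """Gives the variance of the sample."""
        return (self.target * (1 - self.success)) / (self.success ** 2)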
# def random(self):
# """Gives a random number based on the distribution."""
# raise DistributionFunctionError
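# Usage sketch (illustrative only, not part of the library API):
#   nb = NegativeBinomialDistribution(success=0.5, target=3)
#   nb.PDF(3)    # 0.5**3 = 0.125 (all of the first 3 trials succeed)
#   nb.mean()    # 6.0 trials expected for 3 successes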
def NegativeExponentialDistribution(**parameters):
"""
Negative-exponential distribution is an alias of Exponential distribution."""
return ExponentialDistribution(**parameters)
class ParetoDistribution(Distribution):
"""Class for Pareto Distribution."""
def __init__(self, location=1.0, scale=1.0):
"""Constructor method. The parameters are used to construct the
probability distribution.
@param location: also the scale; default = 1.0
@param scale: S{lambda}; default = 1.0"""
        self.location = float(location)
        self.scale = float(scale)
def CDF(self, x):
"""
Cummulative Distribution Function, which gives the cummulative
probability (area under the probability curve) from -infinity or 0 to
a give x-value on the x-axis where y-axis is the probability."""
return 1 - (self.location/x) ** self.scale
def PDF(self, x):
"""
Partial Distribution Function, which gives the probability for the
particular value of x, or the area under probability distribution from
x-h to x+h for continuous distribution."""
        return (self.scale * (self.location ** self.scale)) / \
            (x ** (self.scale + 1))
def inverseCDF(self, probability, start=0.0, step=0.01):
"""
It does the reverse of CDF() method, it takes a probability value and
returns the corresponding value on the x-axis."""
cprob = self.CDF(start)
if probability < cprob: return (start, cprob)
while (probability > cprob):
start = start + step
cprob = self.CDF(start)
# print start, cprob
return (start, cprob)
def mean(self):
"""Gives the arithmetic mean of the sample."""
return (self.location * self.scale) / (self.scale - 1)
def mode(self):
"""Gives the mode of the sample."""
return self.location
def median(self):
"""Gives the median of the sample."""
return self.location * (2 ** (1/self.scale))
    def kurtosis(self):
        """Gives the kurtosis of the sample."""
        # excess kurtosis; defined only for scale > 4
        n = 6 * (self.scale ** 3 + self.scale ** 2 - 6 * self.scale - 2)
        d = self.scale * (self.scale ** 2 - 7 * self.scale + 12)
        return n/d
def skew(self):
"""Gives the skew of the sample."""
n = 2 * (self.scale + 1) * math.sqrt(self.scale - 2)
d = (self.scale - 3) * math.sqrt(self.scale)
return n/d
def variance(self):
"""Gives the variance of the sample."""
n = (self.location ** 2) * self.scale
d = (self.scale - 2) * ((self.scale - 1) ** 2)
return n/d
def quantile1(self):
"""Gives the 1st quantile of the sample."""
        return self.location * ((4.0/3.0) ** (1/self.scale))
def quantile3(self):
"""Gives the 3rd quantile of the sample."""
return self.location * (4 ** (1/self.scale))
def qmean(self):
"""Gives the quantile of the arithmetic mean of the sample."""
return 1 - (((self.scale - 1) / self.scale) ** self.scale)
def qmode(self):
"""Gives the quantile of the mode of the sample."""
return 0.0
def random(self):
"""Gives a random number based on the distribution."""
return random.paretovariate(self.scale)
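# A minimal usage sketch for ParetoDistribution (scale > 2 so the variance
# is defined):
#   p = ParetoDistribution(location=1.0, scale=3.0)
#   p.CDF(2.0)       # 1 - (1/2)**3 = 0.875
#   p.mean()         # (1.0 * 3.0) / (3.0 - 1) = 1.5
#   p.random()       # delegates to random.paretovariate(3.0)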
class PascalDistribution(Distribution):
"""
Class for Pascal Distribution. Pascal Distribution is a form of Negative
Binomial Distribution where the 'target' is an integer
"""
def __init__(self, success, target):
"""Constructor method.
@param success: probability of success; 0 <= success <= 1
@param target: a constant, target number of successes"""
self.distribution = NegativeBinomialDistribution(success, int(target))
def CDF(self, x):
"""
        Cumulative Distribution Function, which gives the cumulative
        probability (area under the probability curve) from -infinity or 0 to
        a given x-value on the x-axis, where the y-axis is the probability."""
return self.distribution.CDF(x)
def PDF(self, x):
"""
        Probability Distribution Function, which gives the probability for a
        particular value of x, or the area under the probability distribution
        from x-h to x+h for a continuous distribution."""
return self.distribution.PDF(x)
    def inverseCDF(self, probability, start=0.0, step=0.01):
        """
        Does the reverse of the CDF() method: it takes a probability value and
        returns the corresponding value on the x-axis."""
return self.distribution.inverseCDF(probability, start, step)
def mean(self):
"""Gives the arithmetic mean of the sample."""
return self.distribution.mean()
def mode(self):
"""Gives the mode of the sample."""
return self.distribution.mode()
def kurtosis(self):
"""Gives the kurtosis of the sample."""
return self.distribution.kurtosis()
def skew(self):
"""Gives the skew of the sample."""
return self.distribution.skew()
def variance(self):
"""Gives the variance of the sample."""
return self.distribution.variance()
# def random(self):
# """Gives a random number based on the distribution."""
# return self.distribution.random()
def PolyaDistribution(success, target):
"""
Polya distribution is an alias of Negative Binomial distribution.
@param success: probability of success; 0 <= success <= 1
@param target: a constant, target number of successes
"""
return NegativeBinomialDistribution(success, target)
class PowerFunctionDistribution(Distribution):
"""
Class for Power Function Distribution. It is a form of Beta Distribution.
"""
def __init__(self, shape):
"""Constructor method.
@param shape:
"""
self.distribution = BetaDistribution(0, 1, shape, 1)
def CDF(self, x):
"""
        Cumulative Distribution Function, which gives the cumulative
        probability (area under the probability curve) from -infinity or 0 to
        a given x-value on the x-axis, where the y-axis is the probability."""
return self.distribution.CDF(x)
def PDF(self, x):
"""
        Probability Distribution Function, which gives the probability for a
        particular value of x, or the area under the probability distribution
        from x-h to x+h for a continuous distribution."""
return self.distribution.PDF(x)
def inverseCDF(self, probability, start=0.0, step=0.01):
"""
        Does the reverse of the CDF() method: it takes a probability value and
        returns the corresponding value on the x-axis."""
return self.distribution.inverseCDF(probability, start, step)
def mean(self):
"""Gives the arithmetic mean of the sample."""
return self.distribution.mean()
def mode(self):
"""Gives the mode of the sample."""
return self.distribution.mode()
def kurtosis(self):
"""Gives the kurtosis of the sample."""
return self.distribution.kurtosis()
def skew(self):
"""Gives the skew of the sample."""
return self.distribution.skew()
def variance(self):
"""Gives the variance of the sample."""
return self.distribution.variance()
# def random(self):
# """Gives a random number based on the distribution."""
# return self.distribution.random()
class RademacherDistribution(Distribution):
"""Class for Rademacher Distribution."""
def __init__(self):
"""Constructor method."""
pass
def CDF(self, x):
"""
        Cumulative Distribution Function, which gives the cumulative
        probability (area under the probability curve) from -infinity or 0 to
        a given x-value on the x-axis, where the y-axis is the probability."""
        if x < -1:
            return 0.0
        elif x < 1:
            # the CDF jumps to 0.5 at x = -1 and to 1.0 at x = 1
            return 0.5
        else: return 1.0
def PDF(self, x):
"""
        Probability Distribution Function, which gives the probability for a
        particular value of x, or the area under the probability distribution
        from x-h to x+h for a continuous distribution."""
if x == -1 or x == 1: return 0.5
else: return 0.0
def inverseCDF(self, probability, start=0.0, step=0.01):
"""
        Does the reverse of the CDF() method: it takes a probability value
        and returns the corresponding value on the x-axis."""
if probability == 0.0: return (-1.0001, 0.0)
if probability == 1.0: return (1.0, 1.0)
else: return (0.999, 0.5)
def mean(self):
"""Gives the arithmetic mean of the sample."""
return 0
def skew(self):
"""Gives the skew of the sample."""
return 0
def variance(self):
"""Gives the variance of the sample."""
return 1
# def random(self):
# """Gives a random number based on the distribution."""
# raise DistributionFunctionError
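# A Rademacher random variable takes only the values -1 and +1, each with
# probability 0.5, as the PDF() above shows.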
class RayleighDistribution(Distribution):
"""
Rayleigh distribution is a special case of Chi distribution where
location = 0 and shape (degrees of freedom) = 2
"""
def __init__(self, scale):
"""Constructor method.
@param scale:"""
self.distribution = ChiDistribution(0, scale, 2)
def CDF(self, x):
"""
        Cumulative Distribution Function, which gives the cumulative
        probability (area under the probability curve) from -infinity or 0 to
        a given x-value on the x-axis, where the y-axis is the probability."""
return self.distribution.CDF(x)
def PDF(self, x):
"""
        Probability Distribution Function, which gives the probability for a
        particular value of x, or the area under the probability distribution
        from x-h to x+h for a continuous distribution."""
return self.distribution.PDF(x)
    def inverseCDF(self, probability, start=0.0, step=0.01):
        """
        Does the reverse of the CDF() method: it takes a probability value and
        returns the corresponding value on the x-axis."""
return self.distribution.inverseCDF(probability, start, step)
def mean(self):
"""Gives the arithmetic mean of the sample."""
return self.distribution.mean()
def mode(self):
"""Gives the mode of the sample."""
return self.distribution.mode()
def kurtosis(self):
"""Gives the kurtosis of the sample."""
return self.distribution.kurtosis()
def skew(self):
"""Gives the skew of the sample."""
return self.distribution.skew()
def variance(self):
"""Gives the variance of the sample."""
return self.distribution.variance()
# def random(self):
# """Gives a random number based on the distribution."""
# return self.distribution.random()
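# A minimal usage sketch for RayleighDistribution; every method delegates to
# the underlying ChiDistribution(0, scale, 2):
#   r = RayleighDistribution(scale=1.0)
#   r.CDF(1.0)          # cumulative probability at x = 1.0
#   r.inverseCDF(0.5)   # linear search for the median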
class ReciprocalDistribution(Distribution):
# def __init__(self, **parameters):
# """Constructor method. The parameters are used to construct the
# probability distribution."""
# raise DistributionFunctionError
# def CDF(self, x):
# """
#        Cumulative Distribution Function, which gives the cumulative
#        probability (area under the probability curve) from -infinity or 0 to
#        a given x-value on the x-axis, where the y-axis is the probability."""
# raise DistributionFunctionError
# def PDF(self, x):
# """
#        Probability Distribution Function, which gives the probability for a
#        particular value of x, or the area under the probability distribution
#        from x-h to x+h for a continuous distribution."""
# raise DistributionFunctionError
def inverseCDF(self, probability, start=0.0, step=0.01):
"""
        Does the reverse of the CDF() method: it takes a probability value
        and returns the corresponding value on the x-axis."""
cprob = self.CDF(start)
if probability < cprob: return (start, cprob)
while (probability > cprob):
start = start + step
cprob = self.CDF(start)
# print start, cprob
return (start, cprob)
# def mean(self):
# """Gives the arithmetic mean of the sample."""
# raise DistributionFunctionError
# def mode(self):
# """Gives the mode of the sample."""
# raise DistributionFunctionError
# def kurtosis(self):
# """Gives the kurtosis of the sample."""
# raise DistributionFunctionError
# def skew(self):
# """Gives the skew of the sample."""
# raise DistributionFunctionError
# def variance(self):
# """Gives the variance of the sample."""
# raise DistributionFunctionError
# def quantile1(self):
# """Gives the 1st quantile of the sample."""
# raise DistributionFunctionError
# def quantile3(self):
# """Gives the 3rd quantile of the sample."""
# raise DistributionFunctionError
# def qmean(self):
# """Gives the quantile of the arithmetic mean of the sample."""
# raise DistributionFunctionError
# def qmode(self):
# """Gives the quantile of the mode of the sample."""
# raise DistributionFunctionError
# def random(self):
# """Gives a random number based on the distribution."""
# raise DistributionFunctionError
def RectangularDistribution(**parameters):
"""
Rectangular distribution is an alias of Uniform distribution."""
return UniformDistribution(**parameters)
def SechSquaredDistribution(**parameters):
"""
Sech-squared distribution is an alias of Logistic distribution."""
return LogisticDistribution(**parameters)
def WaldDistribution(**parameters):
"""
Wald distribution is an alias of Inverse Normal distribution."""
return InverseNormalDistribution(**parameters)
#class DummyDistribution(Distribution):
# def __init__(self, **parameters):
# """Constructor method. The parameters are used to construct the
# probability distribution."""
# raise DistributionFunctionError
# def CDF(self, x):
# """
#        Cumulative Distribution Function, which gives the cumulative
#        probability (area under the probability curve) from -infinity or 0 to
#        a given x-value on the x-axis, where the y-axis is the probability."""
# raise DistributionFunctionError
# def PDF(self, x):
# """
#        Probability Distribution Function, which gives the probability for a
#        particular value of x, or the area under the probability distribution
#        from x-h to x+h for a continuous distribution."""
# raise DistributionFunctionError
# def inverseCDF(self, probability, start=0.0, step=0.01):
# """
#        Does the reverse of the CDF() method: it takes a probability value
#        and returns the corresponding value on the x-axis."""
# cprob = self.CDF(start)
# if probability < cprob: return (start, cprob)
# while (probability > cprob):
# start = start + step
# cprob = self.CDF(start)
# # print start, cprob
# return (start, cprob)
# def mean(self):
# """Gives the arithmetic mean of the sample."""
# raise DistributionFunctionError
# def mode(self):
# """Gives the mode of the sample."""
# raise DistributionFunctionError
# def kurtosis(self):
# """Gives the kurtosis of the sample."""
# raise DistributionFunctionError
# def skew(self):
# """Gives the skew of the sample."""
# raise DistributionFunctionError
# def variance(self):
# """Gives the variance of the sample."""
# raise DistributionFunctionError
# def quantile1(self):
# """Gives the 1st quantile of the sample."""
# raise DistributionFunctionError
# def quantile3(self):
# """Gives the 3rd quantile of the sample."""
# raise DistributionFunctionError
# def qmean(self):
# """Gives the quantile of the arithmetic mean of the sample."""
# raise DistributionFunctionError
# def qmode(self):
# """Gives the quantile of the mode of the sample."""
# raise DistributionFunctionError
## def random(self, seed):
## """Gives a random number based on the distribution."""
## while 1:
## func
## yield seed
|
gpl-3.0
| 288,445,279,155,761,150
| 35.911425
| 83
| 0.581699
| false
| 4.145849
| false
| false
| false
|
janusnic/21v-pyqt
|
unit_02/con3.py
|
1
|
1328
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Code PyQt4
In this example, we create a simple
window in PyQt4.
"""
from PyQt4 import QtCore, QtGui
class MyWindow(QtGui.QWidget):
def __init__(self, parent=None):
QtGui.QWidget.__init__(self, parent)
        self.button1 = QtGui.QPushButton(u"Button 1. Click me")
        self.button2 = QtGui.QPushButton(u"Button 2")
vbox = QtGui.QVBoxLayout()
vbox.addWidget(self.button1)
vbox.addWidget(self.button2)
self.setLayout(vbox)
self.resize(300, 100)
        # Forward the signal from button 1 to button 2
self.connect(self.button1, QtCore.SIGNAL("clicked()"), self.button2, QtCore.SIGNAL('clicked()'))
        # Method 1 (4 parameters)
self.connect(self.button2, QtCore.SIGNAL("clicked()"), self, QtCore.SLOT("on_clicked_button2()"))
        # Method 2 (3 parameters)
self.connect(self.button2, QtCore.SIGNAL("clicked()"), QtCore.SLOT("on_clicked_button2()"))
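        # New-style (PyQt4 >= 4.5) equivalents of the old-style connections
        # above, shown as a sketch:
        #   self.button1.clicked.connect(self.button2.clicked)
        #   self.button2.clicked.connect(self.on_clicked_button2)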
@QtCore.pyqtSlot()
def on_clicked_button2(self):
        print("Signal received by button 2")
if __name__ == "__main__":
import sys
app = QtGui.QApplication(sys.argv)
window = MyWindow()
window.show()
sys.exit(app.exec_())
|
mit
| -807,823,098,126,535,000
| 31.315789
| 105
| 0.629177
| false
| 2.893868
| false
| false
| false
|
appsembler/configuration
|
playbooks/roles/backups/files/backup.py
|
1
|
17068
|
#!/usr/bin/python
import argparse
import datetime
import logging
import math
import os
import requests
import shutil
import socket
import subprocess
import sys
import time
import raven
def make_file_prefix(base_name):
hostname = socket.gethostname()
return '{0}_{1}'.format(hostname, base_name)
def make_file_name(base_name):
"""
Create a file name based on the hostname, a base_name, and date
e.g. openedxlite12345_mysql_20140102
"""
return '{0}_{1}'.format(make_file_prefix(base_name), datetime.datetime.now().
strftime("%Y%m%d"))
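# For example, on a host named 'openedxlite12345' (hypothetical), a 'mysql'
# backup made on 2014-01-02 is named 'openedxlite12345_mysql_20140102'.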
def upload_to_s3(file_path, bucket, aws_access_key_id, aws_secret_access_key):
"""
Upload a file to the specified S3 bucket.
file_path: An absolute path to the file to be uploaded.
bucket: The name of an S3 bucket.
aws_access_key_id: An AWS access key.
aws_secret_access_key: An AWS secret access key.
"""
from filechunkio import FileChunkIO
import boto
logging.info('Uploading backup at "{}" to Amazon S3 bucket "{}"'
.format(file_path, bucket))
conn = boto.connect_s3(aws_access_key_id, aws_secret_access_key)
bucket = conn.lookup(bucket)
file_name = os.path.basename(file_path)
file_size = os.stat(file_path).st_size
chunk_size = 104857600 # 100 MB
chunk_count = int(math.ceil(file_size / float(chunk_size)))
multipart_upload = bucket.initiate_multipart_upload(file_name)
for i in range(chunk_count):
offset = chunk_size * i
bytes_to_read = min(chunk_size, file_size - offset)
with FileChunkIO(file_path, 'r', offset=offset, bytes=bytes_to_read) as fp:
logging.info('Upload chunk {}/{}'.format(i + 1, chunk_count))
multipart_upload.upload_part_from_file(fp, part_num=(i + 1))
multipart_upload.complete_upload()
logging.info('Upload successful')
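# Usage sketch (bucket name and credentials are hypothetical):
#   upload_to_s3('/tmp/db_backups/host_mysql_20140102.tar.gz',
#                'my-backup-bucket', 'AKIA...', 'secret-key')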
def upload_to_gcloud_storage(file_path, bucket):
"""
Upload a file to the specified Google Cloud Storage bucket.
Note that the host machine must be properly configured to use boto with a
Google Cloud Platform service account. See
https://cloud.google.com/storage/docs/xml-api/gspythonlibrary.
file_path: An absolute path to the file to be uploaded.
bucket: The name of a Google Cloud Storage bucket.
"""
import boto
import gcs_oauth2_boto_plugin
logging.info('Uploading backup at "{}" to Google Cloud Storage bucket '
'"{}"'.format(file_path, bucket))
file_name = os.path.basename(file_path)
gcloud_uri = boto.storage_uri(bucket + '/' + file_name, 'gs')
gcloud_uri.new_key().set_contents_from_filename(file_path)
logging.info('Upload successful')
def upload_to_azure_storage(file_path, bucket, account, key):
"""
Upload a file to the specified Azure Storage container.
file_path: An absolute path to the file to be uploaded.
bucket: The name of an Azure Storage container.
account: An Azure Storage account.
key: An Azure Storage account key.
"""
from azure.storage.blob import BlockBlobService
    logging.info('Uploading backup at "{}" to Azure Storage container '
                 '"{}"'.format(file_path, bucket))
file_name = os.path.basename(file_path)
blob_service = BlockBlobService(account_name=account, account_key=key)
blob_service.create_blob_from_path(bucket, file_name, file_path)
logging.info('Upload successful')
class NoBackupsFound(Exception):
pass
def monitor_gcloud_backups(bucket, service, sentry, pushgateway):
"""Double check the backups in the Google Cloud Storage Bucket
Finds the most recent backup file and pushes the creation
timestamp to our monitoring. This gives us something of a "dead
man's switch" to alert us if the previous day's backups failed
silently.
We also raise a Sentry error if there are no backups found or
if this monitoring process fails.
bucket: The name of a Google Cloud Storage bucket.
service: the service name (really only supports 'mongodb' currently)
sentry: The sentry client
pushgateway: URL of the pushgateway
"""
import boto
import gcs_oauth2_boto_plugin
logging.info('checking backups in Google Cloud Storage bucket '
'"{}"'.format(bucket))
sentry.extra_context({'bucket': bucket})
try:
gcloud_uri = boto.storage_uri(bucket, 'gs')
keys = gcloud_uri.get_all_keys()
prefix = make_file_prefix(service)
backups = [k for k in keys if k.key.startswith(prefix)]
if len(backups) < 1:
raise NoBackupsFound("There are no backup files in the bucket")
backups.sort(key=lambda x: x.last_modified)
most_recent = backups[-1]
sentry.extra_context({'most_recent': most_recent})
last_modified = datetime.datetime.strptime(most_recent.last_modified,
'%Y-%m-%dT%H:%M:%S.%fZ')
push_backups_age_metric(pushgateway, socket.gethostname(),
float(last_modified.strftime('%s')),
backups_type=service)
logging.info('Monitoring successful')
except Exception:
        sentry.captureException()
def push_backups_age_metric(gateway, instance, value, backups_type="mongodb"):
""" submits backups timestamp to push gateway service
labelled with the instance (typically hostname) and type ('mongodb'
or 'mysql')"""
headers = {
'Content-type': 'application/octet-stream'
}
requests.post(
'{}/metrics/job/backups_monitor/instance/{}'.format(gateway, instance),
data='backups_timestamp{type="%s"} %f\n' % (backups_type, value),
headers=headers)
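# The POST body uses the Prometheus text exposition format; for example, a
# mongodb backup timestamp is pushed as:
#   backups_timestamp{type="mongodb"} 1400000000.000000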
def compress_backup(backup_path):
"""
Compress a backup using tar and gzip.
backup_path: An absolute path to a file or directory containing a
database dump.
returns: The absolute path to the compressed backup file.
"""
logging.info('Compressing backup at "{}"'.format(backup_path))
compressed_backup_path = backup_path + '.tar.gz'
zip_cmd = ['tar', '-zcvf', compressed_backup_path, backup_path]
ret = subprocess.call(zip_cmd, env={'GZIP': '-9'})
if ret: # if non-zero return
error_msg = 'Error occurred while compressing backup'
logging.error(error_msg)
raise Exception(error_msg)
return compressed_backup_path
def dump_service(service_name, backup_dir, user='', password=''):
"""
Dump the database contents for a service.
service_name: The name of the service to dump, either mysql or mongodb.
backup_dir: The directory where the database is to be dumped.
returns: The absolute path of the file or directory containing the
dump.
"""
commands = {
'mysql': 'mysqldump -u root --all-databases --single-transaction > {}',
'mongodb': 'mongodump -o {}',
}
if user and password:
commands['mongodb'] += (' --authenticationDatabase admin -u {} -p {}'
.format(user, password))
cmd_template = commands.get(service_name)
if cmd_template:
backup_filename = make_file_name(service_name)
backup_path = os.path.join(backup_dir, backup_filename)
cmd = cmd_template.format(backup_path)
logging.info('Dumping database: `{}`'.format(cmd))
ret = subprocess.call(cmd, shell=True)
if ret: # if non-zero return
error_msg = 'Error occurred while dumping database'
logging.error(error_msg)
raise Exception(error_msg)
return backup_path
else:
error_msg = 'Unknown service {}'.format(service_name)
logging.error(error_msg)
raise Exception(error_msg)
def clean_up(backup_path):
"""
Remove the local database dump and the corresponding tar file if it exists.
backup_path: An absolute path to a file or directory containing a
database dump.
"""
logging.info('Cleaning up "{}"'.format(backup_path))
backup_tar = backup_path + '.tar.gz'
if os.path.isfile(backup_tar):
os.remove(backup_tar)
try:
if os.path.isdir(backup_path):
shutil.rmtree(backup_path)
elif os.path.isfile(backup_path):
os.remove(backup_path)
except OSError:
logging.exception('Removing files at {} failed!'.format(backup_path))
def restore(service_name, backup_path, uncompress=True, settings=None):
"""
Restore a database from a backup.
service_name: The name of the service whose database is to be restored,
either mysql or mongodb.
backup_path: The absolute path to a backup.
uncompress: If True, the backup is assumed to be a gzipped tar and is
uncompressed before the database restoration.
"""
if service_name == 'mongodb':
restore_mongodb(backup_path, uncompress)
elif service_name == 'mysql':
restore_mysql(backup_path, uncompress, settings=settings)
def restore_mongodb(backup_path, uncompress=True):
"""
Restore a MongoDB database from a backup.
backup_path: The absolute path to a backup.
uncompress: If True, the backup is assumed to be a gzipped tar and is
uncompressed before the database restoration.
"""
logging.info('Restoring MongoDB from "{}"'.format(backup_path))
if uncompress:
backup_path = _uncompress(backup_path)
cmd = 'mongorestore {}'.format(backup_path)
ret = subprocess.call(cmd, shell=True)
if ret: # if non-zero return
error_msg = 'Error occurred while restoring MongoDB backup'
logging.error(error_msg)
raise Exception(error_msg)
logging.info('MongoDB successfully restored')
def restore_mysql(backup_path, uncompress=True, settings=None):
"""
Restore a MySQL database from a backup.
backup_path: The absolute path to a backup.
uncompress: If True, the backup is assumed to be a gzipped tar and is
uncompressed before the database restoration.
"""
logging.info('Restoring MySQL from "{}"'.format(backup_path))
if uncompress:
backup_path = _uncompress(backup_path)
cmd = 'mysqladmin -f drop edxapp'
ret = subprocess.call(cmd, shell=True)
if ret: # if non-zero return
error_msg = 'Error occurred while deleting old mysql database'
logging.error(error_msg)
raise Exception(error_msg)
cmd = 'mysqladmin -f create edxapp'
ret = subprocess.call(cmd, shell=True)
if ret: # if non-zero return
error_msg = 'Error occurred while creating new mysql database'
logging.error(error_msg)
raise Exception(error_msg)
cmd = 'mysql -D edxapp < {0}'.format(backup_path)
ret = subprocess.call(cmd, shell=True)
if ret: # if non-zero return
error_msg = 'Error occurred while restoring mysql database'
logging.error(error_msg)
raise Exception(error_msg)
cmd = ('source /edx/app/edxapp/edxapp_env && /edx/bin/manage.edxapp '
'lms migrate --settings={}'.format(settings))
ret = subprocess.call(cmd, shell=True, executable="/bin/bash")
if ret: # if non-zero return
error_msg = 'Error occurred while running edx migrations'
logging.error(error_msg)
raise Exception(error_msg)
cmd = '/edx/bin/supervisorctl restart edxapp:'
ret = subprocess.call(cmd, shell=True)
if ret: # if non-zero return
error_msg = 'Error occurred while restarting edx'
logging.error(error_msg)
raise Exception(error_msg)
logging.info('MySQL successfully restored')
def _uncompress(file_path):
"""
Uncompress a gzipped tar file. The contents of the compressed file are
extracted to the directory containing the compressed file.
file_path: An absolute path to a gzipped tar file.
returns: The directory containing the contents of the compressed file.
"""
logging.info('Uncompressing file at "{}"'.format(file_path))
file_dir = os.path.dirname(file_path)
cmd = 'tar xzvf {}'.format(file_path)
ret = subprocess.call(cmd, shell=True)
if ret: # if non-zero return
error_msg = 'Error occurred while uncompressing {}'.format(file_path)
logging.error(error_msg)
raise Exception(error_msg)
return file_path.replace('.tar.gz', '')
def _parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('service', help='mongodb or mysql')
parser.add_argument('-r', '--restore-path',
help='path to a backup used to restore a database')
parser.add_argument('-d', '--dir', dest='backup_dir',
help='temporary storage directory used during backup')
parser.add_argument('-u', '--user', help='database user')
parser.add_argument('--password', help='database password')
parser.add_argument('-p', '--provider', help='gs or s3')
parser.add_argument('-b', '--bucket', help='bucket name')
parser.add_argument('-i', '--s3-id', dest='s3_id',
help='AWS access key id')
parser.add_argument('-k', '--s3-key', dest='s3_key',
help='AWS secret access key')
parser.add_argument('--azure-account', dest='azure_account',
help='Azure storage account')
parser.add_argument('--azure-key', dest='azure_key',
help='Azure storage account key')
parser.add_argument('-n', '--uncompressed', dest='compressed',
action='store_false', default=True,
help='disable compression')
parser.add_argument('-s', '--settings',
help='Django settings used when running database '
'migrations')
parser.add_argument('--sentry-dsn', help='Sentry data source name')
parser.add_argument('--pushgateway', help='Prometheus pushgateway URL')
return parser.parse_args()
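# Example invocations (argument values are hypothetical; behaviour is
# selected by the program name, see _main() below):
#   edx_backup mongodb -p gs -b my-backups-bucket
#   edx_restore mysql -r /tmp/db_backups/host_mysql_20140102.tar.gz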
def _main():
args = _parse_args()
program_name = os.path.basename(sys.argv[0])
backup_dir = (args.backup_dir or os.environ.get('BACKUP_DIR',
'/tmp/db_backups'))
user = args.user or os.environ.get('BACKUP_USER', '')
password = args.password or os.environ.get('BACKUP_PASSWORD', '')
bucket = args.bucket or os.environ.get('BACKUP_BUCKET')
compressed = args.compressed
provider = args.provider or os.environ.get('BACKUP_PROVIDER', 'gs')
restore_path = args.restore_path
s3_id = args.s3_id or os.environ.get('BACKUP_AWS_ACCESS_KEY_ID')
s3_key = args.s3_key or os.environ.get('BACKUP_AWS_SECRET_ACCESS_KEY')
azure_account = args.azure_account or os.environ.get('BACKUP_AZURE_STORAGE_ACCOUNT')
azure_key = args.azure_key or os.environ.get('BACKUP_AZURE_STORAGE_KEY')
settings = args.settings or os.environ.get('BACKUP_SETTINGS', 'aws_appsembler')
sentry_dsn = args.sentry_dsn or os.environ.get('BACKUP_SENTRY_DSN', '')
pushgateway = args.pushgateway or os.environ.get('PUSHGATEWAY', 'https://pushgateway.infra.appsembler.com')
service = args.service
sentry = raven.Client(sentry_dsn)
if program_name == 'edx_backup':
backup_path = ''
try:
if not os.path.exists(backup_dir):
os.makedirs(backup_dir)
backup_path = dump_service(service, backup_dir, user, password)
if compressed:
backup_path = compress_backup(backup_path)
if provider == 'gs':
upload_to_gcloud_storage(backup_path, bucket)
elif provider == 's3':
upload_to_s3(backup_path, bucket, aws_access_key_id=s3_id,
aws_secret_access_key=s3_key)
elif provider == 'azure':
upload_to_azure_storage(backup_path, bucket, azure_account,
azure_key)
else:
error_msg = ('Invalid storage provider specified. Please use '
'"gs" or "s3".')
logging.warning(error_msg)
except:
logging.exception("The backup failed!")
sentry.captureException(fingerprint=['{{ default }}', time.time()])
finally:
clean_up(backup_path.replace('.tar.gz', ''))
elif program_name == 'edx_restore':
restore(service, restore_path, compressed, settings=settings)
elif program_name == 'edx_backups_monitor':
if provider == 'gs':
monitor_gcloud_backups(bucket, service, sentry, pushgateway)
else:
# other providers not supported yet
logging.warning("no backup monitoring available for this provider")
if __name__ == '__main__':
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
_main()
|
agpl-3.0
| 4,505,051,146,387,217,400
| 34.558333
| 111
| 0.629189
| false
| 3.967457
| false
| false
| false
|
jtacoma/geometriki
|
geometriki/tests/functional/test_pages.py
|
1
|
2036
|
# This file is part of geometriki.
#
# geometriki is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of
# the License, or (at your option) any later version.
#
# geometriki is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with geometriki, in a file named COPYING. If not,
# see <http://www.gnu.org/licenses/>.
from geometriki.tests import *
class TestPagesController(TestController):
def test_index(self):
response = self.app.get(url('pages'))
# Test response...
def test_index_as_xml(self):
response = self.app.get(url('formatted_pages', format='xml'))
def test_create(self):
response = self.app.post(url('pages'))
def test_new(self):
response = self.app.get(url('new_page'))
def test_new_as_xml(self):
response = self.app.get(url('formatted_new_page', format='xml'))
def test_update(self):
response = self.app.put(url('page', id=1))
def test_update_browser_fakeout(self):
response = self.app.post(url('page', id=1), params=dict(_method='put'))
def test_delete(self):
response = self.app.delete(url('page', id=1))
def test_delete_browser_fakeout(self):
response = self.app.post(url('page', id=1), params=dict(_method='delete'))
def test_show(self):
response = self.app.get(url('page', id=1))
def test_show_as_xml(self):
response = self.app.get(url('formatted_page', id=1, format='xml'))
def test_edit(self):
response = self.app.get(url('edit_page', id=1))
def test_edit_as_xml(self):
response = self.app.get(url('formatted_edit_page', id=1, format='xml'))
|
agpl-3.0
| 943,609,453,979,199,700
| 34.103448
| 82
| 0.667485
| false
| 3.510345
| true
| false
| false
|
maliceio/malice-pdf
|
pdfparser/pdf_parser.py
|
1
|
58940
|
"""
Modified by CSE to fit ASSEMBLYLINE service
"""
__description__ = 'pdf-parser, use it to parse a PDF document'
__author__ = 'Didier Stevens'
__version__ = '0.6.8'
__date__ = '2017/10/29'
__minimum_python_version__ = (2, 5, 1)
__maximum_python_version__ = (3, 6, 3)
"""
Source code put in public domain by Didier Stevens, no Copyright
https://DidierStevens.com
Use at your own risk
History:
2008/05/02: continue
2008/05/03: continue
2008/06/02: streams
2008/10/19: refactor, grep & extract functionality
2008/10/20: reference
2008/10/21: cleanup
2008/11/12: V0.3 dictionary parser
2008/11/13: option elements
2008/11/14: continue
2009/05/05: added /ASCIIHexDecode support (thanks Justin Prosco)
2009/05/11: V0.3.1 updated usage, added --verbose and --extract
2009/07/16: V0.3.2 Added Canonicalize (thanks Justin Prosco)
2009/07/18: bugfix EqualCanonical
2009/07/24: V0.3.3 Added --hash option
2009/07/25: EqualCanonical for option --type, added option --nocanonicalizedoutput
2009/07/28: V0.3.4 Added ASCII85Decode support
2009/08/01: V0.3.5 Updated ASCIIHexDecode to support whitespace obfuscation
2009/08/30: V0.3.6 TestPythonVersion
2010/01/08: V0.3.7 Added RLE and LZW support (thanks pARODY); added dump option
2010/01/09: Fixed parsing of incomplete startxref
2010/09/22: V0.3.8 Changed dump option, updated PrettyPrint, added debug option
2011/12/17: fixed bugs empty objects
2012/03/11: V0.3.9 fixed bugs double nested [] in PrettyPrintSub (thanks kurt)
2013/01/11: V0.3.10 Extract and dump bug fixes by Priit; added content option
2013/02/16: Performance improvement in cPDFTokenizer by using StringIO for token building by Christophe Vandeplas; xrange replaced with range
2013/02/16: V0.4.0 added http/https support; added error handling for missing file or URL; ; added support for ZIP file with password 'infected'
2013/03/13: V0.4.1 fixes for Python 3
2013/04/11: V0.4.2 modified PrettyPrintSub for strings with unprintable characters
2013/05/04: Added options searchstream, unfiltered, casesensitive, regex
2013/09/18: V0.4.3 fixed regression bug -w option
2014/09/25: V0.5.0 added option -g
2014/09/29: Added PrintGenerateObject and PrintOutputObject
2014/12/05: V0.6.0 Added YARA support
2014/12/09: cleanup, refactoring
2014/12/13: Python 3 fixes
2015/01/11: Added support for multiple YARA rule files; added request to search in trailer
2015/01/31: V0.6.1 Added optionyarastrings
2015/02/09: Added decoders
2015/04/05: V0.6.2 Added generateembedded
2015/04/06: fixed bug reported by Kurt for stream produced by Ghostscript where endstream is not preceded by whitespace; fixed prettyprint bug
2015/04/24: V0.6.3 when option dump's filename is -, content is dumped to stdout
2015/08/12: V0.6.4 option hash now also calculates hashes of streams when selecting or searching objects; and displays hexasciidump first line
2016/07/27: V0.6.5 bugfix whitespace 0x00 0x0C after stream 0x0D 0x0A reported by @mr_me
2016/11/20: V0.6.6 added workaround zlib errors FlateDecode
2016/12/17: V0.6.7 added option -k
2017/01/07: V0.6.8 changed cPDFParseDictionary to handle strings () with % character
2017/10/28: fixed bug
2017/10/29: added # support for option -y
Todo:
- handle printf todo
- support for JS hex string EC61C64349DB8D88AF0523C4C06E0F4D.pdf.vir
"""
import re
import optparse
import zlib
import binascii
import hashlib
import sys
import zipfile
import time
import os
if sys.version_info[0] >= 3:
from io import StringIO
import urllib.request
urllib23 = urllib.request
else:
from cStringIO import StringIO
import urllib2
urllib23 = urllib2
try:
import yara
except:
pass
CHAR_WHITESPACE = 1
CHAR_DELIMITER = 2
CHAR_REGULAR = 3
CONTEXT_NONE = 1
CONTEXT_OBJ = 2
CONTEXT_XREF = 3
CONTEXT_TRAILER = 4
PDF_ELEMENT_COMMENT = 1
PDF_ELEMENT_INDIRECT_OBJECT = 2
PDF_ELEMENT_XREF = 3
PDF_ELEMENT_TRAILER = 4
PDF_ELEMENT_STARTXREF = 5
PDF_ELEMENT_MALFORMED = 6
dumplinelength = 16
# C2BIP3: Convert a string to bytes if running on Python 3
def C2BIP3(string):
if sys.version_info[0] > 2:
return bytes([ord(x) for x in string])
else:
return string
# CIC: Call If Callable
def CIC(expression):
if callable(expression):
return expression()
else:
return expression
# IFF: IF Function
def IFF(expression, valueTrue, valueFalse):
if expression:
return CIC(valueTrue)
else:
return CIC(valueFalse)
def Timestamp(epoch=None):
if epoch == None:
localTime = time.localtime()
else:
localTime = time.localtime(epoch)
return '%04d%02d%02d-%02d%02d%02d' % localTime[0:6]
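# e.g. Timestamp() at 2017-10-29 12:34:56 local time returns '20171029-123456'.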
def CopyWithoutWhiteSpace(content):
result = []
for token in content:
if token[0] != CHAR_WHITESPACE:
result.append(token)
return result
def Obj2Str(content):
return ''.join(map(lambda x: repr(x[1])[1:-1], CopyWithoutWhiteSpace(content)))
class cPDFDocument:
def __init__(self, filepath):
self.file = filepath
if type(filepath) != str:
self.infile = filepath
elif filepath.lower().startswith('http://') or filepath.lower().startswith('https://'):
try:
if sys.hexversion >= 0x020601F0:
self.infile = urllib23.urlopen(filepath, timeout=5)
else:
self.infile = urllib23.urlopen(filepath)
except urllib23.HTTPError:
print('Error accessing URL %s' % filepath)
print(sys.exc_info()[1])
sys.exit()
elif filepath.lower().endswith('.zip'):
try:
self.zipfile = zipfile.ZipFile(filepath, 'r')
self.infile = self.zipfile.open(self.zipfile.infolist()[0], 'r', C2BIP3('infected'))
except:
print('Error opening file %s' % filepath)
print(sys.exc_info()[1])
sys.exit()
else:
try:
self.infile = open(filepath, 'rb')
except:
raise Exception('Error opening file %s' % filepath)
self.ungetted = []
self.position = -1
def byte(self):
if len(self.ungetted) != 0:
self.position += 1
return self.ungetted.pop()
inbyte = self.infile.read(1)
if not inbyte or inbyte == '':
self.infile.close()
return None
self.position += 1
return ord(inbyte)
def unget(self, byte):
self.position -= 1
self.ungetted.append(byte)
def CharacterClass(byte):
if byte == 0 or byte == 9 or byte == 10 or byte == 12 or byte == 13 or byte == 32:
return CHAR_WHITESPACE
if byte == 0x28 or byte == 0x29 or byte == 0x3C or byte == 0x3E or byte == 0x5B or byte == 0x5D or byte == 0x7B or byte == 0x7D or byte == 0x2F or byte == 0x25:
return CHAR_DELIMITER
return CHAR_REGULAR
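# The delimiter byte values tested above are the PDF delimiter characters:
# 0x28 ( 0x29 ) 0x3C < 0x3E > 0x5B [ 0x5D ] 0x7B { 0x7D } 0x2F / 0x25 %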
def IsNumeric(s):
    return re.match('^[0-9]+', s)
class cPDFTokenizer:
def __init__(self, file):
try:
self.oPDF = cPDFDocument(file)
except Exception as e:
raise Exception(e)
self.ungetted = []
def Token(self):
if len(self.ungetted) != 0:
return self.ungetted.pop()
if self.oPDF == None:
return None
self.byte = self.oPDF.byte()
if self.byte == None:
self.oPDF = None
return None
elif CharacterClass(self.byte) == CHAR_WHITESPACE:
file_str = StringIO()
while self.byte != None and CharacterClass(self.byte) == CHAR_WHITESPACE:
file_str.write(chr(self.byte))
self.byte = self.oPDF.byte()
if self.byte != None:
self.oPDF.unget(self.byte)
else:
self.oPDF = None
self.token = file_str.getvalue()
return (CHAR_WHITESPACE, self.token)
elif CharacterClass(self.byte) == CHAR_REGULAR:
file_str = StringIO()
while self.byte != None and CharacterClass(self.byte) == CHAR_REGULAR:
file_str.write(chr(self.byte))
self.byte = self.oPDF.byte()
if self.byte != None:
self.oPDF.unget(self.byte)
else:
self.oPDF = None
self.token = file_str.getvalue()
return (CHAR_REGULAR, self.token)
else:
if self.byte == 0x3C:
self.byte = self.oPDF.byte()
if self.byte == 0x3C:
return (CHAR_DELIMITER, '<<')
else:
self.oPDF.unget(self.byte)
return (CHAR_DELIMITER, '<')
elif self.byte == 0x3E:
self.byte = self.oPDF.byte()
if self.byte == 0x3E:
return (CHAR_DELIMITER, '>>')
else:
self.oPDF.unget(self.byte)
return (CHAR_DELIMITER, '>')
elif self.byte == 0x25:
file_str = StringIO()
while self.byte != None:
file_str.write(chr(self.byte))
if self.byte == 10 or self.byte == 13:
self.byte = self.oPDF.byte()
break
self.byte = self.oPDF.byte()
if self.byte != None:
if self.byte == 10:
file_str.write(chr(self.byte))
else:
self.oPDF.unget(self.byte)
else:
self.oPDF = None
self.token = file_str.getvalue()
return (CHAR_DELIMITER, self.token)
return (CHAR_DELIMITER, chr(self.byte))
def TokenIgnoreWhiteSpace(self):
token = self.Token()
while token != None and token[0] == CHAR_WHITESPACE:
token = self.Token()
return token
def Tokens(self):
tokens = []
token = self.Token()
while token != None:
tokens.append(token)
token = self.Token()
return tokens
def unget(self, byte):
self.ungetted.append(byte)
class cPDFParser:
def __init__(self, file, verbose=False, extract=None):
self.context = CONTEXT_NONE
self.content = []
try:
self.oPDFTokenizer = cPDFTokenizer(file)
except Exception as e:
raise Exception(e)
self.verbose = verbose
self.extract = extract
def GetObject(self):
while True:
if self.context == CONTEXT_OBJ:
self.token = self.oPDFTokenizer.Token()
else:
self.token = self.oPDFTokenizer.TokenIgnoreWhiteSpace()
if self.token:
if self.token[0] == CHAR_DELIMITER:
if self.token[1][0] == '%':
if self.context == CONTEXT_OBJ:
self.content.append(self.token)
else:
return cPDFElementComment(self.token[1])
elif self.token[1] == '/':
self.token2 = self.oPDFTokenizer.Token()
if self.token2[0] == CHAR_REGULAR:
if self.context != CONTEXT_NONE:
self.content.append((CHAR_DELIMITER, self.token[1] + self.token2[1]))
# elif self.verbose:
# print('todo 1: %s' % (self.token[1] + self.token2[1]))
else:
self.oPDFTokenizer.unget(self.token2)
if self.context != CONTEXT_NONE:
self.content.append(self.token)
# elif self.verbose:
# print('todo 2: %d %s' % (self.token[0], repr(self.token[1])))
elif self.context != CONTEXT_NONE:
self.content.append(self.token)
# elif self.verbose:
# print('todo 3: %d %s' % (self.token[0], repr(self.token[1])))
elif self.token[0] == CHAR_WHITESPACE:
if self.context != CONTEXT_NONE:
self.content.append(self.token)
# elif self.verbose:
# print('todo 4: %d %s' % (self.token[0], repr(self.token[1])))
else:
if self.context == CONTEXT_OBJ:
if self.token[1] == 'endobj':
self.oPDFElementIndirectObject = cPDFElementIndirectObject(
self.objectId, self.objectVersion, self.content)
self.context = CONTEXT_NONE
self.content = []
return self.oPDFElementIndirectObject
else:
self.content.append(self.token)
elif self.context == CONTEXT_TRAILER:
if self.token[1] == 'startxref' or self.token[1] == 'xref':
self.oPDFElementTrailer = cPDFElementTrailer(self.content)
self.oPDFTokenizer.unget(self.token)
self.context = CONTEXT_NONE
self.content = []
return self.oPDFElementTrailer
else:
self.content.append(self.token)
elif self.context == CONTEXT_XREF:
if self.token[1] == 'trailer' or self.token[1] == 'xref':
self.oPDFElementXref = cPDFElementXref(self.content)
self.oPDFTokenizer.unget(self.token)
self.context = CONTEXT_NONE
self.content = []
return self.oPDFElementXref
else:
self.content.append(self.token)
else:
if IsNumeric(self.token[1]):
self.token2 = self.oPDFTokenizer.TokenIgnoreWhiteSpace()
if IsNumeric(self.token2[1]):
self.token3 = self.oPDFTokenizer.TokenIgnoreWhiteSpace()
if self.token3[1] == 'obj':
self.objectId = eval(self.token[1])
self.objectVersion = eval(self.token2[1])
self.context = CONTEXT_OBJ
else:
self.oPDFTokenizer.unget(self.token3)
self.oPDFTokenizer.unget(self.token2)
# if self.verbose:
# print('todo 6: %d %s' % (self.token[0], repr(self.token[1])))
else:
self.oPDFTokenizer.unget(self.token2)
# if self.verbose:
# print('todo 7: %d %s' % (self.token[0], repr(self.token[1])))
elif self.token[1] == 'trailer':
self.context = CONTEXT_TRAILER
self.content = [self.token]
elif self.token[1] == 'xref':
self.context = CONTEXT_XREF
self.content = [self.token]
elif self.token[1] == 'startxref':
self.token2 = self.oPDFTokenizer.TokenIgnoreWhiteSpace()
if self.token2 and IsNumeric(self.token2[1]):
return cPDFElementStartxref(eval(self.token2[1]))
else:
self.oPDFTokenizer.unget(self.token2)
# if self.verbose:
# print('todo 9: %d %s' % (self.token[0], repr(self.token[1])))
elif self.extract:
self.bytes = ''
while self.token:
self.bytes += self.token[1]
self.token = self.oPDFTokenizer.Token()
return cPDFElementMalformed(self.bytes)
# elif self.verbose:
# print('todo 10: %d %s' % (self.token[0], repr(self.token[1])))
else:
break
class cPDFElementComment:
def __init__(self, comment):
self.type = PDF_ELEMENT_COMMENT
self.comment = comment
# if re.match('^%PDF-[0-9]\.[0-9]', self.token[1]):
# print(repr(self.token[1]))
# elif re.match('^%%EOF', self.token[1]):
# print(repr(self.token[1]))
class cPDFElementXref:
def __init__(self, content):
self.type = PDF_ELEMENT_XREF
self.content = content
class cPDFElementTrailer:
def __init__(self, content):
self.type = PDF_ELEMENT_TRAILER
self.content = content
def Contains(self, keyword):
data = ''
for i in range(0, len(self.content)):
if self.content[i][1] == 'stream':
break
else:
data += Canonicalize(self.content[i][1])
return data.upper().find(keyword.upper()) != -1
def IIf(expr, truepart, falsepart):
if expr:
return truepart
else:
return falsepart
class cPDFElementIndirectObject:
def __init__(self, id, version, content):
self.type = PDF_ELEMENT_INDIRECT_OBJECT
self.id = id
self.version = version
self.content = content
#fix stream for Ghostscript bug reported by Kurt
if self.ContainsStream():
position = len(self.content) - 1
if position < 0:
return
while self.content[position][0] == CHAR_WHITESPACE and position >= 0:
position -= 1
if position < 0:
return
if self.content[position][0] != CHAR_REGULAR:
return
if self.content[position][1] == 'endstream':
return
if not self.content[position][1].endswith('endstream'):
return
self.content = self.content[0:position] + [
(self.content[position][0], self.content[position][1][:-len('endstream')])
] + [(self.content[position][0], 'endstream')] + self.content[position + 1:]
def GetType(self):
content = CopyWithoutWhiteSpace(self.content)
dictionary = 0
for i in range(0, len(content)):
if content[i][0] == CHAR_DELIMITER and content[i][1] == '<<':
dictionary += 1
if content[i][0] == CHAR_DELIMITER and content[i][1] == '>>':
dictionary -= 1
if dictionary == 1 and content[i][0] == CHAR_DELIMITER and EqualCanonical(content[i][1],
'/Type') and i < len(content) - 1:
return content[i + 1][1]
return ''
def GetReferences(self):
content = CopyWithoutWhiteSpace(self.content)
references = []
for i in range(0, len(content)):
if i > 1 and content[i][0] == CHAR_REGULAR and content[i][1] == 'R' and content[i - 2][0] == CHAR_REGULAR and IsNumeric(
content[i - 2][1]) and content[i - 1][0] == CHAR_REGULAR and IsNumeric(content[i - 1][1]):
references.append((content[i - 2][1], content[i - 1][1], content[i][1]))
return references
def References(self, index):
for ref in self.GetReferences():
if ref[0] == index:
return True
return False
def ContainsStream(self):
for i in range(0, len(self.content)):
if self.content[i][0] == CHAR_REGULAR and self.content[i][1] == 'stream':
return self.content[0:i]
return False
def Contains(self, keyword):
data = ''
for i in range(0, len(self.content)):
if self.content[i][1] == 'stream':
break
else:
data += Canonicalize(self.content[i][1])
return data.upper().find(keyword.upper()) != -1
def StreamContains(self, keyword, filter, casesensitive, regex):
if not self.ContainsStream():
return False
streamData = self.Stream(filter)
if filter and streamData == 'No filters':
streamData = self.Stream(False)
if regex:
return re.search(keyword, streamData, IIf(casesensitive, 0, re.I))
elif casesensitive:
return keyword in streamData
else:
return keyword.lower() in streamData.lower()
def Stream(self, filter=True):
state = 'start'
countDirectories = 0
data = ''
filters = []
for i in range(0, len(self.content)):
if state == 'start':
if self.content[i][0] == CHAR_DELIMITER and self.content[i][1] == '<<':
countDirectories += 1
if self.content[i][0] == CHAR_DELIMITER and self.content[i][1] == '>>':
countDirectories -= 1
if countDirectories == 1 and self.content[i][0] == CHAR_DELIMITER and EqualCanonical(
self.content[i][1], '/Filter'):
state = 'filter'
elif countDirectories == 0 and self.content[i][0] == CHAR_REGULAR and self.content[i][1] == 'stream':
state = 'stream-whitespace'
elif state == 'filter':
if self.content[i][0] == CHAR_DELIMITER and self.content[i][1][0] == '/':
filters = [self.content[i][1]]
state = 'search-stream'
elif self.content[i][0] == CHAR_DELIMITER and self.content[i][1] == '[':
state = 'filter-list'
elif state == 'filter-list':
if self.content[i][0] == CHAR_DELIMITER and self.content[i][1][0] == '/':
filters.append(self.content[i][1])
elif self.content[i][0] == CHAR_DELIMITER and self.content[i][1] == ']':
state = 'search-stream'
elif state == 'search-stream':
if self.content[i][0] == CHAR_REGULAR and self.content[i][1] == 'stream':
state = 'stream-whitespace'
elif state == 'stream-whitespace':
if self.content[i][0] == CHAR_WHITESPACE:
whitespace = self.content[i][1]
if whitespace.startswith('\x0D\x0A') and len(whitespace) > 2:
data += whitespace[2:]
elif whitespace.startswith('\x0A') and len(whitespace) > 1:
data += whitespace[1:]
else:
data += self.content[i][1]
state = 'stream-concat'
elif state == 'stream-concat':
if self.content[i][0] == CHAR_REGULAR and self.content[i][1] == 'endstream':
if filter:
return self.Decompress(data, filters)
else:
return data
else:
data += self.content[i][1]
else:
return 'Unexpected filter state'
return filters
def Decompress(self, data, filters):
for filter in filters:
if EqualCanonical(filter, '/FlateDecode') or EqualCanonical(filter, '/Fl'):
try:
data = FlateDecode(data)
except zlib.error as e:
message = 'FlateDecode decompress failed'
if len(data) > 0 and ord(data[0]) & 0x0F != 8:
message += ', unexpected compression method: %02x' % ord(data[0])
return message + '. zlib.error %s' % e.message
elif EqualCanonical(filter, '/ASCIIHexDecode') or EqualCanonical(filter, '/AHx'):
try:
data = ASCIIHexDecode(data)
except:
return 'ASCIIHexDecode decompress failed'
elif EqualCanonical(filter, '/ASCII85Decode') or EqualCanonical(filter, '/A85'):
try:
data = ASCII85Decode(data.rstrip('>'))
except:
return 'ASCII85Decode decompress failed'
elif EqualCanonical(filter, '/LZWDecode') or EqualCanonical(filter, '/LZW'):
try:
data = LZWDecode(data)
except:
return 'LZWDecode decompress failed'
elif EqualCanonical(filter, '/RunLengthDecode') or EqualCanonical(filter, '/R'):
try:
data = RunLengthDecode(data)
except:
return 'RunLengthDecode decompress failed'
# elif i.startswith('/CC') # CCITTFaxDecode
# elif i.startswith('/DCT') # DCTDecode
else:
return 'Unsupported filter: %s' % repr(filters)
if len(filters) == 0:
return 'No filters'
else:
return data
def StreamYARAMatch(self, rules, decoders, decoderoptions, filter):
if not self.ContainsStream():
return None
streamData = self.Stream(filter)
if filter and streamData == 'No filters':
streamData = self.Stream(False)
oDecoders = [cIdentity(streamData, None)]
for cDecoder in decoders:
try:
oDecoder = cDecoder(streamData, decoderoptions)
oDecoders.append(oDecoder)
except Exception as e:
print('Error instantiating decoder: %s' % cDecoder.name)
raise e
results = []
for oDecoder in oDecoders:
while oDecoder.Available():
yaraResults = rules.match(data=oDecoder.Decode())
if yaraResults != []:
results.append([oDecoder.Name(), yaraResults])
return results
class cPDFElementStartxref:
def __init__(self, index):
self.type = PDF_ELEMENT_STARTXREF
self.index = index
class cPDFElementMalformed:
def __init__(self, content):
self.type = PDF_ELEMENT_MALFORMED
self.content = content
def TrimLWhiteSpace(data):
while data != [] and data[0][0] == CHAR_WHITESPACE:
data = data[1:]
return data
def TrimRWhiteSpace(data):
while data != [] and data[-1][0] == CHAR_WHITESPACE:
data = data[:-1]
return data
class cPDFParseDictionary:
def __init__(self, content, nocanonicalizedoutput):
self.content = content
self.nocanonicalizedoutput = nocanonicalizedoutput
dataTrimmed = TrimLWhiteSpace(TrimRWhiteSpace(self.content))
if dataTrimmed == []:
self.parsed = None
elif self.isOpenDictionary(dataTrimmed[0]) and (self.isCloseDictionary(dataTrimmed[-1]) or
self.couldBeCloseDictionary(dataTrimmed[-1])):
self.parsed = self.ParseDictionary(dataTrimmed)[0]
else:
self.parsed = None
def isOpenDictionary(self, token):
return token[0] == CHAR_DELIMITER and token[1] == '<<'
def isCloseDictionary(self, token):
return token[0] == CHAR_DELIMITER and token[1] == '>>'
def couldBeCloseDictionary(self, token):
return token[0] == CHAR_DELIMITER and token[1].rstrip().endswith('>>')
def ParseDictionary(self, tokens):
state = 0 # start
dictionary = []
while tokens != []:
if state == 0:
if self.isOpenDictionary(tokens[0]):
state = 1
else:
return None, tokens
elif state == 1:
if self.isOpenDictionary(tokens[0]):
pass
elif self.isCloseDictionary(tokens[0]):
return dictionary, tokens
elif tokens[0][0] != CHAR_WHITESPACE:
key = ConditionalCanonicalize(tokens[0][1], self.nocanonicalizedoutput)
value = []
state = 2
elif state == 2:
if self.isOpenDictionary(tokens[0]):
value, tokens = self.ParseDictionary(tokens)
dictionary.append((key, value))
state = 1
elif self.isCloseDictionary(tokens[0]):
dictionary.append((key, value))
return dictionary, tokens
elif value == [] and tokens[0][0] == CHAR_WHITESPACE:
pass
elif value == [] and tokens[0][1] == '[':
value.append(tokens[0][1])
elif value != [] and value[0] == '[' and tokens[0][1] != ']':
value.append(tokens[0][1])
elif value != [] and value[0] == '[' and tokens[0][1] == ']':
value.append(tokens[0][1])
dictionary.append((key, value))
value = []
state = 1
elif value == [] and tokens[0][1] == '(':
value.append(tokens[0][1])
elif value != [] and value[0] == '(' and tokens[0][1] != ')':
if tokens[0][1][0] == '%':
tokens = [tokens[0]] + cPDFTokenizer(StringIO(tokens[0][1][1:])).Tokens() + tokens[1:]
value.append('%')
else:
value.append(tokens[0][1])
elif value != [] and value[0] == '(' and tokens[0][1] == ')':
value.append(tokens[0][1])
dictionary.append((key, value))
value = []
state = 1
elif value != [] and tokens[0][1][0] == '/':
dictionary.append((key, value))
key = ConditionalCanonicalize(tokens[0][1], self.nocanonicalizedoutput)
value = []
state = 2
else:
value.append(ConditionalCanonicalize(tokens[0][1], self.nocanonicalizedoutput))
tokens = tokens[1:]
def Retrieve(self):
return self.parsed
def PrettyPrintSubElement(self, prefix, e):
res = ""
if e[1] == []:
res += '%s %s' % (prefix, e[0])
elif type(e[1][0]) == type(''):
if len(e[1]) == 3 and IsNumeric(e[1][0]) and e[1][1] == '0' and e[1][2] == 'R':
joiner = ' '
else:
joiner = ''
value = joiner.join(e[1]).strip()
reprValue = repr(value)
if "'" + value + "'" != reprValue:
value = reprValue
res += '%s %s %s' % (prefix, e[0], value)
else:
res += '%s %s' % (prefix, e[0])
sres = self.PrettyPrintSub(prefix + ' ', e[1])
res += sres
return res
def PrettyPrintSub(self, prefix, dictionary):
res = ""
if dictionary != None:
res = '<<++<<'
for e in dictionary:
sres = self.PrettyPrintSubElement(prefix, e)
res += sres
res += '>>++>>'
return res
def PrettyPrint(self, prefix):
res = self.PrettyPrintSub(prefix, self.parsed)
return res
def Get(self, select):
for key, value in self.parsed:
if key == select:
return value
return None
def GetNestedSub(self, dictionary, select):
for key, value in dictionary:
if key == select:
return self.PrettyPrintSubElement('', [select, value])
if type(value) == type([]) and len(value) > 0 and type(value[0]) == type((None,)):
result = self.GetNestedSub(value, select)
if result != None:
return self.PrettyPrintSubElement('', [select, result])
return None
def GetNested(self, select):
return self.GetNestedSub(self.parsed, select)
def FormatOutput(data, raw):
if raw:
if type(data) == type([]):
return ''.join(map(lambda x: x[1], data))
else:
return data
else:
return repr(data)
#Fix for http://bugs.python.org/issue11395
def StdoutWriteChunked(data):
if sys.version_info[0] > 2:
sys.stdout.buffer.write(data)
else:
while data != '':
sys.stdout.write(data[0:10000])
try:
sys.stdout.flush()
except IOError:
return
data = data[10000:]
def IfWIN32SetBinary(io):
if sys.platform == 'win32':
import msvcrt
msvcrt.setmode(io.fileno(), os.O_BINARY)
def PrintOutputObject(object, filt, nocanonicalizedoutput, dump, show_stream=False, hsh=False, raw=False):
errors = set()
res = ""
res += 'obj %d %d\n' % (object.id, object.version)
res += 'Type: %s\n' % ConditionalCanonicalize(object.GetType(), nocanonicalizedoutput)
res += 'Referencing: %s\n' % ', '.join(map(lambda x: '%s %s %s' % x, object.GetReferences()))
dataPrecedingStream = object.ContainsStream()
oPDFParseDictionary = None
if dataPrecedingStream:
res += 'Contains stream\n'
oPDFParseDictionary = cPDFParseDictionary(dataPrecedingStream, nocanonicalizedoutput)
if hsh:
streamContent = object.Stream(False)
res += 'unfiltered\n'
res += 'len: %6d md5: %s\n' % (len(streamContent), hashlib.md5(streamContent).hexdigest())
res += '%s\n' % HexAsciiDumpLine(streamContent)
streamContent = object.Stream(True)
res += 'filtered\n'
res += 'len: %6d md5: %s\n' % (len(streamContent), hashlib.md5(streamContent).hexdigest())
res += '%s\n' % HexAsciiDumpLine(streamContent)
streamContent = None
else:
if raw:
res += '%s\n' % FormatOutput(object.content, raw)
oPDFParseDictionary = cPDFParseDictionary(object.content, nocanonicalizedoutput)
if show_stream:
res += oPDFParseDictionary.PrettyPrint(' ')
if filt:
filtered = object.Stream()
if filtered == []:
res += ('%s\n' % FormatOutput(object.content, raw))
else:
res += ('%s\n' % FormatOutput(filtered, raw))
if dump:
filtered = object.Stream(filt == True)
if filtered == []:
filtered = ''
fdata = C2BIP3(filtered)
if fdata.startswith('Unsupported filter: '):
errors.add(fdata)
elif len(fdata) > 10:
try:
with open(dump, 'wb') as f:
f.write(fdata)
res += "Object extracted. See extracted files."
except:
errors.add('Error writing file %s' % dump)
return res, errors
def Canonicalize(sIn):
if sIn == '':
return sIn
elif sIn[0] != '/':
return sIn
elif sIn.find('#') == -1:
return sIn
else:
i = 0
iLen = len(sIn)
sCanonical = ''
while i < iLen:
if sIn[i] == '#' and i < iLen - 2:
try:
sCanonical += chr(int(sIn[i + 1:i + 3], 16))
i += 2
except:
sCanonical += sIn[i]
else:
sCanonical += sIn[i]
i += 1
return sCanonical
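# Canonicalize resolves #xx hex escapes in PDF name objects; for example,
# Canonicalize('/Ty#70e') returns '/Type'.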
def EqualCanonical(s1, s2):
return Canonicalize(s1) == s2
def ConditionalCanonicalize(sIn, nocanonicalizedoutput):
if nocanonicalizedoutput:
return sIn
else:
return Canonicalize(sIn)
# http://code.google.com/p/pdfminerr/source/browse/trunk/pdfminer/pdfminer/ascii85.py
def ASCII85Decode(data):
import struct
n = b = 0
out = ''
for c in data:
if '!' <= c and c <= 'u':
n += 1
b = b * 85 + (ord(c) - 33)
if n == 5:
out += struct.pack('>L', b)
n = b = 0
elif c == 'z':
assert n == 0
out += '\0\0\0\0'
elif c == '~':
if n:
for _ in range(5 - n):
b = b * 85 + 84
out += struct.pack('>L', b)[:n - 1]
break
return out
def ASCIIHexDecode(data):
return binascii.unhexlify(''.join([c for c in data if c not in ' \t\n\r']).rstrip('>'))
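# e.g. ASCIIHexDecode('48 65 6C 6C 6F>') returns 'Hello'; whitespace is
# ignored and the trailing '>' end-of-data marker is stripped.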
# if inflating fails, we try to inflate byte per byte (sample 4da299d6e52bbb79c0ac00bad6a1d51d4d5fe42965a8d94e88a359e5277117e2)
def FlateDecode(data):
try:
return zlib.decompress(C2BIP3(data))
except:
if len(data) <= 10:
raise
oDecompress = zlib.decompressobj()
oStringIO = StringIO()
count = 0
for byte in C2BIP3(data):
try:
oStringIO.write(oDecompress.decompress(byte))
count += 1
except:
break
if len(data) - count <= 2:
return oStringIO.getvalue()
else:
raise
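# Hedged illustration of the byte-per-byte fallback above: a truncated or
# corrupt tail makes the whole-buffer zlib.decompress() raise, but feeding
# the decompressor one byte at a time still recovers the intact prefix; the
# error is only re-raised when more than 2 trailing bytes were rejected.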
def RunLengthDecode(data):
f = StringIO(data)
decompressed = ''
runLength = ord(f.read(1))
while runLength:
if runLength < 128:
decompressed += f.read(runLength + 1)
if runLength > 128:
decompressed += f.read(1) * (257 - runLength)
if runLength == 128:
break
runLength = ord(f.read(1))
# return sub(r'(\d+)(\D)', lambda m: m.group(2) * int(m.group(1)), data)
return decompressed
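# Hedged examples of the PDF RunLength scheme handled above ('\x80' is the
# end-of-data marker; lengths < 128 copy literally, > 128 repeat one byte):
# RunLengthDecode('\x02abc\x80')  ->  'abc'
# RunLengthDecode('\xfeA\x80')    ->  'AAA'   # 257 - 254 = 3 repeats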
#### LZW code sourced from pdfminer
# Copyright (c) 2004-2009 Yusuke Shinyama <yusuke at cs dot nyu dot edu>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
class LZWDecoder(object):
def __init__(self, fp):
self.fp = fp
self.buff = 0
self.bpos = 8
self.nbits = 9
self.table = None
self.prevbuf = None
return
def readbits(self, bits):
v = 0
while 1:
# the number of remaining bits we can get from the current buffer.
r = 8 - self.bpos
if bits <= r:
# |-----8-bits-----|
# |-bpos-|-bits-| |
# | |----r----|
v = (v << bits) | ((self.buff >> (r - bits)) & ((1 << bits) - 1))
self.bpos += bits
break
else:
# |-----8-bits-----|
# |-bpos-|---bits----...
# | |----r----|
v = (v << r) | (self.buff & ((1 << r) - 1))
bits -= r
x = self.fp.read(1)
if not x: raise EOFError
self.buff = ord(x)
self.bpos = 0
return v
def feed(self, code):
x = ''
if code == 256:
self.table = [chr(c) for c in range(256)] # 0-255
self.table.append(None) # 256
self.table.append(None) # 257
self.prevbuf = ''
self.nbits = 9
elif code == 257:
pass
elif not self.prevbuf:
x = self.prevbuf = self.table[code]
else:
if code < len(self.table):
x = self.table[code]
self.table.append(self.prevbuf + x[0])
else:
self.table.append(self.prevbuf + self.prevbuf[0])
x = self.table[code]
l = len(self.table)
if l == 511:
self.nbits = 10
elif l == 1023:
self.nbits = 11
elif l == 2047:
self.nbits = 12
self.prevbuf = x
return x
def run(self):
while 1:
try:
code = self.readbits(self.nbits)
except EOFError:
break
x = self.feed(code)
yield x
return
####
def LZWDecode(data):
return ''.join(LZWDecoder(StringIO(data)).run())
def PrintGenerateObject(object, options, newId=None):
if newId == None:
objectId = object.id
else:
objectId = newId
dataPrecedingStream = object.ContainsStream()
if dataPrecedingStream:
if options.filter:
decompressed = object.Stream(True)
if decompressed == 'No filters' or decompressed.startswith('Unsupported filter: '):
print(' oPDF.stream(%d, %d, %s, %s)' %
(objectId, object.version, repr(object.Stream(False).rstrip()),
repr(re.sub('/Length\s+\d+', '/Length %d', FormatOutput(dataPrecedingStream, True)).strip())))
else:
dictionary = FormatOutput(dataPrecedingStream, True)
dictionary = re.sub(r'/Length\s+\d+', '', dictionary)
dictionary = re.sub(r'/Filter\s*/[a-zA-Z0-9]+', '', dictionary)
dictionary = re.sub(r'/Filter\s*\[.+\]', '', dictionary)
dictionary = re.sub(r'^\s*<<', '', dictionary)
dictionary = re.sub(r'>>\s*$', '', dictionary)
dictionary = dictionary.strip()
print(" oPDF.stream2(%d, %d, %s, %s, 'f')" % (objectId, object.version, repr(decompressed.rstrip()),
repr(dictionary)))
else:
print(' oPDF.stream(%d, %d, %s, %s)' %
(objectId, object.version, repr(object.Stream(False).rstrip()),
repr(re.sub('/Length\s+\d+', '/Length %d', FormatOutput(dataPrecedingStream, True)).strip())))
else:
print(' oPDF.indirectobject(%d, %d, %s)' % (objectId, object.version,
repr(FormatOutput(object.content, True).strip())))
def File2Strings(filename):
try:
f = open(filename, 'r')
except:
return None
try:
return map(lambda line: line.rstrip('\n'), f.readlines())
except:
return None
finally:
f.close()
def ProcessAt(argument):
if argument.startswith('@'):
strings = File2Strings(argument[1:])
if strings == None:
raise Exception('Error reading %s' % argument)
else:
return strings
else:
return [argument]
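# Hedged examples of the '@' convention above ('rules.txt' is a hypothetical
# file holding one entry per line):
# ProcessAt('@rules.txt')  ->  the lines of rules.txt as a list
# ProcessAt('rule.yar')    ->  ['rule.yar']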
def YARACompile(ruledata):
if ruledata.startswith('#'):
if ruledata.startswith('#h#'):
rule = binascii.a2b_hex(ruledata[3:])
elif ruledata.startswith('#b#'):
rule = binascii.a2b_base64(ruledata[3:])
elif ruledata.startswith('#s#'):
rule = 'rule string {strings: $a = "%s" ascii wide nocase condition: $a}' % ruledata[3:]
elif ruledata.startswith('#q#'):
rule = ruledata[3:].replace("'", '"')
else:
rule = ruledata[1:]
return yara.compile(source=rule)
else:
dFilepaths = {}
if os.path.isdir(ruledata):
for root, dirs, files in os.walk(ruledata):
for file in files:
filename = os.path.join(root, file)
dFilepaths[filename] = filename
else:
for filename in ProcessAt(ruledata):
dFilepaths[filename] = filename
return yara.compile(filepaths=dFilepaths)
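# Hedged usage sketch of the rule shorthands above ('#s#' builds an ad-hoc
# case-insensitive string rule; a bare path compiles rule files on disk):
# YARACompile('#s#JavaScript')   # matches the literal string 'JavaScript'
# YARACompile('rules/')          # compiles every file found under rules/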
def AddDecoder(cClass):
global decoders
decoders.append(cClass)
class cDecoderParent():
pass
def LoadDecoders(decoders, verbose):
if decoders == '':
return
scriptPath = os.path.dirname(sys.argv[0])
for decoder in sum(map(ProcessAt, decoders.split(',')), []):
try:
if not decoder.lower().endswith('.py'):
decoder += '.py'
if os.path.dirname(decoder) == '':
if not os.path.exists(decoder):
scriptDecoder = os.path.join(scriptPath, decoder)
if os.path.exists(scriptDecoder):
decoder = scriptDecoder
exec(open(decoder, 'r').read(), globals(), globals())
except Exception as e:
print('Error loading decoder: %s' % decoder)
if verbose:
raise e
class cIdentity(cDecoderParent):
name = 'Identity function decoder'
def __init__(self, stream, options):
self.stream = stream
self.options = options
self.available = True
def Available(self):
return self.available
def Decode(self):
self.available = False
return self.stream
def Name(self):
return ''
def DecodeFunction(decoders, options, stream):
if decoders == []:
return stream
return decoders[0](stream, options.decoderoptions).Decode()
class cDumpStream():
def __init__(self):
self.text = ''
def Addline(self, line):
if line != '':
self.text += line + '\n'
def Content(self):
return self.text
def HexDump(data):
oDumpStream = cDumpStream()
hexDump = ''
for i, b in enumerate(data):
if i % dumplinelength == 0 and hexDump != '':
oDumpStream.Addline(hexDump)
hexDump = ''
hexDump += IFF(hexDump == '', '', ' ') + '%02X' % ord(b)
oDumpStream.Addline(hexDump)
return oDumpStream.Content()
def CombineHexAscii(hexDump, asciiDump):
if hexDump == '':
return ''
return hexDump + ' ' + (' ' * (3 * (dumplinelength - len(asciiDump)))) + asciiDump
def HexAsciiDump(data):
oDumpStream = cDumpStream()
hexDump = ''
asciiDump = ''
for i, b in enumerate(data):
if i % dumplinelength == 0:
if hexDump != '':
oDumpStream.Addline(CombineHexAscii(hexDump, asciiDump))
hexDump = '%08X:' % i
asciiDump = ''
hexDump += ' %02X' % ord(b)
asciiDump += IFF(ord(b) >= 32, b, '.')
oDumpStream.Addline(CombineHexAscii(hexDump, asciiDump))
return oDumpStream.Content()
def HexAsciiDumpLine(data):
return HexAsciiDump(data[0:16])[10:-1]
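# Hedged illustration of the layout produced above, assuming the default
# dumplinelength of 16 (offset column, hex bytes, ASCII gutter):
# 00000000: 25 50 44 46 2D 31 2E 35  %PDF-1.5
# HexAsciiDumpLine() strips the 10-char offset column and keeps one line.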
def PDFParserMain(filename, outdirectory, **kwargs):
"""
Modified by CSE to fit ASSEMBLYLINE Service
"""
"""
pdf-parser, use it to parse a PDF document
"""
# Options
verbose = kwargs.get("verbose", False)
filt = kwargs.get("filter", False)
search = kwargs.get("search", None)
obj = kwargs.get("object", None)
typ = kwargs.get("type", None)
reference = kwargs.get("reference", None)
searchstream = kwargs.get("searchstream", None)
stats = kwargs.get("stats", False)
key = kwargs.get("key", None)
raw = kwargs.get("raw", False)
hsh = kwargs.get("hash", False)
dump = kwargs.get("dump", None)
get_object_detail = kwargs.get("get_object_detail", False)
get_malform = kwargs.get("get_malform", True)
max_objstm = kwargs.get("max_objstm", 100)
if dump:
dump = os.path.join(outdirectory, dump)
elements = kwargs.get("elements", None)
nocanonicalizedoutput = kwargs.get("nocanonicalizedoutput", False)
malform_content = os.path.join(outdirectory, "malformed_content")
max_search_hits = 50
search_hits = 0
try:
oPDFParser = cPDFParser(filename, verbose=verbose, extract=malform_content)
except Exception as e:
raise Exception(e)
cntComment = 0
cntXref = 0
cntTrailer = 0
cntStartXref = 0
cntIndirectObject = 0
dicObjectTypes = {}
selectComment = False
selectXref = False
selectTrailer = False
selectStartXref = False
selectIndirectObject = False
show_stream = False
if elements:
for c in elements:
if c == 'c':
selectComment = True
elif c == 'x':
selectXref = True
elif c == 't':
selectTrailer = True
elif c == 's':
selectStartXref = True
elif c == 'i':
selectIndirectObject = True
else:
print('Error: unknown --elements value %s' % c)
return
else:
selectIndirectObject = True
if not search and not obj and not reference and not typ and not searchstream and not key:
selectComment = True
selectXref = True
selectTrailer = True
selectStartXref = True
if search or key:
selectTrailer = True
show_stream = True
optionsType = ''
if typ:
optionsType = typ
results = {
'version': __version__,
'parts': [],
'stats': [],
'files': {
'embedded': [],
'malformed': [],
'triage_kw': []
},
'obj_details': ""
}
errors = set()
while True:
try:
object = oPDFParser.GetObject()
except Exception:
continue
if object != None:
if stats:
if object.type == PDF_ELEMENT_COMMENT:
cntComment += 1
elif object.type == PDF_ELEMENT_XREF:
cntXref += 1
elif object.type == PDF_ELEMENT_TRAILER:
cntTrailer += 1
elif object.type == PDF_ELEMENT_STARTXREF:
cntStartXref += 1
elif object.type == PDF_ELEMENT_INDIRECT_OBJECT:
cntIndirectObject += 1
type1 = object.GetType()
if not type1 in dicObjectTypes:
dicObjectTypes[type1] = [object.id]
else:
dicObjectTypes[type1].append(object.id)
else:
if object.type == PDF_ELEMENT_COMMENT and selectComment:
if not search and not key or search and object.Contains(search):
results['parts'].append('PDF Comment %s' % FormatOutput(object.comment, raw))
elif object.type == PDF_ELEMENT_XREF and selectXref:
results['parts'].append('xref %s' % FormatOutput(object.content, raw))
elif object.type == PDF_ELEMENT_TRAILER and selectTrailer:
oPDFParseDictionary = cPDFParseDictionary(object.content[1:], nocanonicalizedoutput)
if not search and not key or search and object.Contains(search):
if oPDFParseDictionary == None:
results['parts'].append('trailer: %s' % FormatOutput(object.content, raw))
else:
trailer = 'trailer:\n'
trailer += oPDFParseDictionary.PrettyPrint(' ')
results['parts'].append(trailer)
elif key:
if oPDFParseDictionary.parsed != None:
result = oPDFParseDictionary.GetNested(key)
if result != None:
results['parts'].append(result)
elif object.type == PDF_ELEMENT_STARTXREF and selectStartXref:
if not search:
results['parts'].append('startxref %d' % object.index)
elif object.type == PDF_ELEMENT_INDIRECT_OBJECT and selectIndirectObject:
if search:
if search_hits <= max_search_hits:
if object.Contains(search):
res, err = PrintOutputObject(
object,
filt,
nocanonicalizedoutput,
dump,
raw=raw,
hsh=hsh,
show_stream=show_stream)
if search in res:
results['parts'].append(res)
search_hits += 1
else:
# Try again, this time getting the raw output
res, err = PrintOutputObject(object, filt, nocanonicalizedoutput, dump, raw=True)
if search in res:
results['parts'].append(res)
search_hits += 1
else:
break
elif key:
oPDFParseDictionary = cPDFParseDictionary(object.content[1:], nocanonicalizedoutput)
if oPDFParseDictionary.parsed != None:
result = oPDFParseDictionary.GetNested(key)
if result != None:
results['parts'].append(result)
elif obj:
if object.id == eval(obj):
res, err = PrintOutputObject(
object, filt, nocanonicalizedoutput, dump, raw=raw, hsh=hsh, show_stream=show_stream)
results['parts'].append(res)
if get_object_detail:
obj_det = re.match(r'[\r]?\n<<.+>>[\r]?\n', FormatOutput(object.content, raw=True),
re.DOTALL)
if obj_det:
results['obj_details'] = obj_det.group(0)
if dump and "Object extracted." in res:
results['files']['embedded'].append(dump)
if len(err) > 0:
for e in err:
errors.add("Object extraction error: {}".format(e))
break
elif reference:
if object.References(reference):
res, err = PrintOutputObject(
object, filt, nocanonicalizedoutput, dump, raw=raw, hsh=hsh, show_stream=show_stream)
results['parts'].append(res)
elif typ:
if EqualCanonical(object.GetType(), optionsType):
if search_hits <= max_objstm:
res, err = PrintOutputObject(
object,
filt,
nocanonicalizedoutput,
dump,
raw=raw,
hsh=hsh,
show_stream=show_stream)
results['parts'].append(res)
search_hits += 1
else:
break
elif hsh:
results['parts'].append('obj %d %d' % (object.id, object.version))
rawContent = FormatOutput(object.content, True)
results['parts'].append(
' len: %d md5: %s' % (len(rawContent), hashlib.md5(rawContent).hexdigest()))
else:
res, err = PrintOutputObject(
object, filt, nocanonicalizedoutput, dump, raw=raw, hsh=hsh, show_stream=show_stream)
results['parts'].append(res)
elif object.type == PDF_ELEMENT_MALFORMED and get_malform:
if len(object.content) > 50:
try:
with open(malform_content, 'wb') as fExtract:
fExtract.write(C2BIP3(object.content))
results['files']['malformed'].append(malform_content)
except:
errors.add('Error writing file %s' % malform_content)
else:
break
if stats:
results['stats'].append('Comment: %s' % cntComment)
results['stats'].append('XREF: %s' % cntXref)
results['stats'].append('Trailer: %s' % cntTrailer)
results['stats'].append('StartXref: %s' % cntStartXref)
results['stats'].append('Indirect object: %s' % cntIndirectObject)
        names = sorted(dicObjectTypes.keys())
for key in names:
results['stats'].append(
'%s %d: %s' % (key, len(dicObjectTypes[key]), ', '.join(map(lambda x: '%d' % x, dicObjectTypes[key]))))
return results, errors
def TestPythonVersion(enforceMaximumVersion=False, enforceMinimumVersion=False):
if sys.version_info[0:3] > __maximum_python_version__:
if enforceMaximumVersion:
print('This program does not work with this version of Python (%d.%d.%d)' % sys.version_info[0:3])
print('Please use Python version %d.%d.%d' % __maximum_python_version__)
sys.exit()
else:
print('This program has not been tested with this version of Python (%d.%d.%d)' % sys.version_info[0:3])
print('Should you encounter problems, please use Python version %d.%d.%d' % __maximum_python_version__)
if sys.version_info[0:3] < __minimum_python_version__:
if enforceMinimumVersion:
print('This program does not work with this version of Python (%d.%d.%d)' % sys.version_info[0:3])
print('Please use Python version %d.%d.%d' % __maximum_python_version__)
sys.exit()
else:
print('This program has not been tested with this version of Python (%d.%d.%d)' % sys.version_info[0:3])
print('Should you encounter problems, please use Python version %d.%d.%d' % __maximum_python_version__)
|
mit
| 7,926,709,286,726,347,000
| 36.976804
| 164
| 0.507397
| false
| 4.140499
| false
| false
| false
|
EmanueleCannizzaro/scons
|
test/MSVS/vs-9.0Exp-exec.py
|
1
|
3292
|
#!/usr/bin/env python
#
# Copyright (c) 2001 - 2016 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "test/MSVS/vs-9.0Exp-exec.py rel_2.5.1:3735:9dc6cee5c168 2016/11/03 14:02:02 bdbaddog"
"""
Test that we can actually build a simple program using our generated
Visual Studio 9.0 project (.vcproj) and solution (.sln) files
using Visual C++ 9.0 Express edition.
"""
import os
import sys
import TestSConsMSVS
test = TestSConsMSVS.TestSConsMSVS()
if sys.platform != 'win32':
msg = "Skipping Visual Studio test on non-Windows platform '%s'\n" % sys.platform
test.skip_test(msg)
msvs_version = '9.0Exp'
if not msvs_version in test.msvs_versions():
msg = "Visual Studio %s not installed; skipping test.\n" % msvs_version
test.skip_test(msg)
# Let SCons figure out the Visual Studio environment variables for us and
# print out a statement that we can exec to suck them into our external
# environment so we can execute devenv and really try to build something.
test.run(arguments = '-n -q -Q -f -', stdin = """\
env = Environment(tools = ['msvc'], MSVS_VERSION='%(msvs_version)s')
print "os.environ.update(%%s)" %% repr(env['ENV'])
""" % locals())
exec(test.stdout())
test.subdir('sub dir')
test.write(['sub dir', 'SConstruct'], """\
env=Environment(MSVS_VERSION = '%(msvs_version)s')
env.MSVSProject(target = 'foo.vcproj',
srcs = ['foo.c'],
buildtarget = 'foo.exe',
variant = 'Release')
env.Program('foo.c')
""" % locals())
test.write(['sub dir', 'foo.c'], r"""
#include <stdio.h>
#include <stdlib.h>
int
main(int argc, char *argv[])
{
printf("foo.c\n");
exit (0);
}
""")
test.run(chdir='sub dir', arguments='.')
test.vcproj_sys_path(test.workpath('sub dir', 'foo.vcproj'))
import SCons.Platform.win32
system_dll_path = os.path.join( SCons.Platform.win32.get_system_root(), 'System32' )
os.environ['PATH'] = os.environ['PATH'] + os.pathsep + system_dll_path
test.run(chdir='sub dir',
program=[test.get_msvs_executable(msvs_version)],
arguments=['foo.sln', '/build', 'Release'])
test.run(program=test.workpath('sub dir', 'foo'), stdout="foo.c\n")
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
mit
| 5,027,620,518,771,086,000
| 29.201835
| 101
| 0.699271
| false
| 3.432742
| true
| false
| false
|
gstarnberger/paasta
|
paasta_tools/contrib/delete_old_marathon_deployments.py
|
1
|
3106
|
#!/usr/bin/env python
# Copyright 2015-2016 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import datetime
import logging
import dateutil.parser
from dateutil import tz
from pytimeparse import timeparse
from paasta_tools import marathon_tools
log = logging.getLogger(__name__)
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('-a', '--age', dest='age', type=timedelta_type, default='1h',
help="Max age of a Marathon deployment before it is stopped."
"Any pytimeparse unit is supported")
parser.add_argument('-n', '--dry-run', action="store_true",
help="Don't actually stop any Marathon deployments")
parser.add_argument('-v', '--verbose', action='store_true')
options = parser.parse_args()
return options
def timedelta_type(value):
"""Return the :class:`datetime.datetime.DateTime` for a time in the past.
:param value: a string containing a time format supported by :mod:`pytimeparse`
"""
if value is None:
return None
return datetime_seconds_ago(timeparse.timeparse(value))
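# Hedged example: pytimeparse's timeparse('2h') yields 7200 seconds, so
# timedelta_type('2h') returns the tz-aware UTC datetime from 2 hours ago.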
def datetime_seconds_ago(seconds):
return now() - datetime.timedelta(seconds=seconds)
def now():
return datetime.datetime.now(tz.tzutc())
def delete_deployment_if_too_old(client, deployment, max_date, dry_run):
started_at = dateutil.parser.parse(deployment.version)
age = now() - started_at
if started_at < max_date:
if dry_run is True:
log.warning("Would delete %s for %s as it is %s old" % (deployment.id, deployment.affected_apps[0], age))
else:
log.warning("Deleting %s for %s as it is %s old" % (deployment.id, deployment.affected_apps[0], age))
client.delete_deployment(deployment_id=deployment.id, force=True)
else:
if dry_run is True:
log.warning("NOT deleting %s for %s as it is %s old" % (deployment.id, deployment.affected_apps[0], age))
def main():
args = parse_args()
if args.verbose:
logging.basicConfig(level=logging.DEBUG)
else:
logging.basicConfig(level=logging.WARNING)
config = marathon_tools.load_marathon_config()
client = marathon_tools.get_marathon_client(config.get_url(), config.get_username(), config.get_password())
for deployment in client.list_deployments():
delete_deployment_if_too_old(
client=client,
deployment=deployment,
max_date=args.age,
dry_run=args.dry_run,
)
if __name__ == "__main__":
main()
|
apache-2.0
| 3,240,582,496,428,836,000
| 32.76087
| 117
| 0.667418
| false
| 3.834568
| true
| false
| false
|
dorneanu/appvulnms
|
src/core/parser/AppVulnXMLParser.py
|
1
|
14417
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author: victor
# @Date: 2014-02-09
# @Last Modified by: victor
# @Last Modified time: 2014-06-06
# @Copyright:
#
# This file is part of the AppVulnMS project.
#
#
# Copyright (c) 2014 Victor Dorneanu <info AAET dornea DOT nu>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# The MIT License (MIT)
import base64
import logging
from lxml import etree
from core.parser.HTTPParser import HTTPParser
from core.parser.HTTPParser import HTTPRequestParser
from core.parser.HTTPParser import HTTPResponseParser
log = logging.getLogger(__name__)
class AppVulnXMLParser():
"""AppVulnXML parser. Edits XML data"""
def __init__(self, xml_data):
# Create parser to parse the XML tree and insert new data into it
self.parser = etree.XMLParser(remove_blank_text=True, strip_cdata=False,
ns_clean=True, recover=True, encoding='utf-8')
self.xml_tree = etree.XML(str(xml_data), self.parser)
self.issues = self.xml_tree.xpath("/XmlReport/Results/Vulnerabilities/*")
self.issue_index = 0
def __len__(self):
"""Returns number of available issues
:returns: Number of available issues
"""
return len(self.issues)
def __iter__(self):
"""Iterator to walk through issues
:returns: Iterator to iterate through issues
"""
return self
    def __next__(self):
        """Walk through issues"""
        if self.issue_index >= len(self.issues):
            raise StopIteration
        issue = self.issues[self.issue_index]
        self.issue_index += 1
        return issue
def get_root(self):
"""Get root of XML document
:returns: Root XML Element
"""
return self.xml_tree
def get_xml(self):
"""Returns XML tree as string
:returns: XML tree as string
"""
return etree.tostring(self.xml_tree, pretty_print=True, encoding="utf-8").decode("utf-8")
def get_scanner(self):
"""Returns /XmlReport/Scanner
:returns: /XmlReport/Scanner as XML document
"""
return self.xml_tree.xpath("/XmlReport/Scanner")
def get_summary(self):
"""Returns /XmlReport/Summary
:returns: /XmlReport/Summary as XML document
"""
return self.xml_tree.xpath("/XmlReport/Summary")
def get_vulnerabilities(self):
"""Return /XmlReport/Results/Vulnerabilities
:returns: /XmlReport/Results/Vulnerabilities as XML document
"""
return self.xml_tree.xpath("/XmlReport/Results/Vulnerabilities/*")
def add_request_data(self, issue, request_data):
"""Add parsed request data to the node
:param issue: Issue as XML document
:param request_data: HTTP request data
"""
request = HTTPRequestParser(request_data)
request.parse_data()
request.set_http_headers()
headers = request.get_headers()
        # Add request attributes, e.g. the HTTP method and version
try:
xml_request_node = issue.xpath("TestProbe/HTTP/Request")[0]
xml_request_node.attrib['method'] = request.get_method()
xml_request_node.attrib['version'] = request.get_request_version()
except IndexError:
log.error("Index error")
# Add parsed data
try:
xml_parsed_traffic = issue.xpath("TestProbe/HTTP/Request/Parsed")[0]
except IndexError:
            log.error("Index error")
# Iterate through headers and create new XML nodes
for h in headers.keys():
for v in headers[h]:
# Create new sub-element
header_node = etree.Element('Header', name=h, value=v)
xml_parsed_traffic.append(header_node)
# Add request data node
request_data_node = etree.Element('Data')
request_data_node.text = etree.CDATA(request.get_request_data())
xml_parsed_traffic.append(request_data_node)
def add_response_data(self, issue, response_data, binary_data=False):
"""Add parsed response data to the node
:param issue: Issue as XML document
:param response_data: HTTP response data
:param binary_data: Flag indicating whether responde_data is binary
"""
response = HTTPResponseParser(response_data, binary_data)
response.parse_data()
response.set_http_headers()
headers = response.get_headers()
# Add response metadata
try:
xml_response_node = issue.xpath("TestProbe/HTTP/Response")[0]
xml_response_node.attrib['version'] = response.get_response_version()
xml_response_node.attrib['status'] = response.get_status()
xml_response_node.attrib['reason'] = response.get_reason()
except IndexError:
log.error("Index error")
# Add response data
try:
xml_parsed_traffic = issue.xpath("TestProbe/HTTP/Response/Parsed")[0]
except IndexError:
            log.error("Index error")
# Iterate through headers and create new XML nodes
for h in headers.keys():
for v in headers[h]:
# Create new sub-element
header_node = etree.Element('Header', name=h, value=v)
xml_parsed_traffic.append(header_node)
        # Add response data node
request_data_node = etree.Element('Data')
request_data_node.text = etree.CDATA(response.get_response_data())
request_data_node.attrib['base64'] = str(binary_data)
xml_parsed_traffic.append(request_data_node)
def extract_traffic(self, issue, binary_data=False):
"""Extract HTTP traffic from RawTraffic/MergedTraffic and adjust XML in single issue
:param issue: Issue as XML document
:param binary_data: Flag indicating whether traffic is binary
"""
raw_traffic = issue.xpath("RawTraffic")[0]
raw_request_traffic = issue.xpath("RawTraffic/RequestTraffic")
raw_response_traffic = issue.xpath("RawTraffic/ResponseTraffic")
raw_merged_traffic = issue.xpath("RawTraffic/MergedTraffic")
# New nodes
request_node = etree.Element("RequestTraffic")
response_node = etree.Element("ResponseTraffic")
request_node.text = ''
response_node.text = ''
# Add base64 flag to traffic
request_node.attrib['base64'] = 'false'
response_node.attrib['base64'] = 'false'
# Check if merged traffic is provided
if len(raw_merged_traffic) > 0:
# Split traffic
http_data = HTTPParser.split_http_traffic(raw_merged_traffic[0].text)
# Adjust XML data
if http_data:
request_node.text = etree.CDATA(http_data['request'])
raw_traffic.append(request_node)
response_node.text = etree.CDATA(http_data['response'])
raw_traffic.append(response_node)
# Remove MergedTraffic node
raw_merged_traffic[0].getparent().remove(raw_merged_traffic[0])
# Check if request traffic already provided
# TODO: Do the same for request traffic?
if len(raw_request_traffic) > 0:
if len(raw_request_traffic[0].text) > 0:
base64_flag = False
if 'base64' in raw_request_traffic[0].attrib:
if raw_request_traffic[0].attrib['base64'] == 'true':
base64_flag = True
# Check if base64
if base64_flag:
# Replace binary data by plaintext data
decoded_request_data = base64.b64decode(raw_request_traffic[0].text).decode("utf-8")
raw_request_traffic[0].getparent().remove(raw_request_traffic[0])
new_request_traffic = etree.Element("RequestTraffic")
new_request_traffic.text = etree.CDATA(decoded_request_data)
new_request_traffic.attrib['base64'] = "false"
# Append new node
raw_traffic.append(new_request_traffic)
else:
# Add new nodes
raw_traffic.append(request_node)
raw_traffic.append(response_node)
def add_data(self, binary_data=False):
"""Adds request data (e.g. headers) to the XML tree
:param binary_data: Flag indicating whether data is binary
"""
for issue in self.issues:
# Extract traffic
self.extract_traffic(issue, binary_data)
# Extract request and response
raw_request_traffic = issue.xpath("RawTraffic/RequestTraffic")[0]
raw_response_traffic = issue.xpath("RawTraffic/ResponseTraffic")[0]
# Add request data
if raw_request_traffic.text:
base64_flag = False
if 'base64' in raw_request_traffic.attrib:
if raw_request_traffic.attrib['base64'] == 'true':
base64_flag = True
# Check if base64
if base64_flag:
decoded_request_traffic = base64.b64decode(raw_request_traffic.text)
self.add_request_data(issue, decoded_request_traffic.decode(encoding="utf-8", errors="ignore"))
else:
self.add_request_data(issue, raw_request_traffic.text)
# Add response data
if raw_response_traffic.text:
base64_flag = False
if 'base64' in raw_response_traffic.attrib:
if raw_response_traffic.attrib['base64'] == 'true':
base64_flag = True
# Check if base64
if base64_flag:
decoded_response_traffic = base64.b64decode(raw_response_traffic.text)
self.add_response_data(
issue, decoded_response_traffic.decode(encoding="utf-8", errors="ignore"), True)
else:
self.add_response_data(issue, raw_response_traffic.text)
def get_payload(self, issue):
"""Gets issue payload information, e.g. parameter/cookie and value
:param issue: Issue as XML document
:returns: XML data containing PoC information
"""
raw_query = issue.xpath("TestProbe/Request/Query")
if len(raw_query) > 0:
return raw_query
else:
return None
def convert_base64_to_plain(self):
"""Converts Base64 traffic to plaintext
For all issue the traffic will be converted to base64.
"""
for issue in self.issues:
raw_traffic = issue.xpath("RawTraffic")
request_traffic = issue.xpath("RawData/RawRequest")
response_traffic = issue.xpath("RawData/RawResponse")
# Decode request traffic
if len(request_traffic) > 0:
base64_traffic = request_traffic[0].text
traffic = base64.b64decode(base64_traffic)
request_traffic[0].text = etree.CDATA(traffic.decode('utf-8'))
# Decode response traffic
if len(response_traffic) > 0:
base64_traffic = response_traffic[0].text
traffic = base64.b64decode(base64_traffic)
                # FIXME: Do this better
                if len(traffic) < 10000:
                    response = traffic.decode('utf-8', errors='ignore')
                else:
                    response = base64_traffic
response_traffic[0].text = etree.CDATA(response)
# Merge traffic data
raw_traffic[0].text = ''.join([request_traffic[0].text, str(response_traffic[0].text)])
# Remove RawData
raw_data = issue.xpath("RawData")
issue.remove(raw_data[0])
def string(self):
"""Returns string respresentation of XML tree
:returns: Returns string respresentation of XML tree
"""
return etree.tostring(self.xml_tree,
pretty_print=True,
xml_declaration=False
).decode(encoding="utf-8")
def __str__(self):
return self.string()
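# Hedged usage sketch (xml_data stands for an assumed AppVulnXML report):
# parser = AppVulnXMLParser(xml_data)
# parser.add_data()                        # parse raw HTTP traffic into nodes
# for issue in parser.get_vulnerabilities():
#     print(issue.tag)
# print(parser.get_xml())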
|
mit
| -7,039,225,817,628,492,000
| 39.383754
| 119
| 0.553791
| false
| 4.616394
| false
| false
| false
|
tricoder42/python-ariadne
|
docs/conf.py
|
1
|
8846
|
# coding: utf-8
from __future__ import unicode_literals
import os
import sys
import sphinx_rtd_theme
import ariadne
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
# -- General configuration ------------------------------------------------
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.viewcode',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Python Ariadne'
copyright = '2015, Tomáš Ehrlich'
author = 'Tomáš Ehrlich'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '.'.join(ariadne.__version__.split('.')[:2])
# The full version, including alpha/beta/rc tags.
release = ariadne.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = "sphinx_rtd_theme"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'PythonAriadnedoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'PythonAriadne.tex', 'Python Ariadne Documentation',
'Tomáš Ehrlich', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'pythonariadne', 'Python Ariadne Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'PythonAriadne', u'Python Ariadne Documentation',
author, 'PythonAriadne', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
|
mit
| 815,831,060,555,983,000
| 31.028986
| 79
| 0.704864
| false
| 3.654403
| false
| false
| false
|
GennadiyZakharov/locotrack
|
src/ltgui/preprocessorwidget.py
|
1
|
5007
|
'''
Created on 29 jan. 2015
@author: Gena
'''
from PyQt4 import QtCore, QtGui
class PreprocessorWidget(QtGui.QWidget):
    '''
    Settings widget exposing the image-preprocessing options: negative image,
    barrel-distortion removal, camera centre and background accumulation.
    '''
def __init__(self, preprocessor, parent=None):
'''
Constructor
'''
super(PreprocessorWidget, self).__init__(parent)
self.preprocessor = preprocessor
layout = QtGui.QGridLayout()
#
        self.negativeCheckBox = QtGui.QCheckBox()
        negativeLabel = QtGui.QLabel("Negative image")
        layout.addWidget(negativeLabel, 0, 0)
        layout.addWidget(self.negativeCheckBox, 0, 1)
        self.negativeCheckBox.stateChanged.connect(self.setInvertImage)
#
        self.removeBarrelCheckBox = QtGui.QCheckBox()
        removeBarrelLabel = QtGui.QLabel("Remove barrel distortion")
        layout.addWidget(removeBarrelLabel)
        layout.addWidget(self.removeBarrelCheckBox)
        self.removeBarrelCheckBox.stateChanged.connect(self.setRemoveBarrel)
#
self.removeBarrelSpinbox = QtGui.QDoubleSpinBox()
removeBarrelValLabel = QtGui.QLabel('Distortion coefficient')
self.removeBarrelSpinbox.setRange(-10,10)
self.removeBarrelSpinbox.setSingleStep(0.2)
self.removeBarrelSpinbox.setSuffix('E-5')
layout.addWidget(removeBarrelValLabel)
layout.addWidget(self.removeBarrelSpinbox)
self.removeBarrelSpinbox.valueChanged.connect(self.preprocessor.setRemoveBarrelCoef)
self.removeBarrelFocal = QtGui.QDoubleSpinBox()
removeBarrelFocalLabel = QtGui.QLabel('Focal length')
self.removeBarrelFocal.setRange(2,50)
self.removeBarrelFocal.setSingleStep(0.2)
layout.addWidget(removeBarrelFocalLabel)
layout.addWidget(self.removeBarrelFocal)
self.removeBarrelFocal.valueChanged.connect(self.preprocessor.setRemoveBarrelFocal)
self.centerXSpinBox = QtGui.QSpinBox()
centerXLabel = QtGui.QLabel('Camera position, X')
self.centerXSpinBox.setMaximum(1280)
self.centerXSpinBox.setSingleStep(10)
layout.addWidget(centerXLabel)
layout.addWidget(self.centerXSpinBox)
self.centerXSpinBox.valueChanged.connect(self.preprocessor.setCenterX)
self.centerYSpinBox = QtGui.QSpinBox()
centerYLabel = QtGui.QLabel('Camera position, Y')
self.centerYSpinBox.setMaximum(1024)
self.centerYSpinBox.setSingleStep(10)
layout.addWidget(centerYLabel)
layout.addWidget(self.centerYSpinBox)
self.centerYSpinBox.valueChanged.connect(self.preprocessor.setCenterY)
accumulateBackgroundLabel = QtGui.QLabel('Background frames')
layout.addWidget(accumulateBackgroundLabel)
self.accumulateBackgroundSpinBox = QtGui.QSpinBox()
self.accumulateBackgroundSpinBox.setMaximum(1000)
self.accumulateBackgroundSpinBox.setMinimum(50)
layout.addWidget(self.accumulateBackgroundSpinBox)
self.accumulateBackgroundSpinBox.valueChanged.connect(self.preprocessor.setBackgroundFrames)
self.accumulateBackgroundButton = QtGui.QPushButton('Accumulate background')
layout.addWidget(self.accumulateBackgroundButton)
self.accumulateBackgroundButton.clicked.connect(preprocessor.collectBackground)
self.calibrateImageButton = QtGui.QPushButton('Calibrate from image')
layout.addWidget(self.calibrateImageButton)
self.calibrateImageButton.clicked.connect(self.calibrateImage)
# Layout
self.setLayout(layout)
self.loadState()
@QtCore.pyqtSlot(int)
def setInvertImage(self, state):
self.preprocessor.setInvertImage(state == QtCore.Qt.Checked)
@QtCore.pyqtSlot(int)
def setRemoveBarrel(self, state):
value = (state == QtCore.Qt.Checked)
self.removeBarrelFocal.setEnabled(value)
self.removeBarrelSpinbox.setEnabled(value)
self.preprocessor.setRemoveBarrel(value)
def loadState(self):
        self.negativeCheckBox.setChecked(self.preprocessor.invertImage)
        self.removeBarrelCheckBox.setChecked(self.preprocessor.removeBarrel)
self.removeBarrelSpinbox.setValue(self.preprocessor.removeBarrelCoef)
self.removeBarrelFocal.setValue(self.preprocessor.removeBarrelFocal)
self.centerXSpinBox.setValue(self.preprocessor.centerX)
self.centerYSpinBox.setValue(self.preprocessor.centerY)
self.accumulateBackgroundSpinBox.setValue(self.preprocessor.nBackgroundFrames)
def calibrateImage(self):
calibrationImageName = QtGui.QFileDialog.getOpenFileName(self,
"Choose calibration image file",
'.',
"Image file ({})".format("*.*"))
if not calibrationImageName.isEmpty():
self.preprocessor.calibrateFromImage(calibrationImageName)
|
lgpl-3.0
| -634,777,850,189,652,000
| 40.38843
| 100
| 0.688236
| false
| 4.1725
| false
| false
| false
|
kollad/turbo-ninja
|
utils/mathutils.py
|
1
|
5600
|
from bisect import insort_left
from collections import MutableMapping, OrderedDict
import random
import struct
import hashlib
from threading import Lock
import os
from engine.utils.timeutils import milliseconds
_inc_lock = Lock()
_inc = 0
_pid = int(os.getpid()) % 0xffff
def random_id(length=18):
"""Generate id, based on timestamp, assumed to be unique for this process.
"""
global _inc
ts = milliseconds()
with _inc_lock:
source = '{}{}{}'.format(ts, _pid, _inc)
_inc += 1
return hash_string(source, length)
def unique_id():
"""Generate random id, based on timestamp, assumed to be unique for this process.
    Note that ids generated by this function sort in creation order, i.e. each
    new id is greater than the previous one.
    Do not use this function for very rapid generation of batches of ids
    because of possible collisions.
"""
global _inc
ts = milliseconds()
s = ts / 1000
ds = ts / 100 - s * 10
with _inc_lock:
source = '{}{}{}{}'.format(
struct.pack('>I', s),
struct.pack('>B', ds),
struct.pack('>H', _pid),
struct.pack('>H', _inc % 0xffff)
)
_inc += 1
return source.encode('hex')
def hash_string(source, length=18):
"""Generate truncated to length hexdigest for provided source string.
:param source: string to computes hash from.
:type source: basestring
:param length: truncate hash to the specified length.
:type length: int
:rtype: str
"""
return hashlib.sha256(source.encode('utf-8')).hexdigest()[0:length]
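# Hedged check against the well-known digest of 'hello'
# (sha256 -> '2cf24dba5fb0a30e26e83b2ac5b9e29e...'):
# hash_string('hello')  ->  '2cf24dba5fb0a30e26'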
class Median(object):
def __init__(self, *args):
self.values = sorted(args)
def __add__(self, other):
insort_left(self.values, float(other))
return self
def clear(self):
self.values = []
@property
def min(self):
try:
return self.values[0]
except IndexError:
return 0
@property
def max(self):
try:
return self.values[-1]
except IndexError:
return 0
@property
def len(self):
return len(self.values)
@property
def avg(self):
return self.sum / max(self.len, 1)
@property
def med(self):
index = int(self.len / 2)
try:
return self.values[index]
except IndexError:
return 0
@property
def sum(self):
return sum(self.values)
def __repr__(self):
return '<Median: (min: {:.1f}, max: {:.1f}, med: {:.1f}, avg: {:.2f})>'.format(
self.min, self.max, self.med, self.avg)
def __str__(self):
return self.__repr__()
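# Hedged example of the running-median container above (values kept sorted):
# m = Median(3, 1, 2)
# m += 4                          # insort_left keeps order: [1, 2, 3, 4.0]
# (m.min, m.max, m.med, m.avg)    # -> (1, 4.0, 3, 2.5); med is the upper median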
class WeightedItem(object):
__slots__ = 'name', 'weight', 'toughness', 'hunger'
def __init__(self, name, weight=1):
self.name = name
self.weight = weight
self.toughness = 0
self.hunger = 0
def __repr__(self):
return '(weight: {}, toughness: {}, hunger: {})'.format(self.weight, self.toughness, self.hunger)
def __str__(self):
return self.__repr__()
class Weights(MutableMapping):
def __init__(self, **kwargs):
self._items = {}
self._updated = True
self._total = 0
self._len = 0
self._first = None
self._last = None
self.update(kwargs)
def __getitem__(self, item):
return self._items[item].weight
def __setitem__(self, key, value):
if value >= 0:
try:
self._items[key].weight = value
except KeyError:
self._items[key] = WeightedItem(key, value)
else:
raise ValueError('Value should be positive or zero.')
self._updated = True
def __delitem__(self, key):
del self._items[key]
self._updated = True
def __len__(self):
return len(self._items)
def __contains__(self, item):
return self._items.__contains__(item)
def keys(self):
return list(self._items.keys())
def __iter__(self):
return iter(list(self.keys()))
def _refresh_heights(self):
l = self._len = len(self._items)
if not l:
raise IndexError('Cannot choose from nothing.')
items = sorted(list(self._items.values()), key=lambda item: item.weight)
t = 0
for item in items:
t += item.weight
item.toughness = t
total = self._total = t
t = 0
        c = max(l - 1, 1)  # guard: a single item would otherwise divide by zero
for item in items:
t += float(total - item.weight) / c
item.hunger = t
self._items = OrderedDict()
for item in items:
self._items[item.name] = item
self._first = items[0]
def roll(self):
return random.random() * self._total
def choice(self, thin=False):
if self._updated:
self._refresh_heights()
self._updated = False
if self._len < 2:
if self._first:
return self._first.name
else:
raise IndexError('Nothing to choose')
r = self.roll()
if not thin:
for item in self._items.values():
if r < item.toughness:
return item.name
else:
for item in self._items.values():
if r < item.hunger:
return item.name
raise IndexError('Nothing to choose')
def __repr__(self):
return '<Weights: {}>'.format(self._items)
def __str__(self):
return self.__repr__()
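# Hedged usage sketch of the weighted chooser above:
# w = Weights(common=10, rare=1)
# w.choice()            # -> 'common' roughly 10 times in 11
# w.choice(thin=True)   # inverted bias: the lighter 'rare' item dominates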
|
mit
| -161,171,418,633,999,780
| 24.339367
| 119
| 0.5425
| false
| 3.977273
| false
| false
| false
|
lmazuel/azure-sdk-for-python
|
azure-mgmt-network/azure/mgmt/network/v2017_10_01/models/virtual_network_gateway_connection.py
|
1
|
7595
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .resource import Resource
class VirtualNetworkGatewayConnection(Resource):
"""A common class for general resource information.
Variables are only populated by the server, and will be ignored when
sending a request.
All required parameters must be populated in order to send to Azure.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: Resource tags.
:type tags: dict[str, str]
:param authorization_key: The authorizationKey.
:type authorization_key: str
:param virtual_network_gateway1: Required. The reference to virtual
network gateway resource.
:type virtual_network_gateway1:
~azure.mgmt.network.v2017_10_01.models.VirtualNetworkGateway
:param virtual_network_gateway2: The reference to virtual network gateway
resource.
:type virtual_network_gateway2:
~azure.mgmt.network.v2017_10_01.models.VirtualNetworkGateway
:param local_network_gateway2: The reference to local network gateway
resource.
:type local_network_gateway2:
~azure.mgmt.network.v2017_10_01.models.LocalNetworkGateway
    :param connection_type: Required. Gateway connection type. Possible values
    are: 'IPsec', 'Vnet2Vnet', 'ExpressRoute', and 'VPNClient'. Possible values
    include: 'IPsec', 'Vnet2Vnet', 'ExpressRoute', 'VPNClient'
:type connection_type: str or
~azure.mgmt.network.v2017_10_01.models.VirtualNetworkGatewayConnectionType
:param routing_weight: The routing weight.
:type routing_weight: int
:param shared_key: The IPSec shared key.
:type shared_key: str
:ivar connection_status: Virtual network Gateway connection status.
Possible values are 'Unknown', 'Connecting', 'Connected' and
'NotConnected'. Possible values include: 'Unknown', 'Connecting',
'Connected', 'NotConnected'
:vartype connection_status: str or
~azure.mgmt.network.v2017_10_01.models.VirtualNetworkGatewayConnectionStatus
:ivar tunnel_connection_status: Collection of all tunnels' connection
health status.
:vartype tunnel_connection_status:
list[~azure.mgmt.network.v2017_10_01.models.TunnelConnectionHealth]
:ivar egress_bytes_transferred: The egress bytes transferred in this
connection.
:vartype egress_bytes_transferred: long
:ivar ingress_bytes_transferred: The ingress bytes transferred in this
connection.
:vartype ingress_bytes_transferred: long
:param peer: The reference to peerings resource.
:type peer: ~azure.mgmt.network.v2017_10_01.models.SubResource
:param enable_bgp: EnableBgp flag
:type enable_bgp: bool
:param use_policy_based_traffic_selectors: Enable policy-based traffic
selectors.
:type use_policy_based_traffic_selectors: bool
:param ipsec_policies: The IPSec Policies to be considered by this
connection.
:type ipsec_policies:
list[~azure.mgmt.network.v2017_10_01.models.IpsecPolicy]
:param resource_guid: The resource GUID property of the
VirtualNetworkGatewayConnection resource.
:type resource_guid: str
:ivar provisioning_state: The provisioning state of the
VirtualNetworkGatewayConnection resource. Possible values are: 'Updating',
'Deleting', and 'Failed'.
:vartype provisioning_state: str
:param etag: Gets a unique read-only string that changes whenever the
resource is updated.
:type etag: str
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'virtual_network_gateway1': {'required': True},
'connection_type': {'required': True},
'connection_status': {'readonly': True},
'tunnel_connection_status': {'readonly': True},
'egress_bytes_transferred': {'readonly': True},
'ingress_bytes_transferred': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'authorization_key': {'key': 'properties.authorizationKey', 'type': 'str'},
'virtual_network_gateway1': {'key': 'properties.virtualNetworkGateway1', 'type': 'VirtualNetworkGateway'},
'virtual_network_gateway2': {'key': 'properties.virtualNetworkGateway2', 'type': 'VirtualNetworkGateway'},
'local_network_gateway2': {'key': 'properties.localNetworkGateway2', 'type': 'LocalNetworkGateway'},
'connection_type': {'key': 'properties.connectionType', 'type': 'str'},
'routing_weight': {'key': 'properties.routingWeight', 'type': 'int'},
'shared_key': {'key': 'properties.sharedKey', 'type': 'str'},
'connection_status': {'key': 'properties.connectionStatus', 'type': 'str'},
'tunnel_connection_status': {'key': 'properties.tunnelConnectionStatus', 'type': '[TunnelConnectionHealth]'},
'egress_bytes_transferred': {'key': 'properties.egressBytesTransferred', 'type': 'long'},
'ingress_bytes_transferred': {'key': 'properties.ingressBytesTransferred', 'type': 'long'},
'peer': {'key': 'properties.peer', 'type': 'SubResource'},
'enable_bgp': {'key': 'properties.enableBgp', 'type': 'bool'},
'use_policy_based_traffic_selectors': {'key': 'properties.usePolicyBasedTrafficSelectors', 'type': 'bool'},
'ipsec_policies': {'key': 'properties.ipsecPolicies', 'type': '[IpsecPolicy]'},
'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
}
def __init__(self, **kwargs):
super(VirtualNetworkGatewayConnection, self).__init__(**kwargs)
self.authorization_key = kwargs.get('authorization_key', None)
self.virtual_network_gateway1 = kwargs.get('virtual_network_gateway1', None)
self.virtual_network_gateway2 = kwargs.get('virtual_network_gateway2', None)
self.local_network_gateway2 = kwargs.get('local_network_gateway2', None)
self.connection_type = kwargs.get('connection_type', None)
self.routing_weight = kwargs.get('routing_weight', None)
self.shared_key = kwargs.get('shared_key', None)
self.connection_status = None
self.tunnel_connection_status = None
self.egress_bytes_transferred = None
self.ingress_bytes_transferred = None
self.peer = kwargs.get('peer', None)
self.enable_bgp = kwargs.get('enable_bgp', None)
self.use_policy_based_traffic_selectors = kwargs.get('use_policy_based_traffic_selectors', None)
self.ipsec_policies = kwargs.get('ipsec_policies', None)
self.resource_guid = kwargs.get('resource_guid', None)
self.provisioning_state = None
self.etag = kwargs.get('etag', None)
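# Hedged usage sketch (gw1/gw2 stand for assumed VirtualNetworkGateway
# instances; only virtual_network_gateway1 and connection_type are required):
# conn = VirtualNetworkGatewayConnection(
#     location='westus',
#     virtual_network_gateway1=gw1,
#     virtual_network_gateway2=gw2,
#     connection_type='Vnet2Vnet',
#     shared_key='abc123')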
|
mit
| 4,056,119,647,110,844,400
| 48.967105
| 117
| 0.663858
| false
| 4.024907
| false
| false
| false
|
epuzanov/ZenPacks.community.CIMMon
|
ZenPacks/community/CIMMon/interfaces.py
|
1
|
9776
|
################################################################################
#
# This program is part of the CIMMon Zenpack for Zenoss.
# Copyright (C) 2012 Egor Puzanov.
#
# This program can be used under the GNU General Public License version 2
# You can find full information here: http://www.zenoss.com/oss
#
################################################################################
__doc__="""interfaces
describes the form field to the user interface.
$Id: interfaces.py,v 1.6 2012/10/14 17:36:23 egor Exp $"""
__version__ = "$Revision: 1.6 $"[11:-2]
from Products.Zuul.interfaces import IComponentInfo,\
IIpInterfaceInfo,\
IExpansionCardInfo
from Products.Zuul.form import schema
from Products.Zuul.utils import ZuulMessageFactory as _t
class IPhysicalMemoryInfo(IComponentInfo):
"""
Info adapter for Physical Memory Module components.
"""
manufacturer = schema.Entity(title=u"Manufacturer", readonly=True,
group='Details')
product = schema.Entity(title=u"Model", readonly=True, group='Details')
slot = schema.Int(title=u"Slot", readonly=False,group='Details')
size = schema.Text(title=u"Size", readonly=True, group='Details')
cimClassName = schema.Text(title=u"CIM Class Name", readonly=True,
group='Details')
cimStatClassName = schema.Text(title=u"CIM Statistics Class Name",
readonly=True, group='Details')
class IDiskDriveInfo(IComponentInfo):
"""
Info adapter for Disk Drive components.
"""
manufacturer = schema.Entity(title=u"Manufacturer", readonly=True,
group='Details')
product = schema.Entity(title=u"Model", readonly=True, group='Details')
serialNumber = schema.Text(title=u"Serial #", readonly=True,group='Details')
FWRev = schema.Text(title=u"Firmware", readonly=True, group='Details')
size = schema.Text(title=u"Size", readonly=True, group='Details')
diskType = schema.Text(title=u"Type", readonly=True, group='Details')
chassis = schema.Entity(title=u"Chassis", readonly=True,group='Details')
storagePool = schema.Entity(title=u"Disk Group", readonly=True,
group='Details')
bay = schema.Int(title=u"Bay", readonly=False, group='Details')
cimClassName = schema.Text(title=u"CIM Class Name", readonly=True,
group='Details')
cimStatClassName = schema.Text(title=u"CIM Statistics Class Name",
readonly=True, group='Details')
class IChassisInfo(IComponentInfo):
"""
Info adapter for Chassis components.
"""
manufacturer = schema.Entity(title=u"Manufacturer", readonly=True,
group='Details')
product = schema.Entity(title=u"Model", readonly=True, group='Details')
serialNumber = schema.Text(title=u"Serial #", readonly=True,group='Details')
layout = schema.Text(title=u"Layout String", readonly=False,group='Details')
cimClassName = schema.Text(title=u"CIM Class Name", readonly=True,
group='Details')
cimStatClassName = schema.Text(title=u"CIM Statistics Class Name",
readonly=True, group='Details')
class IStoragePoolInfo(IComponentInfo):
"""
Info adapter for Storage Pool components.
"""
usage = schema.Text(title=u"Usage", readonly=True, group="Details")
totalDisks = schema.Int(title=u"Total Disk", readonly=True, group="Details")
totalBytesString = schema.Text(title=u"Total Bytes", readonly=True,
group="Details")
usedBytesString = schema.Text(title=u"Used Bytes", readonly=True,
group="Details")
availBytesString = schema.Text(title=u"Available Bytes", readonly=True,
group="Details")
capacity = schema.Text(title=u"Utilization", readonly=True, group="Details")
cimClassName = schema.Text(title=u"CIM Class Name", readonly=True,
group='Details')
cimStatClassName = schema.Text(title=u"CIM Statistics Class Name",
readonly=True, group='Details')
class IStorageVolumeInfo(IComponentInfo):
"""
Info adapter for Storage Volume components.
"""
storagePool = schema.Entity(title=u"Disk Group", readonly=True,
group='Details')
accessType = schema.Text(title=u"Access Type", readonly=True,
group='Details')
diskType = schema.Text(title=u"Disk Type", readonly=True, group='Details')
totalBytesString = schema.Text(title=u"Total Bytes", readonly=True,
group="Details")
cimClassName = schema.Text(title=u"CIM Class Name", readonly=True,
group='Details')
cimStatClassName = schema.Text(title=u"CIM Statistics Class Name",
readonly=True, group='Details')
class IPowerSupplyInfo(IComponentInfo):
"""
Info adapter for PowerSupply components.
"""
watts = schema.Int(title=u'Watts', group='Overview', readonly=True)
type = schema.Text(title=u'Type', group='Overview', readonly=True)
millivolts = schema.Int(title=u'Millivolts', group='Overview',readonly=True)
cimClassName = schema.Text(title=u"CIM Class Name", readonly=True,
group='Details')
cimStatClassName = schema.Text(title=u"CIM Statistics Class Name",
readonly=True, group='Details')
class ITemperatureSensorInfo(IComponentInfo):
"""
Info adapter for TemperatureSensor components.
"""
temperature = schema.Int(title=u'Temperature (Fahrenheit)',group='Overview',
readonly=True)
cimClassName = schema.Text(title=u"CIM Class Name", readonly=True,
group='Details')
cimStatClassName = schema.Text(title=u"CIM Statistics Class Name",
readonly=True, group='Details')
class IFanInfo(IComponentInfo):
"""
Info adapter for Fan components.
"""
type = schema.Text(title=u'Type', group='Overview', readonly=True)
rpm = schema.Text(title=u'RPM', group='Overview', readonly=True)
cimClassName = schema.Text(title=u"CIM Class Name", readonly=True,
group='Details')
cimStatClassName = schema.Text(title=u"CIM Statistics Class Name",
readonly=True, group='Details')
class IComputerSystemInfo(IExpansionCardInfo):
"""
Info adapter for Controller components.
"""
FWRev = schema.Text(title=u"Firmware", readonly=True, group='Details')
uptime = schema.Text(title=u"Uptime", readonly=True, group='Details')
cimClassName = schema.Text(title=u"CIM Class Name", readonly=True,
group='Details')
cimStatClassName = schema.Text(title=u"CIM Statistics Class Name",
readonly=True, group='Details')
class INetworkPortInfo(IIpInterfaceInfo):
"""
Info adapter for Controller components.
"""
    controller = schema.Entity(title=u"Controller", readonly=True,
                               group='Details')
cimClassName = schema.Text(title=u"CIM Class Name", readonly=True,
group='Details')
cimStatClassName = schema.Text(title=u"CIM Statistics Class Name",
readonly=True, group='Details')
class IRedundancySetInfo(IComponentInfo):
"""
Info adapter for RedundancySet components.
"""
typeOfSet = schema.Text(title=u"Type", readonly=True, group='Details')
loadBalanceAlgorithm = schema.Text(title=u"Load Balance Algorithm",
readonly=True, group='Details')
minNumberNeeded = schema.Int(title=u"Min Number Needed", readonly=True,
group='Details')
membersCount = schema.Int(title=u"Members Count", readonly=True,
group='Details')
cimClassName = schema.Text(title=u"CIM Class Name", readonly=True,
group='Details')
cimStatClassName = schema.Text(title=u"CIM Statistics Class Name",
readonly=True, group='Details')
class IReplicationGroupInfo(IComponentInfo):
"""
Info adapter for ReplicationGroup components.
"""
cimClassName = schema.Text(title=u"CIM Class Name", readonly=True,
group='Details')
cimStatClassName = schema.Text(title=u"CIM Statistics Class Name",
readonly=True, group='Details')
|
gpl-2.0
| -7,139,634,888,139,762,000
| 51.55914
| 80
| 0.54511
| false
| 4.641975
| false
| false
| false
|
shaz13/oppia
|
core/controllers/cron.py
|
1
|
7669
|
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Controllers for the cron jobs."""
import logging
from pipeline import pipeline
from core import jobs
from core.controllers import base
from core.domain import acl_decorators
from core.domain import email_manager
from core.domain import exp_jobs_one_off
from core.domain import recommendations_jobs_one_off
from core.domain import user_jobs_one_off
from core.platform import models
import utils
(job_models,) = models.Registry.import_models([models.NAMES.job])
# The default retention time is 2 days.
MAX_MAPREDUCE_METADATA_RETENTION_MSECS = 2 * 24 * 60 * 60 * 1000
TWENTY_FIVE_HOURS_IN_MSECS = 25 * 60 * 60 * 1000
MAX_JOBS_TO_REPORT_ON = 50
class JobStatusMailerHandler(base.BaseHandler):
"""Handler for mailing admin about job failures."""
@acl_decorators.can_perform_cron_tasks
def get(self):
"""Handles GET requests."""
# TODO(sll): Get the 50 most recent failed shards, not all of them.
failed_jobs = jobs.get_stuck_jobs(TWENTY_FIVE_HOURS_IN_MSECS)
if failed_jobs:
email_subject = 'MapReduce failure alert'
email_message = (
'%s jobs have failed in the past 25 hours. More information '
'(about at most %s jobs; to see more, please check the logs):'
) % (len(failed_jobs), MAX_JOBS_TO_REPORT_ON)
for job in failed_jobs[:MAX_JOBS_TO_REPORT_ON]:
email_message += '\n'
email_message += '-----------------------------------'
email_message += '\n'
email_message += (
'Job with mapreduce ID %s (key name %s) failed. '
'More info:\n\n'
' counters_map: %s\n'
' shard_retries: %s\n'
' slice_retries: %s\n'
' last_update_time: %s\n'
' last_work_item: %s\n'
) % (
job.mapreduce_id, job.key().name(), job.counters_map,
job.retries, job.slice_retries, job.update_time,
job.last_work_item
)
else:
email_subject = 'MapReduce status report'
email_message = 'All MapReduce jobs are running fine.'
email_manager.send_mail_to_admin(email_subject, email_message)
class CronDashboardStatsHandler(base.BaseHandler):
"""Handler for appending dashboard stats to a list."""
@acl_decorators.can_perform_cron_tasks
def get(self):
"""Handles GET requests."""
user_jobs_one_off.DashboardStatsOneOffJob.enqueue(
user_jobs_one_off.DashboardStatsOneOffJob.create_new())
class CronExplorationRecommendationsHandler(base.BaseHandler):
"""Handler for computing exploration recommendations."""
@acl_decorators.can_perform_cron_tasks
def get(self):
"""Handles GET requests."""
job_class = (
recommendations_jobs_one_off.ExplorationRecommendationsOneOffJob)
job_class.enqueue(job_class.create_new())
class CronExplorationSearchRankHandler(base.BaseHandler):
"""Handler for computing exploration search ranks."""
@acl_decorators.can_perform_cron_tasks
def get(self):
"""Handles GET requests."""
exp_jobs_one_off.IndexAllExplorationsJobManager.enqueue(
exp_jobs_one_off.IndexAllExplorationsJobManager.create_new())
class CronMapreduceCleanupHandler(base.BaseHandler):
@acl_decorators.can_perform_cron_tasks
def get(self):
"""Clean up intermediate data items for completed M/R jobs that
started more than MAX_MAPREDUCE_METADATA_RETENTION_MSECS milliseconds
ago.
Map/reduce runs leave around a large number of rows in several
tables. This data is useful to have around for a while:
- it helps diagnose any problems with jobs that may be occurring
- it shows where resource usage is occurring
However, after a few days, this information is less relevant, and
should be cleaned up.
"""
recency_msec = MAX_MAPREDUCE_METADATA_RETENTION_MSECS
num_cleaned = 0
min_age_msec = recency_msec
# Only consider jobs that started at most 1 week before recency_msec.
max_age_msec = recency_msec + 7 * 24 * 60 * 60 * 1000
# The latest start time that a job scheduled for cleanup may have.
max_start_time_msec = (
utils.get_current_time_in_millisecs() - min_age_msec)
# Get all pipeline ids from jobs that started between max_age_msecs
# and max_age_msecs + 1 week, before now.
pipeline_id_to_job_instance = {}
job_instances = job_models.JobModel.get_recent_jobs(1000, max_age_msec)
for job_instance in job_instances:
if (job_instance.time_started_msec < max_start_time_msec and not
job_instance.has_been_cleaned_up):
if 'root_pipeline_id' in job_instance.metadata:
pipeline_id = job_instance.metadata['root_pipeline_id']
pipeline_id_to_job_instance[pipeline_id] = job_instance
# Clean up pipelines.
for pline in pipeline.get_root_list()['pipelines']:
pipeline_id = pline['pipelineId']
job_definitely_terminated = (
pline['status'] == 'done' or
pline['status'] == 'aborted' or
pline['currentAttempt'] > pline['maxAttempts'])
have_start_time = 'startTimeMs' in pline
job_started_too_long_ago = (
have_start_time and
pline['startTimeMs'] < max_start_time_msec)
if (job_started_too_long_ago or
(not have_start_time and job_definitely_terminated)):
# At this point, the map/reduce pipeline is either in a
# terminal state, or has taken so long that there's no
# realistic possibility that there might be a race condition
# between this and the job actually completing.
if pipeline_id in pipeline_id_to_job_instance:
job_instance = pipeline_id_to_job_instance[pipeline_id]
job_instance.has_been_cleaned_up = True
job_instance.put()
# This enqueues a deferred cleanup item.
p = pipeline.Pipeline.from_id(pipeline_id)
if p:
p.cleanup()
num_cleaned += 1
logging.warning('%s MR jobs cleaned up.' % num_cleaned)
if job_models.JobModel.do_unfinished_jobs_exist(
jobs.JobCleanupManager.__name__):
logging.warning('A previous cleanup job is still running.')
else:
jobs.JobCleanupManager.enqueue(
jobs.JobCleanupManager.create_new(), additional_job_params={
jobs.MAPPER_PARAM_MAX_START_TIME_MSEC: max_start_time_msec
})
logging.warning('Deletion jobs for auxiliary entities kicked off.')
|
apache-2.0
| -490,610,909,016,384,450
| 40.231183
| 79
| 0.615726
| false
| 4.0215
| false
| false
| false
|
luzheqi1987/nova-annotation
|
nova/network/linux_net.py
|
1
|
71216
|
# Copyright (c) 2011 X.commerce, a business unit of eBay Inc.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Implements vlans, bridges, and iptables rules using linux utilities."""
import calendar
import inspect
import os
import re
import netaddr
from oslo.concurrency import processutils
from oslo.config import cfg
from oslo.serialization import jsonutils
from oslo.utils import excutils
from oslo.utils import importutils
from oslo.utils import timeutils
import six
from nova import exception
from nova.i18n import _, _LE
from nova import objects
from nova.openstack.common import fileutils
from nova.openstack.common import log as logging
from nova import paths
from nova import utils
LOG = logging.getLogger(__name__)
linux_net_opts = [
cfg.MultiStrOpt('dhcpbridge_flagfile',
default=['/etc/nova/nova-dhcpbridge.conf'],
help='Location of flagfiles for dhcpbridge'),
cfg.StrOpt('networks_path',
default=paths.state_path_def('networks'),
help='Location to keep network config files'),
cfg.StrOpt('public_interface',
default='eth0',
help='Interface for public IP addresses'),
cfg.StrOpt('dhcpbridge',
default=paths.bindir_def('nova-dhcpbridge'),
help='Location of nova-dhcpbridge'),
cfg.StrOpt('routing_source_ip',
default='$my_ip',
help='Public IP of network host'),
cfg.IntOpt('dhcp_lease_time',
default=86400,
help='Lifetime of a DHCP lease in seconds'),
cfg.MultiStrOpt('dns_server',
default=[],
help='If set, uses specific DNS server for dnsmasq. Can'
' be specified multiple times.'),
cfg.BoolOpt('use_network_dns_servers',
default=False,
help='If set, uses the dns1 and dns2 from the network ref.'
' as dns servers.'),
cfg.ListOpt('dmz_cidr',
default=[],
help='A list of dmz range that should be accepted'),
cfg.MultiStrOpt('force_snat_range',
default=[],
help='Traffic to this range will always be snatted to the '
'fallback ip, even if it would normally be bridged out '
'of the node. Can be specified multiple times.'),
cfg.StrOpt('dnsmasq_config_file',
default='',
help='Override the default dnsmasq settings with this file'),
cfg.StrOpt('linuxnet_interface_driver',
default='nova.network.linux_net.LinuxBridgeInterfaceDriver',
help='Driver used to create ethernet devices.'),
cfg.StrOpt('linuxnet_ovs_integration_bridge',
default='br-int',
help='Name of Open vSwitch bridge used with linuxnet'),
cfg.BoolOpt('send_arp_for_ha',
default=False,
help='Send gratuitous ARPs for HA setup'),
cfg.IntOpt('send_arp_for_ha_count',
default=3,
help='Send this many gratuitous ARPs for HA setup'),
cfg.BoolOpt('use_single_default_gateway',
default=False,
help='Use single default gateway. Only first nic of vm will '
'get default gateway from dhcp server'),
cfg.MultiStrOpt('forward_bridge_interface',
default=['all'],
help='An interface that bridges can forward to. If this '
'is set to all then all traffic will be forwarded. '
'Can be specified multiple times.'),
cfg.StrOpt('metadata_host',
default='$my_ip',
help='The IP address for the metadata API server'),
cfg.IntOpt('metadata_port',
default=8775,
help='The port for the metadata API port'),
cfg.StrOpt('iptables_top_regex',
default='',
help='Regular expression to match iptables rule that should '
'always be on the top.'),
cfg.StrOpt('iptables_bottom_regex',
default='',
help='Regular expression to match iptables rule that should '
'always be on the bottom.'),
cfg.StrOpt('iptables_drop_action',
default='DROP',
               help=('The iptables target to jump to when a packet is '
                     'to be dropped.')),
cfg.IntOpt('ovs_vsctl_timeout',
default=120,
help='Amount of time, in seconds, that ovs_vsctl should wait '
'for a response from the database. 0 is to wait forever.'),
cfg.BoolOpt('fake_network',
default=False,
help='If passed, use fake network devices and addresses'),
]
CONF = cfg.CONF
CONF.register_opts(linux_net_opts)
CONF.import_opt('host', 'nova.netconf')
CONF.import_opt('use_ipv6', 'nova.netconf')
CONF.import_opt('my_ip', 'nova.netconf')
CONF.import_opt('network_device_mtu', 'nova.objects.network')
# NOTE(vish): Iptables supports chain names of up to 28 characters, and we
# add up to 12 characters to binary_name which is used as a prefix,
# so we limit it to 16 characters.
# (max_chain_name_length - len('-POSTROUTING') == 16)
def get_binary_name():
"""Grab the name of the binary we're running in."""
return os.path.basename(inspect.stack()[-1][1])[:16]
binary_name = get_binary_name()
class IptablesRule(object):
"""An iptables rule.
You shouldn't need to use this class directly, it's only used by
IptablesManager.
"""
def __init__(self, chain, rule, wrap=True, top=False):
self.chain = chain
self.rule = rule
self.wrap = wrap
self.top = top
def __eq__(self, other):
return ((self.chain == other.chain) and
(self.rule == other.rule) and
(self.top == other.top) and
(self.wrap == other.wrap))
def __ne__(self, other):
return not self == other
def __str__(self):
if self.wrap:
chain = '%s-%s' % (binary_name, self.chain)
else:
chain = self.chain
# new rules should have a zero [packet: byte] count
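        # e.g. str(IptablesRule('local', '-j ACCEPT')) gives
        # '[0:0] -A nova-compute-local -j ACCEPT' when binary_name is
        # 'nova-compute' (example for illustration).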
return '[0:0] -A %s %s' % (chain, self.rule)
class IptablesTable(object):
"""An iptables table."""
def __init__(self):
self.rules = []
self.remove_rules = []
self.chains = set()
self.unwrapped_chains = set()
self.remove_chains = set()
self.dirty = True
def has_chain(self, name, wrap=True):
if wrap:
return name in self.chains
else:
return name in self.unwrapped_chains
def add_chain(self, name, wrap=True):
"""Adds a named chain to the table.
The chain name is wrapped to be unique for the component creating
it, so different components of Nova can safely create identically
named chains without interfering with one another.
At the moment, its wrapped name is <binary name>-<chain name>,
so if nova-compute creates a chain named 'OUTPUT', it'll actually
end up named 'nova-compute-OUTPUT'.
"""
if wrap:
self.chains.add(name)
else:
self.unwrapped_chains.add(name)
self.dirty = True
def remove_chain(self, name, wrap=True):
"""Remove named chain.
This removal "cascades". All rule in the chain are removed, as are
all rules in other chains that jump to it.
If the chain is not found, this is merely logged.
"""
if wrap:
chain_set = self.chains
else:
chain_set = self.unwrapped_chains
if name not in chain_set:
LOG.warn(_('Attempted to remove chain %s which does not exist'),
name)
return
self.dirty = True
# non-wrapped chains and rules need to be dealt with specially,
# so we keep a list of them to be iterated over in apply()
if not wrap:
self.remove_chains.add(name)
chain_set.remove(name)
if not wrap:
self.remove_rules += filter(lambda r: r.chain == name, self.rules)
self.rules = filter(lambda r: r.chain != name, self.rules)
if wrap:
jump_snippet = '-j %s-%s' % (binary_name, name)
else:
jump_snippet = '-j %s' % (name,)
if not wrap:
self.remove_rules += filter(lambda r: jump_snippet in r.rule,
self.rules)
self.rules = filter(lambda r: jump_snippet not in r.rule, self.rules)
def add_rule(self, chain, rule, wrap=True, top=False):
"""Add a rule to the table.
This is just like what you'd feed to iptables, just without
the '-A <chain name>' bit at the start.
However, if you need to jump to one of your wrapped chains,
prepend its name with a '$' which will ensure the wrapping
is applied correctly.
"""
if wrap and chain not in self.chains:
raise ValueError(_('Unknown chain: %r') % chain)
if '$' in rule:
rule = ' '.join(map(self._wrap_target_chain, rule.split(' ')))
rule_obj = IptablesRule(chain, rule, wrap, top)
if rule_obj in self.rules:
LOG.debug("Skipping duplicate iptables rule addition. "
"%(rule)r already in %(rules)r",
{'rule': rule_obj, 'rules': self.rules})
else:
            self.rules.append(rule_obj)
self.dirty = True
def _wrap_target_chain(self, s):
if s.startswith('$'):
return '%s-%s' % (binary_name, s[1:])
return s
def remove_rule(self, chain, rule, wrap=True, top=False):
"""Remove a rule from a chain.
Note: The rule must be exactly identical to the one that was added.
You cannot switch arguments around like you can with the iptables
CLI tool.
"""
try:
self.rules.remove(IptablesRule(chain, rule, wrap, top))
if not wrap:
self.remove_rules.append(IptablesRule(chain, rule, wrap, top))
self.dirty = True
except ValueError:
LOG.warn(_('Tried to remove rule that was not there:'
' %(chain)r %(rule)r %(wrap)r %(top)r'),
{'chain': chain, 'rule': rule,
'top': top, 'wrap': wrap})
def remove_rules_regex(self, regex):
"""Remove all rules matching regex."""
if isinstance(regex, six.string_types):
regex = re.compile(regex)
num_rules = len(self.rules)
self.rules = filter(lambda r: not regex.match(str(r)), self.rules)
removed = num_rules - len(self.rules)
if removed > 0:
self.dirty = True
return removed
def empty_chain(self, chain, wrap=True):
"""Remove all rules from a chain."""
chained_rules = [rule for rule in self.rules
if rule.chain == chain and rule.wrap == wrap]
if chained_rules:
self.dirty = True
for rule in chained_rules:
self.rules.remove(rule)
class IptablesManager(object):
"""Wrapper for iptables.
See IptablesTable for some usage docs
A number of chains are set up to begin with.
First, nova-filter-top. It's added at the top of FORWARD and OUTPUT. Its
name is not wrapped, so it's shared between the various nova workers. It's
intended for rules that need to live at the top of the FORWARD and OUTPUT
chains. It's in both the ipv4 and ipv6 set of tables.
For ipv4 and ipv6, the built-in INPUT, OUTPUT, and FORWARD filter chains
are wrapped, meaning that the "real" INPUT chain has a rule that jumps to
the wrapped INPUT chain, etc. Additionally, there's a wrapped chain named
"local" which is jumped to from nova-filter-top.
For ipv4, the built-in PREROUTING, OUTPUT, and POSTROUTING nat chains are
    wrapped in the same way as the built-in filter chains. Additionally,
there's a snat chain that is applied after the POSTROUTING chain.
"""
def __init__(self, execute=None):
if not execute:
self.execute = _execute
else:
self.execute = execute
self.ipv4 = {'filter': IptablesTable(),
'nat': IptablesTable(),
'mangle': IptablesTable()}
self.ipv6 = {'filter': IptablesTable()}
self.iptables_apply_deferred = False
# Add a nova-filter-top chain. It's intended to be shared
# among the various nova components. It sits at the very top
# of FORWARD and OUTPUT.
for tables in [self.ipv4, self.ipv6]:
tables['filter'].add_chain('nova-filter-top', wrap=False)
tables['filter'].add_rule('FORWARD', '-j nova-filter-top',
wrap=False, top=True)
tables['filter'].add_rule('OUTPUT', '-j nova-filter-top',
wrap=False, top=True)
tables['filter'].add_chain('local')
tables['filter'].add_rule('nova-filter-top', '-j $local',
wrap=False)
# Wrap the built-in chains
builtin_chains = {4: {'filter': ['INPUT', 'OUTPUT', 'FORWARD'],
'nat': ['PREROUTING', 'OUTPUT', 'POSTROUTING'],
'mangle': ['POSTROUTING']},
6: {'filter': ['INPUT', 'OUTPUT', 'FORWARD']}}
for ip_version in builtin_chains:
if ip_version == 4:
tables = self.ipv4
elif ip_version == 6:
tables = self.ipv6
for table, chains in builtin_chains[ip_version].iteritems():
for chain in chains:
tables[table].add_chain(chain)
tables[table].add_rule(chain, '-j $%s' % (chain,),
wrap=False)
# Add a nova-postrouting-bottom chain. It's intended to be shared
# among the various nova components. We set it as the last chain
# of POSTROUTING chain.
self.ipv4['nat'].add_chain('nova-postrouting-bottom', wrap=False)
self.ipv4['nat'].add_rule('POSTROUTING', '-j nova-postrouting-bottom',
wrap=False)
# We add a snat chain to the shared nova-postrouting-bottom chain
# so that it's applied last.
self.ipv4['nat'].add_chain('snat')
self.ipv4['nat'].add_rule('nova-postrouting-bottom', '-j $snat',
wrap=False)
# And then we add a float-snat chain and jump to first thing in
# the snat chain.
self.ipv4['nat'].add_chain('float-snat')
self.ipv4['nat'].add_rule('snat', '-j $float-snat')
def defer_apply_on(self):
self.iptables_apply_deferred = True
def defer_apply_off(self):
self.iptables_apply_deferred = False
self.apply()
def dirty(self):
for table in self.ipv4.itervalues():
if table.dirty:
return True
if CONF.use_ipv6:
for table in self.ipv6.itervalues():
if table.dirty:
return True
return False
def apply(self):
if self.iptables_apply_deferred:
return
if self.dirty():
self._apply()
else:
LOG.debug("Skipping apply due to lack of new rules")
@utils.synchronized('iptables', external=True)
def _apply(self):
"""Apply the current in-memory set of iptables rules.
This will blow away any rules left over from previous runs of the
same component of Nova, and replace them with our current set of
rules. This happens atomically, thanks to iptables-restore.
"""
s = [('iptables', self.ipv4)]
if CONF.use_ipv6:
s += [('ip6tables', self.ipv6)]
for cmd, tables in s:
all_tables, _err = self.execute('%s-save' % (cmd,), '-c',
run_as_root=True,
attempts=5)
all_lines = all_tables.split('\n')
for table_name, table in tables.iteritems():
start, end = self._find_table(all_lines, table_name)
all_lines[start:end] = self._modify_rules(
all_lines[start:end], table, table_name)
table.dirty = False
self.execute('%s-restore' % (cmd,), '-c', run_as_root=True,
process_input='\n'.join(all_lines),
attempts=5)
LOG.debug("IPTablesManager.apply completed with success")
def _find_table(self, lines, table_name):
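        # `lines` is the iptables-save output split on newlines; a table
        # section looks like (example for illustration):
        #     # Generated by iptables-save ...
        #     *filter
        #     :INPUT ACCEPT [0:0]
        #     ...
        #     COMMIT
        #     # Completed on ...
        # start points at the comment line above '*<table_name>' and end
        # is one past the line after COMMIT, so both comment lines stay
        # inside the returned slice.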
if len(lines) < 3:
            # fewer than 3 lines means there is no real iptables-save
            # output (e.g. when fake_network is enabled)
return (0, 0)
try:
start = lines.index('*%s' % table_name) - 1
except ValueError:
# Couldn't find table_name
return (0, 0)
end = lines[start:].index('COMMIT') + start + 2
return (start, end)
def _modify_rules(self, current_lines, table, table_name):
unwrapped_chains = table.unwrapped_chains
chains = table.chains
remove_chains = table.remove_chains
rules = table.rules
remove_rules = table.remove_rules
if not current_lines:
fake_table = ['#Generated by nova',
'*' + table_name, 'COMMIT',
'#Completed by nova']
current_lines = fake_table
# Remove any trace of our rules
new_filter = filter(lambda line: binary_name not in line,
current_lines)
top_rules = []
bottom_rules = []
if CONF.iptables_top_regex:
regex = re.compile(CONF.iptables_top_regex)
temp_filter = filter(lambda line: regex.search(line), new_filter)
for rule_str in temp_filter:
new_filter = filter(lambda s: s.strip() != rule_str.strip(),
new_filter)
top_rules = temp_filter
if CONF.iptables_bottom_regex:
regex = re.compile(CONF.iptables_bottom_regex)
temp_filter = filter(lambda line: regex.search(line), new_filter)
for rule_str in temp_filter:
new_filter = filter(lambda s: s.strip() != rule_str.strip(),
new_filter)
bottom_rules = temp_filter
seen_chains = False
rules_index = 0
for rules_index, rule in enumerate(new_filter):
if not seen_chains:
if rule.startswith(':'):
seen_chains = True
else:
if not rule.startswith(':'):
break
if not seen_chains:
rules_index = 2
our_rules = top_rules
bot_rules = []
for rule in rules:
rule_str = str(rule)
if rule.top:
# rule.top == True means we want this rule to be at the top.
# Further down, we weed out duplicates from the bottom of the
# list, so here we remove the dupes ahead of time.
# We don't want to remove an entry if it has non-zero
# [packet:byte] counts and replace it with [0:0], so let's
# go look for a duplicate, and over-ride our table rule if
# found.
# ignore [packet:byte] counts at beginning of line
if rule_str.startswith('['):
rule_str = rule_str.split(']', 1)[1]
dup_filter = filter(lambda s: rule_str.strip() in s.strip(),
new_filter)
new_filter = filter(lambda s:
rule_str.strip() not in s.strip(),
new_filter)
# if no duplicates, use original rule
if dup_filter:
# grab the last entry, if there is one
dup = dup_filter[-1]
rule_str = str(dup)
else:
rule_str = str(rule)
                    rule_str = rule_str.strip()
our_rules += [rule_str]
else:
bot_rules += [rule_str]
our_rules += bot_rules
new_filter[rules_index:rules_index] = our_rules
new_filter[rules_index:rules_index] = [':%s - [0:0]' % (name,)
for name in unwrapped_chains]
new_filter[rules_index:rules_index] = [':%s-%s - [0:0]' %
(binary_name, name,)
for name in chains]
commit_index = new_filter.index('COMMIT')
new_filter[commit_index:commit_index] = bottom_rules
seen_lines = set()
def _weed_out_duplicates(line):
# ignore [packet:byte] counts at beginning of lines
if line.startswith('['):
line = line.split(']', 1)[1]
line = line.strip()
if line in seen_lines:
return False
else:
seen_lines.add(line)
return True
def _weed_out_removes(line):
# We need to find exact matches here
if line.startswith(':'):
# it's a chain, for example, ":nova-billing - [0:0]"
# strip off everything except the chain name
line = line.split(':')[1]
line = line.split('- [')[0]
line = line.strip()
for chain in remove_chains:
if chain == line:
remove_chains.remove(chain)
return False
elif line.startswith('['):
# it's a rule
# ignore [packet:byte] counts at beginning of lines
line = line.split(']', 1)[1]
line = line.strip()
for rule in remove_rules:
# ignore [packet:byte] counts at beginning of rules
rule_str = str(rule)
rule_str = rule_str.split(' ', 1)[1]
rule_str = rule_str.strip()
if rule_str == line:
remove_rules.remove(rule)
return False
# Leave it alone
return True
# We filter duplicates, letting the *last* occurrence take
# precedence. We also filter out anything in the "remove"
# lists.
new_filter.reverse()
new_filter = filter(_weed_out_duplicates, new_filter)
new_filter = filter(_weed_out_removes, new_filter)
new_filter.reverse()
# flush lists, just in case we didn't find something
remove_chains.clear()
for rule in remove_rules:
remove_rules.remove(rule)
return new_filter
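# Minimal usage sketch for IptablesManager (illustration only; the chain
# name below is not from upstream Nova). Changes are staged in memory and
# written out atomically via iptables-restore when apply() runs:
#
#     manager = IptablesManager()
#     manager.ipv4['filter'].add_chain('example')
#     manager.ipv4['filter'].add_rule('example', '-j ACCEPT')
#     manager.apply()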
# NOTE(jkoelker) This is just a nice little stub point since mocking
# builtins with mox is a nightmare
def write_to_file(file, data, mode='w'):
with open(file, mode) as f:
f.write(data)
def metadata_forward():
"""Create forwarding rule for metadata."""
if CONF.metadata_host != '127.0.0.1':
iptables_manager.ipv4['nat'].add_rule('PREROUTING',
'-s 0.0.0.0/0 -d 169.254.169.254/32 '
'-p tcp -m tcp --dport 80 -j DNAT '
'--to-destination %s:%s' %
(CONF.metadata_host,
CONF.metadata_port))
else:
iptables_manager.ipv4['nat'].add_rule('PREROUTING',
'-s 0.0.0.0/0 -d 169.254.169.254/32 '
'-p tcp -m tcp --dport 80 '
'-j REDIRECT --to-ports %s' %
CONF.metadata_port)
iptables_manager.apply()
def metadata_accept():
"""Create the filter accept rule for metadata."""
rule = '-s 0.0.0.0/0 -p tcp -m tcp --dport %s' % CONF.metadata_port
if CONF.metadata_host != '127.0.0.1':
rule += ' -d %s -j ACCEPT' % CONF.metadata_host
else:
rule += ' -m addrtype --dst-type LOCAL -j ACCEPT'
iptables_manager.ipv4['filter'].add_rule('INPUT', rule)
iptables_manager.apply()
def add_snat_rule(ip_range, is_external=False):
if CONF.routing_source_ip:
if is_external:
if CONF.force_snat_range:
snat_range = CONF.force_snat_range
else:
snat_range = []
else:
snat_range = ['0.0.0.0/0']
for dest_range in snat_range:
rule = ('-s %s -d %s -j SNAT --to-source %s'
% (ip_range, dest_range, CONF.routing_source_ip))
if not is_external and CONF.public_interface:
rule += ' -o %s' % CONF.public_interface
iptables_manager.ipv4['nat'].add_rule('snat', rule)
iptables_manager.apply()
def init_host(ip_range, is_external=False):
"""Basic networking setup goes here."""
# NOTE(devcamcar): Cloud public SNAT entries and the default
# SNAT rule for outbound traffic.
add_snat_rule(ip_range, is_external)
rules = []
if is_external:
for snat_range in CONF.force_snat_range:
rules.append('PREROUTING -p ipv4 --ip-src %s --ip-dst %s '
'-j redirect --redirect-target ACCEPT' %
(ip_range, snat_range))
if rules:
ensure_ebtables_rules(rules, 'nat')
iptables_manager.ipv4['nat'].add_rule('POSTROUTING',
'-s %s -d %s/32 -j ACCEPT' %
(ip_range, CONF.metadata_host))
for dmz in CONF.dmz_cidr:
iptables_manager.ipv4['nat'].add_rule('POSTROUTING',
'-s %s -d %s -j ACCEPT' %
(ip_range, dmz))
iptables_manager.ipv4['nat'].add_rule('POSTROUTING',
'-s %(range)s -d %(range)s '
'-m conntrack ! --ctstate DNAT '
'-j ACCEPT' %
{'range': ip_range})
iptables_manager.apply()
def send_arp_for_ip(ip, device, count):
out, err = _execute('arping', '-U', ip,
'-A', '-I', device,
'-c', str(count),
run_as_root=True, check_exit_code=False)
if err:
LOG.debug('arping error for ip %s', ip)
def bind_floating_ip(floating_ip, device):
"""Bind ip to public interface."""
_execute('ip', 'addr', 'add', str(floating_ip) + '/32',
'dev', device,
run_as_root=True, check_exit_code=[0, 2, 254])
if CONF.send_arp_for_ha and CONF.send_arp_for_ha_count > 0:
send_arp_for_ip(floating_ip, device, CONF.send_arp_for_ha_count)
def unbind_floating_ip(floating_ip, device):
"""Unbind a public ip from public interface."""
_execute('ip', 'addr', 'del', str(floating_ip) + '/32',
'dev', device,
run_as_root=True, check_exit_code=[0, 2, 254])
def ensure_metadata_ip():
"""Sets up local metadata ip."""
_execute('ip', 'addr', 'add', '169.254.169.254/32',
'scope', 'link', 'dev', 'lo',
run_as_root=True, check_exit_code=[0, 2, 254])
def ensure_vpn_forward(public_ip, port, private_ip):
"""Sets up forwarding rules for vlan."""
iptables_manager.ipv4['filter'].add_rule('FORWARD',
'-d %s -p udp '
'--dport 1194 '
'-j ACCEPT' % private_ip)
iptables_manager.ipv4['nat'].add_rule('PREROUTING',
'-d %s -p udp '
'--dport %s -j DNAT --to %s:1194' %
(public_ip, port, private_ip))
iptables_manager.ipv4['nat'].add_rule('OUTPUT',
'-d %s -p udp '
'--dport %s -j DNAT --to %s:1194' %
(public_ip, port, private_ip))
iptables_manager.apply()
def ensure_floating_forward(floating_ip, fixed_ip, device, network):
"""Ensure floating ip forwarding rule."""
# NOTE(vish): Make sure we never have duplicate rules for the same ip
    regex = r'.*\s+%s(/32|\s+|$)' % floating_ip
num_rules = iptables_manager.ipv4['nat'].remove_rules_regex(regex)
if num_rules:
msg = _('Removed %(num)d duplicate rules for floating ip %(float)s')
LOG.warn(msg % {'num': num_rules, 'float': floating_ip})
for chain, rule in floating_forward_rules(floating_ip, fixed_ip, device):
iptables_manager.ipv4['nat'].add_rule(chain, rule)
iptables_manager.apply()
if device != network['bridge']:
ensure_ebtables_rules(*floating_ebtables_rules(fixed_ip, network))
def remove_floating_forward(floating_ip, fixed_ip, device, network):
"""Remove forwarding for floating ip."""
for chain, rule in floating_forward_rules(floating_ip, fixed_ip, device):
iptables_manager.ipv4['nat'].remove_rule(chain, rule)
iptables_manager.apply()
if device != network['bridge']:
remove_ebtables_rules(*floating_ebtables_rules(fixed_ip, network))
def floating_ebtables_rules(fixed_ip, network):
"""Makes sure only in-network traffic is bridged."""
return (['PREROUTING --logical-in %s -p ipv4 --ip-src %s '
'! --ip-dst %s -j redirect --redirect-target ACCEPT' %
(network['bridge'], fixed_ip, network['cidr'])], 'nat')
def floating_forward_rules(floating_ip, fixed_ip, device):
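    # Example output (illustration) for floating_ip='1.2.3.4',
    # fixed_ip='10.0.0.2', device='eth0':
    #   ('float-snat',  '-s 10.0.0.2 -j SNAT --to 1.2.3.4 -d 10.0.0.2')
    #   ('float-snat',  '-s 10.0.0.2 -j SNAT --to 1.2.3.4 -o eth0')
    #   ('PREROUTING',  '-d 1.2.3.4 -j DNAT --to 10.0.0.2')
    #   ('OUTPUT',      '-d 1.2.3.4 -j DNAT --to 10.0.0.2')
    #   ('POSTROUTING', '-s 10.0.0.2 -m conntrack --ctstate DNAT '
    #                   '-j SNAT --to-source 1.2.3.4')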
rules = []
rule = '-s %s -j SNAT --to %s' % (fixed_ip, floating_ip)
if device:
rules.append(('float-snat', rule + ' -d %s' % fixed_ip))
rules.append(('float-snat', rule + ' -o %s' % device))
else:
rules.append(('float-snat', rule))
rules.append(
('PREROUTING', '-d %s -j DNAT --to %s' % (floating_ip, fixed_ip)))
rules.append(
('OUTPUT', '-d %s -j DNAT --to %s' % (floating_ip, fixed_ip)))
rules.append(('POSTROUTING', '-s %s -m conntrack --ctstate DNAT -j SNAT '
'--to-source %s' %
(fixed_ip, floating_ip)))
return rules
def clean_conntrack(fixed_ip):
try:
_execute('conntrack', '-D', '-r', fixed_ip, run_as_root=True,
check_exit_code=[0, 1])
except processutils.ProcessExecutionError:
LOG.exception(_('Error deleting conntrack entries for %s'), fixed_ip)
def _enable_ipv4_forwarding():
sysctl_key = 'net.ipv4.ip_forward'
stdout, stderr = _execute('sysctl', '-n', sysctl_key)
    if stdout.strip() != '1':
_execute('sysctl', '-w', '%s=1' % sysctl_key, run_as_root=True)
@utils.synchronized('lock_gateway', external=True)
def initialize_gateway_device(dev, network_ref):
if not network_ref:
return
_enable_ipv4_forwarding()
# NOTE(vish): The ip for dnsmasq has to be the first address on the
# bridge for it to respond to requests properly
try:
prefix = network_ref.cidr.prefixlen
except AttributeError:
prefix = network_ref['cidr'].rpartition('/')[2]
full_ip = '%s/%s' % (network_ref['dhcp_server'], prefix)
new_ip_params = [[full_ip, 'brd', network_ref['broadcast']]]
old_ip_params = []
out, err = _execute('ip', 'addr', 'show', 'dev', dev,
'scope', 'global')
for line in out.split('\n'):
fields = line.split()
if fields and fields[0] == 'inet':
ip_params = fields[1:-1]
old_ip_params.append(ip_params)
if ip_params[0] != full_ip:
new_ip_params.append(ip_params)
if not old_ip_params or old_ip_params[0][0] != full_ip:
old_routes = []
result = _execute('ip', 'route', 'show', 'dev', dev)
if result:
out, err = result
for line in out.split('\n'):
fields = line.split()
if fields and 'via' in fields:
old_routes.append(fields)
_execute('ip', 'route', 'del', fields[0],
'dev', dev, run_as_root=True)
for ip_params in old_ip_params:
_execute(*_ip_bridge_cmd('del', ip_params, dev),
run_as_root=True, check_exit_code=[0, 2, 254])
for ip_params in new_ip_params:
_execute(*_ip_bridge_cmd('add', ip_params, dev),
run_as_root=True, check_exit_code=[0, 2, 254])
for fields in old_routes:
_execute('ip', 'route', 'add', *fields,
run_as_root=True)
if CONF.send_arp_for_ha and CONF.send_arp_for_ha_count > 0:
send_arp_for_ip(network_ref['dhcp_server'], dev,
CONF.send_arp_for_ha_count)
if CONF.use_ipv6:
_execute('ip', '-f', 'inet6', 'addr',
'change', network_ref['cidr_v6'],
'dev', dev, run_as_root=True)
def get_dhcp_leases(context, network_ref):
"""Return a network's hosts config in dnsmasq leasefile format."""
hosts = []
host = None
if network_ref['multi_host']:
host = CONF.host
for fixedip in objects.FixedIPList.get_by_network(context,
network_ref,
host=host):
# NOTE(cfb): Don't return a lease entry if the IP isn't
# already leased
if fixedip.leased:
hosts.append(_host_lease(fixedip))
return '\n'.join(hosts)
def get_dhcp_hosts(context, network_ref, fixedips):
"""Get network's hosts config in dhcp-host format."""
hosts = []
macs = set()
for fixedip in fixedips:
if fixedip.allocated:
if fixedip.virtual_interface.address not in macs:
hosts.append(_host_dhcp(fixedip))
macs.add(fixedip.virtual_interface.address)
return '\n'.join(hosts)
def get_dns_hosts(context, network_ref):
"""Get network's DNS hosts in hosts format."""
hosts = []
for fixedip in objects.FixedIPList.get_by_network(context, network_ref):
if fixedip.allocated:
hosts.append(_host_dns(fixedip))
return '\n'.join(hosts)
def _add_dnsmasq_accept_rules(dev):
"""Allow DHCP and DNS traffic through to dnsmasq."""
table = iptables_manager.ipv4['filter']
for port in [67, 53]:
for proto in ['udp', 'tcp']:
args = {'dev': dev, 'port': port, 'proto': proto}
table.add_rule('INPUT',
'-i %(dev)s -p %(proto)s -m %(proto)s '
'--dport %(port)s -j ACCEPT' % args)
iptables_manager.apply()
def _remove_dnsmasq_accept_rules(dev):
"""Remove DHCP and DNS traffic allowed through to dnsmasq."""
table = iptables_manager.ipv4['filter']
for port in [67, 53]:
for proto in ['udp', 'tcp']:
args = {'dev': dev, 'port': port, 'proto': proto}
table.remove_rule('INPUT',
'-i %(dev)s -p %(proto)s -m %(proto)s '
'--dport %(port)s -j ACCEPT' % args)
iptables_manager.apply()
# NOTE(russellb) Curious why this is needed? Check out this explanation from
# markmc: https://bugzilla.redhat.com/show_bug.cgi?id=910619#c6
def _add_dhcp_mangle_rule(dev):
table = iptables_manager.ipv4['mangle']
table.add_rule('POSTROUTING',
'-o %s -p udp -m udp --dport 68 -j CHECKSUM '
'--checksum-fill' % dev)
iptables_manager.apply()
def _remove_dhcp_mangle_rule(dev):
table = iptables_manager.ipv4['mangle']
table.remove_rule('POSTROUTING',
'-o %s -p udp -m udp --dport 68 -j CHECKSUM '
'--checksum-fill' % dev)
iptables_manager.apply()
def get_dhcp_opts(context, network_ref, fixedips):
"""Get network's hosts config in dhcp-opts format."""
gateway = network_ref['gateway']
# NOTE(vish): if we are in multi-host mode and we are not sharing
# addresses, then we actually need to hand out the
# dhcp server address as the gateway.
if network_ref['multi_host'] and not (network_ref['share_address'] or
CONF.share_dhcp_address):
gateway = network_ref['dhcp_server']
hosts = []
if CONF.use_single_default_gateway:
for fixedip in fixedips:
if fixedip.allocated:
vif_id = fixedip.virtual_interface_id
if fixedip.default_route:
hosts.append(_host_dhcp_opts(vif_id, gateway))
else:
hosts.append(_host_dhcp_opts(vif_id))
else:
hosts.append(_host_dhcp_opts(None, gateway))
return '\n'.join(hosts)
def release_dhcp(dev, address, mac_address):
try:
utils.execute('dhcp_release', dev, address, mac_address,
run_as_root=True)
except processutils.ProcessExecutionError:
raise exception.NetworkDhcpReleaseFailed(address=address,
mac_address=mac_address)
def update_dhcp(context, dev, network_ref):
conffile = _dhcp_file(dev, 'conf')
host = None
if network_ref['multi_host']:
host = CONF.host
fixedips = objects.FixedIPList.get_by_network(context,
network_ref,
host=host)
write_to_file(conffile, get_dhcp_hosts(context, network_ref, fixedips))
restart_dhcp(context, dev, network_ref, fixedips)
def update_dns(context, dev, network_ref):
hostsfile = _dhcp_file(dev, 'hosts')
host = None
if network_ref['multi_host']:
host = CONF.host
fixedips = objects.FixedIPList.get_by_network(context,
network_ref,
host=host)
write_to_file(hostsfile, get_dns_hosts(context, network_ref))
restart_dhcp(context, dev, network_ref, fixedips)
def update_dhcp_hostfile_with_text(dev, hosts_text):
conffile = _dhcp_file(dev, 'conf')
write_to_file(conffile, hosts_text)
def kill_dhcp(dev):
pid = _dnsmasq_pid_for(dev)
if pid:
# Check that the process exists and looks like a dnsmasq process
conffile = _dhcp_file(dev, 'conf')
out, _err = _execute('cat', '/proc/%d/cmdline' % pid,
check_exit_code=False)
if conffile.split('/')[-1] in out:
_execute('kill', '-9', pid, run_as_root=True)
else:
LOG.debug('Pid %d is stale, skip killing dnsmasq', pid)
_remove_dnsmasq_accept_rules(dev)
_remove_dhcp_mangle_rule(dev)
# NOTE(ja): Sending a HUP only reloads the hostfile, so any
#           configuration options (like dhcp-range, vlan, ...)
# aren't reloaded.
@utils.synchronized('dnsmasq_start')
def restart_dhcp(context, dev, network_ref, fixedips):
"""(Re)starts a dnsmasq server for a given network.
If a dnsmasq instance is already running then send a HUP
signal causing it to reload, otherwise spawn a new instance.
"""
conffile = _dhcp_file(dev, 'conf')
optsfile = _dhcp_file(dev, 'opts')
write_to_file(optsfile, get_dhcp_opts(context, network_ref, fixedips))
os.chmod(optsfile, 0o644)
_add_dhcp_mangle_rule(dev)
# Make sure dnsmasq can actually read it (it setuid()s to "nobody")
os.chmod(conffile, 0o644)
pid = _dnsmasq_pid_for(dev)
# if dnsmasq is already running, then tell it to reload
if pid:
out, _err = _execute('cat', '/proc/%d/cmdline' % pid,
check_exit_code=False)
# Using symlinks can cause problems here so just compare the name
# of the file itself
if conffile.split('/')[-1] in out:
try:
_execute('kill', '-HUP', pid, run_as_root=True)
_add_dnsmasq_accept_rules(dev)
return
except Exception as exc: # pylint: disable=W0703
LOG.error(_LE('Hupping dnsmasq threw %s'), exc)
else:
LOG.debug('Pid %d is stale, relaunching dnsmasq', pid)
cmd = ['env',
'CONFIG_FILE=%s' % jsonutils.dumps(CONF.dhcpbridge_flagfile),
'NETWORK_ID=%s' % str(network_ref['id']),
'dnsmasq',
'--strict-order',
'--bind-interfaces',
'--conf-file=%s' % CONF.dnsmasq_config_file,
'--pid-file=%s' % _dhcp_file(dev, 'pid'),
'--dhcp-optsfile=%s' % _dhcp_file(dev, 'opts'),
'--listen-address=%s' % network_ref['dhcp_server'],
'--except-interface=lo',
'--dhcp-range=set:%s,%s,static,%s,%ss' %
(network_ref['label'],
network_ref['dhcp_start'],
network_ref['netmask'],
CONF.dhcp_lease_time),
'--dhcp-lease-max=%s' % len(netaddr.IPNetwork(network_ref['cidr'])),
'--dhcp-hostsfile=%s' % _dhcp_file(dev, 'conf'),
'--dhcp-script=%s' % CONF.dhcpbridge,
'--no-hosts',
'--leasefile-ro']
# dnsmasq currently gives an error for an empty domain,
    # rather than ignoring it. So only specify it if defined.
if CONF.dhcp_domain:
cmd.append('--domain=%s' % CONF.dhcp_domain)
dns_servers = set(CONF.dns_server)
if CONF.use_network_dns_servers:
if network_ref.get('dns1'):
dns_servers.add(network_ref.get('dns1'))
if network_ref.get('dns2'):
dns_servers.add(network_ref.get('dns2'))
if network_ref['multi_host']:
cmd.append('--addn-hosts=%s' % _dhcp_file(dev, 'hosts'))
if dns_servers:
cmd.append('--no-resolv')
for dns_server in dns_servers:
cmd.append('--server=%s' % dns_server)
_execute(*cmd, run_as_root=True)
_add_dnsmasq_accept_rules(dev)
@utils.synchronized('radvd_start')
def update_ra(context, dev, network_ref):
conffile = _ra_file(dev, 'conf')
conf_str = """
interface %s
{
AdvSendAdvert on;
MinRtrAdvInterval 3;
MaxRtrAdvInterval 10;
prefix %s
{
AdvOnLink on;
AdvAutonomous on;
};
};
""" % (dev, network_ref['cidr_v6'])
write_to_file(conffile, conf_str)
# Make sure radvd can actually read it (it setuid()s to "nobody")
os.chmod(conffile, 0o644)
pid = _ra_pid_for(dev)
# if radvd is already running, then tell it to reload
if pid:
out, _err = _execute('cat', '/proc/%d/cmdline'
% pid, check_exit_code=False)
if conffile in out:
try:
_execute('kill', pid, run_as_root=True)
except Exception as exc: # pylint: disable=W0703
LOG.error(_LE('killing radvd threw %s'), exc)
else:
LOG.debug('Pid %d is stale, relaunching radvd', pid)
cmd = ['radvd',
'-C', '%s' % _ra_file(dev, 'conf'),
'-p', '%s' % _ra_file(dev, 'pid')]
_execute(*cmd, run_as_root=True)
def _host_lease(fixedip):
"""Return a host string for an address in leasefile format."""
timestamp = timeutils.utcnow()
seconds_since_epoch = calendar.timegm(timestamp.utctimetuple())
return '%d %s %s %s *' % (seconds_since_epoch + CONF.dhcp_lease_time,
fixedip.virtual_interface.address,
fixedip.address,
fixedip.instance.hostname or '*')
def _host_dhcp_network(vif_id):
return 'NW-%s' % vif_id
def _host_dhcp(fixedip):
"""Return a host string for an address in dhcp-host format."""
if CONF.use_single_default_gateway:
net = _host_dhcp_network(fixedip.virtual_interface_id)
return '%s,%s.%s,%s,net:%s' % (fixedip.virtual_interface.address,
fixedip.instance.hostname,
CONF.dhcp_domain,
fixedip.address,
net)
else:
return '%s,%s.%s,%s' % (fixedip.virtual_interface.address,
fixedip.instance.hostname,
CONF.dhcp_domain,
fixedip.address)
def _host_dns(fixedip):
return '%s\t%s.%s' % (fixedip.address,
fixedip.instance.hostname,
CONF.dhcp_domain)
def _host_dhcp_opts(vif_id=None, gateway=None):
"""Return an empty gateway option."""
values = []
if vif_id is not None:
values.append(_host_dhcp_network(vif_id))
# NOTE(vish): 3 is the dhcp option for gateway.
values.append('3')
if gateway:
values.append('%s' % gateway)
return ','.join(values)
def _execute(*cmd, **kwargs):
"""Wrapper around utils._execute for fake_network."""
if CONF.fake_network:
LOG.debug('FAKE NET: %s', ' '.join(map(str, cmd)))
return 'fake', 0
else:
return utils.execute(*cmd, **kwargs)
def device_exists(device):
"""Check if ethernet device exists."""
return os.path.exists('/sys/class/net/%s' % device)
def _dhcp_file(dev, kind):
"""Return path to a pid, leases, hosts or conf file for a bridge/device."""
fileutils.ensure_tree(CONF.networks_path)
return os.path.abspath('%s/nova-%s.%s' % (CONF.networks_path,
dev,
kind))
def _ra_file(dev, kind):
"""Return path to a pid or conf file for a bridge/device."""
fileutils.ensure_tree(CONF.networks_path)
return os.path.abspath('%s/nova-ra-%s.%s' % (CONF.networks_path,
dev,
kind))
def _dnsmasq_pid_for(dev):
"""Returns the pid for prior dnsmasq instance for a bridge/device.
Returns None if no pid file exists.
If machine has rebooted pid might be incorrect (caller should check).
"""
pid_file = _dhcp_file(dev, 'pid')
if os.path.exists(pid_file):
try:
with open(pid_file, 'r') as f:
return int(f.read())
except (ValueError, IOError):
return None
def _ra_pid_for(dev):
"""Returns the pid for prior radvd instance for a bridge/device.
Returns None if no pid file exists.
If machine has rebooted pid might be incorrect (caller should check).
"""
pid_file = _ra_file(dev, 'pid')
if os.path.exists(pid_file):
with open(pid_file, 'r') as f:
return int(f.read())
def _ip_bridge_cmd(action, params, device):
"""Build commands to add/del ips to bridges/devices."""
cmd = ['ip', 'addr', action]
cmd.extend(params)
cmd.extend(['dev', device])
return cmd
def _set_device_mtu(dev, mtu=None):
"""Set the device MTU."""
if not mtu:
mtu = CONF.network_device_mtu
if mtu:
utils.execute('ip', 'link', 'set', dev, 'mtu',
mtu, run_as_root=True,
check_exit_code=[0, 2, 254])
def _create_veth_pair(dev1_name, dev2_name):
"""Create a pair of veth devices with the specified names,
deleting any previous devices with those names.
"""
for dev in [dev1_name, dev2_name]:
delete_net_dev(dev)
utils.execute('ip', 'link', 'add', dev1_name, 'type', 'veth', 'peer',
'name', dev2_name, run_as_root=True)
for dev in [dev1_name, dev2_name]:
utils.execute('ip', 'link', 'set', dev, 'up', run_as_root=True)
utils.execute('ip', 'link', 'set', dev, 'promisc', 'on',
run_as_root=True)
_set_device_mtu(dev)
def _ovs_vsctl(args):
full_args = ['ovs-vsctl', '--timeout=%s' % CONF.ovs_vsctl_timeout] + args
try:
return utils.execute(*full_args, run_as_root=True)
except Exception as e:
LOG.error(_LE("Unable to execute %(cmd)s. Exception: %(exception)s"),
{'cmd': full_args, 'exception': e})
raise exception.AgentError(method=full_args)
def create_ovs_vif_port(bridge, dev, iface_id, mac, instance_id):
_ovs_vsctl(['--', '--if-exists', 'del-port', dev, '--',
'add-port', bridge, dev,
'--', 'set', 'Interface', dev,
'external-ids:iface-id=%s' % iface_id,
'external-ids:iface-status=active',
'external-ids:attached-mac=%s' % mac,
'external-ids:vm-uuid=%s' % instance_id])
_set_device_mtu(dev)
def delete_ovs_vif_port(bridge, dev):
_ovs_vsctl(['--', '--if-exists', 'del-port', bridge, dev])
delete_net_dev(dev)
def create_ivs_vif_port(dev, iface_id, mac, instance_id):
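    # NOTE: iface_id, mac and instance_id mirror create_ovs_vif_port's
    # signature but are not passed to ivs-ctl (assumed to be intentional,
    # for interface parity).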
utils.execute('ivs-ctl', 'add-port',
dev, run_as_root=True)
def delete_ivs_vif_port(dev):
utils.execute('ivs-ctl', 'del-port', dev,
run_as_root=True)
utils.execute('ip', 'link', 'delete', dev,
run_as_root=True)
def create_tap_dev(dev, mac_address=None):
if not device_exists(dev):
try:
# First, try with 'ip'
utils.execute('ip', 'tuntap', 'add', dev, 'mode', 'tap',
run_as_root=True, check_exit_code=[0, 2, 254])
except processutils.ProcessExecutionError:
# Second option: tunctl
utils.execute('tunctl', '-b', '-t', dev, run_as_root=True)
if mac_address:
utils.execute('ip', 'link', 'set', dev, 'address', mac_address,
run_as_root=True, check_exit_code=[0, 2, 254])
utils.execute('ip', 'link', 'set', dev, 'up', run_as_root=True,
check_exit_code=[0, 2, 254])
def delete_net_dev(dev):
"""Delete a network device only if it exists."""
if device_exists(dev):
try:
utils.execute('ip', 'link', 'delete', dev, run_as_root=True,
check_exit_code=[0, 2, 254])
LOG.debug("Net device removed: '%s'", dev)
except processutils.ProcessExecutionError:
with excutils.save_and_reraise_exception():
LOG.error(_LE("Failed removing net device: '%s'"), dev)
# Similar to compute virt layers, the Linux network node
# code uses a flexible driver model to support different ways
# of creating ethernet interfaces and attaching them to the network.
# In the case of a network host, these interfaces
# act as gateway/dhcp/vpn/etc. endpoints not VM interfaces.
interface_driver = None
def _get_interface_driver():
global interface_driver
if not interface_driver:
interface_driver = importutils.import_object(
CONF.linuxnet_interface_driver)
return interface_driver
def plug(network, mac_address, gateway=True):
return _get_interface_driver().plug(network, mac_address, gateway)
def unplug(network):
return _get_interface_driver().unplug(network)
def get_dev(network):
return _get_interface_driver().get_dev(network)
class LinuxNetInterfaceDriver(object):
"""Abstract class that defines generic network host API
for all Linux interface drivers.
"""
def plug(self, network, mac_address):
"""Create Linux device, return device name."""
raise NotImplementedError()
def unplug(self, network):
"""Destroy Linux device, return device name."""
raise NotImplementedError()
def get_dev(self, network):
"""Get device name."""
raise NotImplementedError()
# plugs interfaces using Linux Bridge
class LinuxBridgeInterfaceDriver(LinuxNetInterfaceDriver):
def plug(self, network, mac_address, gateway=True):
vlan = network.get('vlan')
if vlan is not None:
iface = CONF.vlan_interface or network['bridge_interface']
LinuxBridgeInterfaceDriver.ensure_vlan_bridge(
vlan,
network['bridge'],
iface,
network,
mac_address,
network.get('mtu'))
iface = 'vlan%s' % vlan
else:
iface = CONF.flat_interface or network['bridge_interface']
LinuxBridgeInterfaceDriver.ensure_bridge(
network['bridge'],
iface,
network, gateway)
if network['share_address'] or CONF.share_dhcp_address:
isolate_dhcp_address(iface, network['dhcp_server'])
# NOTE(vish): applying here so we don't get a lock conflict
iptables_manager.apply()
return network['bridge']
def unplug(self, network, gateway=True):
vlan = network.get('vlan')
if vlan is not None:
iface = 'vlan%s' % vlan
LinuxBridgeInterfaceDriver.remove_vlan_bridge(vlan,
network['bridge'])
else:
iface = CONF.flat_interface or network['bridge_interface']
LinuxBridgeInterfaceDriver.remove_bridge(network['bridge'],
gateway)
if network['share_address'] or CONF.share_dhcp_address:
remove_isolate_dhcp_address(iface, network['dhcp_server'])
iptables_manager.apply()
return self.get_dev(network)
def get_dev(self, network):
return network['bridge']
@staticmethod
def ensure_vlan_bridge(vlan_num, bridge, bridge_interface,
net_attrs=None, mac_address=None,
mtu=None):
"""Create a vlan and bridge unless they already exist."""
interface = LinuxBridgeInterfaceDriver.ensure_vlan(vlan_num,
bridge_interface, mac_address,
mtu)
LinuxBridgeInterfaceDriver.ensure_bridge(bridge, interface, net_attrs)
return interface
@staticmethod
def remove_vlan_bridge(vlan_num, bridge):
"""Delete a bridge and vlan."""
LinuxBridgeInterfaceDriver.remove_bridge(bridge)
LinuxBridgeInterfaceDriver.remove_vlan(vlan_num)
@staticmethod
@utils.synchronized('lock_vlan', external=True)
def ensure_vlan(vlan_num, bridge_interface, mac_address=None, mtu=None):
"""Create a vlan unless it already exists."""
interface = 'vlan%s' % vlan_num
if not device_exists(interface):
LOG.debug('Starting VLAN interface %s', interface)
_execute('ip', 'link', 'add', 'link', bridge_interface,
'name', interface, 'type', 'vlan',
'id', vlan_num, run_as_root=True,
check_exit_code=[0, 2, 254])
# (danwent) the bridge will inherit this address, so we want to
# make sure it is the value set from the NetworkManager
if mac_address:
_execute('ip', 'link', 'set', interface, 'address',
mac_address, run_as_root=True,
check_exit_code=[0, 2, 254])
_execute('ip', 'link', 'set', interface, 'up', run_as_root=True,
check_exit_code=[0, 2, 254])
# NOTE(vish): set mtu every time to ensure that changes to mtu get
        #             propagated
_set_device_mtu(interface, mtu)
return interface
@staticmethod
@utils.synchronized('lock_vlan', external=True)
def remove_vlan(vlan_num):
"""Delete a vlan."""
vlan_interface = 'vlan%s' % vlan_num
delete_net_dev(vlan_interface)
@staticmethod
@utils.synchronized('lock_bridge', external=True)
def ensure_bridge(bridge, interface, net_attrs=None, gateway=True,
filtering=True):
"""Create a bridge unless it already exists.
:param interface: the interface to create the bridge on.
:param net_attrs: dictionary with attributes used to create bridge.
:param gateway: whether or not the bridge is a gateway.
:param filtering: whether or not to create filters on the bridge.
If net_attrs is set, it will add the net_attrs['gateway'] to the bridge
using net_attrs['broadcast'] and net_attrs['cidr']. It will also add
the ip_v6 address specified in net_attrs['cidr_v6'] if use_ipv6 is set.
The code will attempt to move any ips that already exist on the
interface onto the bridge and reset the default gateway if necessary.
"""
if not device_exists(bridge):
LOG.debug('Starting Bridge %s', bridge)
_execute('brctl', 'addbr', bridge, run_as_root=True)
_execute('brctl', 'setfd', bridge, 0, run_as_root=True)
# _execute('brctl setageing %s 10' % bridge, run_as_root=True)
_execute('brctl', 'stp', bridge, 'off', run_as_root=True)
# (danwent) bridge device MAC address can't be set directly.
# instead it inherits the MAC address of the first device on the
# bridge, which will either be the vlan interface, or a
# physical NIC.
_execute('ip', 'link', 'set', bridge, 'up', run_as_root=True)
if interface:
LOG.debug('Adding interface %(interface)s to bridge %(bridge)s',
{'interface': interface, 'bridge': bridge})
out, err = _execute('brctl', 'addif', bridge, interface,
check_exit_code=False, run_as_root=True)
if (err and err != "device %s is already a member of a bridge; "
"can't enslave it to bridge %s.\n" % (interface, bridge)):
msg = _('Failed to add interface: %s') % err
raise exception.NovaException(msg)
out, err = _execute('ip', 'link', 'set', interface, 'up',
check_exit_code=False, run_as_root=True)
# NOTE(vish): This will break if there is already an ip on the
# interface, so we move any ips to the bridge
# NOTE(danms): We also need to copy routes to the bridge so as
# not to break existing connectivity on the interface
old_routes = []
out, err = _execute('ip', 'route', 'show', 'dev', interface)
for line in out.split('\n'):
fields = line.split()
if fields and 'via' in fields:
old_routes.append(fields)
_execute('ip', 'route', 'del', *fields,
run_as_root=True)
out, err = _execute('ip', 'addr', 'show', 'dev', interface,
'scope', 'global')
for line in out.split('\n'):
fields = line.split()
if fields and fields[0] == 'inet':
if fields[-2] in ('secondary', 'dynamic', ):
params = fields[1:-2]
else:
params = fields[1:-1]
_execute(*_ip_bridge_cmd('del', params, fields[-1]),
run_as_root=True, check_exit_code=[0, 2, 254])
_execute(*_ip_bridge_cmd('add', params, bridge),
run_as_root=True, check_exit_code=[0, 2, 254])
for fields in old_routes:
_execute('ip', 'route', 'add', *fields,
run_as_root=True)
if filtering:
# Don't forward traffic unless we were told to be a gateway
ipv4_filter = iptables_manager.ipv4['filter']
if gateway:
for rule in get_gateway_rules(bridge):
ipv4_filter.add_rule(*rule)
else:
ipv4_filter.add_rule('FORWARD',
('--in-interface %s -j %s'
% (bridge, CONF.iptables_drop_action)))
ipv4_filter.add_rule('FORWARD',
('--out-interface %s -j %s'
% (bridge, CONF.iptables_drop_action)))
@staticmethod
@utils.synchronized('lock_bridge', external=True)
def remove_bridge(bridge, gateway=True, filtering=True):
"""Delete a bridge."""
if not device_exists(bridge):
return
else:
if filtering:
ipv4_filter = iptables_manager.ipv4['filter']
if gateway:
for rule in get_gateway_rules(bridge):
ipv4_filter.remove_rule(*rule)
else:
drop_actions = ['DROP']
if CONF.iptables_drop_action != 'DROP':
drop_actions.append(CONF.iptables_drop_action)
for drop_action in drop_actions:
ipv4_filter.remove_rule('FORWARD',
('--in-interface %s -j %s'
% (bridge, drop_action)))
ipv4_filter.remove_rule('FORWARD',
('--out-interface %s -j %s'
% (bridge, drop_action)))
delete_net_dev(bridge)
@utils.synchronized('ebtables', external=True)
def ensure_ebtables_rules(rules, table='filter'):
for rule in rules:
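# Delete any existing copy of the rule first (ignoring failures), then
# re-insert it at the head of the chain so repeated calls stay idempotent.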
cmd = ['ebtables', '-t', table, '-D'] + rule.split()
_execute(*cmd, check_exit_code=False, run_as_root=True)
cmd[3] = '-I'
_execute(*cmd, run_as_root=True)
@utils.synchronized('ebtables', external=True)
def remove_ebtables_rules(rules, table='filter'):
for rule in rules:
cmd = ['ebtables', '-t', table, '-D'] + rule.split()
_execute(*cmd, check_exit_code=False, run_as_root=True)
def isolate_dhcp_address(interface, address):
# block arp traffic to address across the interface
rules = []
rules.append('INPUT -p ARP -i %s --arp-ip-dst %s -j DROP'
% (interface, address))
rules.append('OUTPUT -p ARP -o %s --arp-ip-src %s -j DROP'
% (interface, address))
rules.append('FORWARD -p IPv4 -i %s --ip-protocol udp '
'--ip-destination-port 67:68 -j DROP'
% interface)
rules.append('FORWARD -p IPv4 -o %s --ip-protocol udp '
'--ip-destination-port 67:68 -j DROP'
% interface)
# NOTE(vish): the above is not possible with iptables/arptables
ensure_ebtables_rules(rules)
def remove_isolate_dhcp_address(interface, address):
# block arp traffic to address across the interface
rules = []
rules.append('INPUT -p ARP -i %s --arp-ip-dst %s -j DROP'
% (interface, address))
rules.append('OUTPUT -p ARP -o %s --arp-ip-src %s -j DROP'
% (interface, address))
rules.append('FORWARD -p IPv4 -i %s --ip-protocol udp '
'--ip-destination-port 67:68 -j DROP'
% interface)
rules.append('FORWARD -p IPv4 -o %s --ip-protocol udp '
'--ip-destination-port 67:68 -j DROP'
% interface)
remove_ebtables_rules(rules)
# NOTE(vish): the above is not possible with iptables/arptables
def get_gateway_rules(bridge):
interfaces = CONF.forward_bridge_interface
if 'all' in interfaces:
return [('FORWARD', '-i %s -j ACCEPT' % bridge),
('FORWARD', '-o %s -j ACCEPT' % bridge)]
rules = []
for iface in CONF.forward_bridge_interface:
if iface:
rules.append(('FORWARD', '-i %s -o %s -j ACCEPT' % (bridge,
iface)))
rules.append(('FORWARD', '-i %s -o %s -j ACCEPT' % (iface,
bridge)))
rules.append(('FORWARD', '-i %s -o %s -j ACCEPT' % (bridge, bridge)))
rules.append(('FORWARD', '-i %s -j %s' % (bridge,
CONF.iptables_drop_action)))
rules.append(('FORWARD', '-o %s -j %s' % (bridge,
CONF.iptables_drop_action)))
return rules
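# For example (illustrative): with forward_bridge_interface=['all'] and a
# bridge named 'br100', get_gateway_rules returns just
#   [('FORWARD', '-i br100 -j ACCEPT'), ('FORWARD', '-o br100 -j ACCEPT')]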
# plugs interfaces using Open vSwitch
class LinuxOVSInterfaceDriver(LinuxNetInterfaceDriver):
def plug(self, network, mac_address, gateway=True):
dev = self.get_dev(network)
if not device_exists(dev):
bridge = CONF.linuxnet_ovs_integration_bridge
_ovs_vsctl(['--', '--may-exist', 'add-port', bridge, dev,
'--', 'set', 'Interface', dev, 'type=internal',
'--', 'set', 'Interface', dev,
'external-ids:iface-id=%s' % dev,
'--', 'set', 'Interface', dev,
'external-ids:iface-status=active',
'--', 'set', 'Interface', dev,
'external-ids:attached-mac=%s' % mac_address])
_execute('ip', 'link', 'set', dev, 'address', mac_address,
run_as_root=True)
_set_device_mtu(dev, network.get('mtu'))
_execute('ip', 'link', 'set', dev, 'up', run_as_root=True)
if not gateway:
# If we weren't instructed to act as a gateway then add the
# appropriate flows to block all non-dhcp traffic.
_execute('ovs-ofctl',
'add-flow', bridge, 'priority=1,actions=drop',
run_as_root=True)
_execute('ovs-ofctl', 'add-flow', bridge,
'udp,tp_dst=67,dl_dst=%s,priority=2,actions=normal' %
mac_address, run_as_root=True)
# .. and make sure iptables won't forward it as well.
iptables_manager.ipv4['filter'].add_rule('FORWARD',
'--in-interface %s -j %s' % (bridge,
CONF.iptables_drop_action))
iptables_manager.ipv4['filter'].add_rule('FORWARD',
'--out-interface %s -j %s' % (bridge,
CONF.iptables_drop_action))
else:
for rule in get_gateway_rules(bridge):
iptables_manager.ipv4['filter'].add_rule(*rule)
return dev
def unplug(self, network):
dev = self.get_dev(network)
bridge = CONF.linuxnet_ovs_integration_bridge
_ovs_vsctl(['--', '--if-exists', 'del-port', bridge, dev])
return dev
def get_dev(self, network):
dev = 'gw-' + str(network['uuid'][0:11])
return dev
# plugs interfaces using Linux Bridge when using NeutronManager
class NeutronLinuxBridgeInterfaceDriver(LinuxNetInterfaceDriver):
BRIDGE_NAME_PREFIX = 'brq'
GATEWAY_INTERFACE_PREFIX = 'gw-'
def plug(self, network, mac_address, gateway=True):
dev = self.get_dev(network)
bridge = self.get_bridge(network)
if not gateway:
# If we weren't instructed to act as a gateway then add the
# appropriate flows to block all non-dhcp traffic.
# .. and make sure iptables won't forward it as well.
iptables_manager.ipv4['filter'].add_rule('FORWARD',
('--in-interface %s -j %s'
% (bridge, CONF.iptables_drop_action)))
iptables_manager.ipv4['filter'].add_rule('FORWARD',
('--out-interface %s -j %s'
% (bridge, CONF.iptables_drop_action)))
return bridge
else:
for rule in get_gateway_rules(bridge):
iptables_manager.ipv4['filter'].add_rule(*rule)
create_tap_dev(dev, mac_address)
if not device_exists(bridge):
LOG.debug("Starting bridge %s ", bridge)
utils.execute('brctl', 'addbr', bridge, run_as_root=True)
utils.execute('brctl', 'setfd', bridge, str(0), run_as_root=True)
utils.execute('brctl', 'stp', bridge, 'off', run_as_root=True)
utils.execute('ip', 'link', 'set', bridge, 'address', mac_address,
run_as_root=True, check_exit_code=[0, 2, 254])
utils.execute('ip', 'link', 'set', bridge, 'up', run_as_root=True,
check_exit_code=[0, 2, 254])
LOG.debug("Done starting bridge %s", bridge)
full_ip = '%s/%s' % (network['dhcp_server'],
network['cidr'].rpartition('/')[2])
utils.execute('ip', 'address', 'add', full_ip, 'dev', bridge,
run_as_root=True, check_exit_code=[0, 2, 254])
return dev
def unplug(self, network):
dev = self.get_dev(network)
if not device_exists(dev):
return None
else:
delete_net_dev(dev)
return dev
def get_dev(self, network):
dev = self.GATEWAY_INTERFACE_PREFIX + str(network['uuid'][0:11])
return dev
def get_bridge(self, network):
bridge = self.BRIDGE_NAME_PREFIX + str(network['uuid'][0:11])
return bridge
# provide compatibility with existing configs
QuantumLinuxBridgeInterfaceDriver = NeutronLinuxBridgeInterfaceDriver
iptables_manager = IptablesManager()
|
apache-2.0
| -7,665,344,293,116,199,000
| 37.979748
| 79
| 0.538292
| false
| 3.971448
| false
| false
| false
|
ProgVal/Supybot-website
|
plugins/models.py
|
1
|
2683
|
from django.contrib.auth.models import User
from django.db import models
from django import forms
import datetime
class Plugin(models.Model):
author = models.ForeignKey(User, help_text='The user who wrote the plugin.')
name = models.SlugField(max_length=255, help_text='The name of the plugin.',
unique=True)
short_description = models.TextField(max_length=512, help_text='A short '
'description of the plugin, shown in list view.')
description = models.TextField(help_text='A full description of the '
'plugin.')
minimal_version = models.CharField(max_length=4096, help_text='The oldest '
'Supybot version compatible with this plugin.', default='0.83.4.1')
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
published = models.BooleanField(default=False, help_text='Determines '
'whether or not the plugin can be seen by everyone.')
url = models.URLField(blank=True, help_text='The URL to the website for the plugin.')
# git_repo is not a foreign key to GitRepository, because GitRepository
# items are only helpers for developers, and are totally optional.
git_repo = models.CharField(max_length=512, help_text='The URL to the '
'Git repository.', blank=True)
def __unicode__(self):
return u'%s' % self.name
def get_absolute_url(self):
return '/plugins/view/%s/' % self.name
class Meta:
ordering = ['-created_at']
class PluginSubmitForm(forms.ModelForm):
class Meta:
model = Plugin
fields = ('name', 'published', 'minimal_version', 'git_repo', 'url',
'short_description', 'description')
class PluginEditForm(PluginSubmitForm):
class Meta(PluginSubmitForm.Meta):
exclude = ('name',)
class PluginComment(models.Model):
key = models.ForeignKey(Plugin)
user = models.ForeignKey(User)
text = models.TextField()
created_date = models.DateTimeField(auto_now_add=True)
class GitRepository(models.Model):
maintainer = models.ForeignKey(User)
name = models.SlugField(unique=True)
url = models.CharField(max_length=512)
latest_fetch = models.DateTimeField(default=datetime.datetime.min)
state = models.CharField(max_length=1, choices=(
('c', 'cloning'),
('o', 'ok'),
('w', 'working'),
('n', 'not initialized')))
def __unicode__(self):
return self.name
class Meta:
verbose_name_plural = 'Git repositories'
class GitRepositoryForm(forms.ModelForm):
class Meta:
model = GitRepository
fields = ('name', 'url',)
|
bsd-3-clause
| -7,488,097,766,186,343,000
| 34.302632
| 89
| 0.660082
| false
| 3.922515
| false
| false
| false
|
imcgreer/simqso
|
simqso/sqrun.py
|
1
|
27784
|
#!/usr/bin/env python
import os
import numpy as np
from functools import partial
from astropy.io import fits
from astropy.table import Table,hstack
from astropy import cosmology
from . import sqbase
from . import sqgrids as grids
from . import hiforest
from . import dustextinction
from . import sqphoto
from . import sqmodels
import multiprocessing
def buildWaveGrid(simParams):
dispersionScale = simParams.get('DispersionScale','logarithmic')
if dispersionScale == 'logarithmic':
lam1,lam2 = simParams['waveRange']
R = simParams['SpecDispersion']
wave = sqbase.fixed_R_dispersion(lam1,lam2,R)
else:
raise ValueError('Dispersion scale %s not supported' % dispersionScale)
return wave
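# Example (a sketch): a logarithmic grid at fixed resolution R=500 covering
# 3000-10000 Angstroms would be configured as
#   simParams = {'waveRange': (3000., 10000.), 'SpecDispersion': 500}
#   wave = buildWaveGrid(simParams)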
def reseed(par):
try:
np.random.seed(par['RandomSeed'])
except KeyError:
pass
def buildQsoGrid(simParams):
'''
Create a grid of simulated quasar "points". This function parses the
'GridParams' section of simParams, and interprets the following options:
- FluxRedshiftGrid : points are defined by (appMag,z)
- LuminosityRedshiftGrid : points are defined by (absMag,z)
- LuminosityFunction : points are defined by (appMag,z) and sampled from
a luminosity function.
'''
cosmodef = simParams.get('Cosmology')
gridPars = simParams['GridParams']
try:
gridType = gridPars['GridType']
except KeyError:
raise ValueError('Must specify a GridType')
kcorrType = gridPars.get('InitialKCorrection','Continuum')
if kcorrType == 'Continuum':
kcorr = sqbase.ContinuumKCorr(gridPars['ObsBand'],
gridPars['RestBand'])
elif kcorrType == 'DefaultEmissionLine':
kcorr = sqbase.EmissionLineKCorr(gridPars['ObsBand'],
gridPars['RestBand'])
else:
raise ValueError
reseed(gridPars)
#
def get_nbins(low,high,n):
if type(n) is int:
return n
else:
return int( np.floor((high - low) / n) )
if gridType.endswith('RedshiftGrid'):
m1,m2,nm = gridPars['mRange']
z1,z2,nz = gridPars['zRange']
nBins = ( get_nbins(m1,m2,nm), get_nbins(z1,z2,nz) )
mSampler = grids.UniformSampler(m1,m2)
zSampler = grids.UniformSampler(z1,z2)
if gridType.startswith('Luminosity'):
m = grids.AbsMagVar(mSampler,restWave=gridPars['LumUnits'])
units = 'luminosity'
elif gridType.startswith('Flux'):
m = grids.AppMagVar(mSampler,gridPars['ObsBand'])
units = 'flux'
z = grids.RedshiftVar(zSampler)
elif gridType == 'FixedGrid':
raise NotImplementedError
m = grids.FixedSampler(gridPars['fixed_M'])
z = grids.FixedSampler(gridPars['fixed_z'])
# XXX units
elif gridType == 'LuminosityFunction':
try:
qlf = gridPars['QLFmodel']
qlf.set_cosmology(cosmodef)
except KeyError:
raise ValueError('Must specify a parameterization of the LF')
qsoGrid = grids.generateQlfPoints(qlf,
gridPars['mRange'],
gridPars['zRange'],
kcorr,
**gridPars['QLFargs'])
units = 'flux'
else:
raise ValueError('GridType %s unknown' % gridType)
if gridType != 'LuminosityFunction':
qsoGrid = grids.QsoSimGrid([m,z],nBins,gridPars['nPerBin'],
units=units,cosmo=cosmodef)
try:
_ = qsoGrid.absMag
except:
absMag = grids.AbsMagFromAppMagVar(qsoGrid.appMag,z,kcorr,cosmodef,
gridPars['RestBand'])
qsoGrid.addVar(absMag)
return qsoGrid
def buildForest(wave,z,simParams,outputDir):
'''Create a set of absorbers for a given number of lines-of-sight,
sampled according to the input forest model. Then calculate the
transmission along each line of sight. The input redshifts correspond
to individual QSOs. The number of LOSs is generally smaller so that
fewer forest computations are needed; individual LOSs are built up
in redshift steps as each QSO redshift is iterated.
'''
forestParams = simParams['ForestParams']
reseed(forestParams)
forestFn = forestParams.get('FileName')
tgrid = None
if forestFn:
try:
tgrid = hiforest.CachedIGMTransmissionGrid(forestFn,outputDir)
if not np.allclose(wave[:len(tgrid.specWave)],tgrid.specWave):
raise ValueError("Input wavegrid doesn't match stored wave")
except IOError:
pass
if tgrid is None:
nlos = forestParams['NumLinesOfSight']
forestModel = forestParams['ForestModel']
if isinstance(forestModel,str):
forestModel = sqmodels.forestModels[forestModel]
tgrid = hiforest.IGMTransmissionGrid(wave,forestModel,nlos,
zmax=z.max(),**forestParams)
return tgrid
def buildContinuumModels(qsoGrid,simParams,verbose=0):
continuumParams = simParams['QuasarModelParams']['ContinuumParams']
reseed(continuumParams)
slopes = continuumParams['PowerLawSlopes'][::2]
breakpts = continuumParams['PowerLawSlopes'][1::2]
if verbose > 0:
print('... building continuum grid')
cmodel = continuumParams['ContinuumModel']
if cmodel == 'BrokenPowerLaw':
slopeVars = [ grids.GaussianSampler(*s) for s in slopes ]
continuumVars = [ grids.BrokenPowerLawContinuumVar(slopeVars,
breakpts) ]
elif isinstance(cmodel,grids.QsoSimVar):
continuumVars = [ cmodel ]
else:
raise ValueError
qsoGrid.addVars(continuumVars)
def buildEmissionLineGrid(qsoGrid,simParams):
emLineParams = simParams['QuasarModelParams']['EmissionLineParams']
reseed(emLineParams)
if emLineParams['EmissionLineModel'] == 'FixedVdBCompositeLines':
emLineGrid = grids.generateVdBCompositeEmLines(
minEW=emLineParams.get('minEW',1.0),
noFe=emLineParams.get('VdB_noFe',False))
elif emLineParams['EmissionLineModel'] == 'VariedEmissionLineGrid':
emLineGrid = grids.generateBEffEmissionLines(qsoGrid.absMag,
**emLineParams)
elif isinstance(emLineParams['EmissionLineModel'],grids.QsoSimVar):
emLineGrid = emLineParams['EmissionLineModel']
else:
raise ValueError('invalid emission line model: ' +
emLineParams['EmissionLineModel'])
qsoGrid.addVar(emLineGrid)
def buildDustGrid(qsoGrid,simParams,verbose=0):
if verbose > 0:
print('... building dust extinction grid')
dustParams = simParams['QuasarModelParams']['DustExtinctionParams']
reseed(dustParams)
if dustParams['DustExtinctionModel'] == 'Fixed E(B-V)':
sampler = grids.ConstSampler(dustParams['E(B-V)'])
elif dustParams['DustExtinctionModel']=='Exponential E(B-V) Distribution':
sampler = grids.ExponentialSampler(dustParams['E(B-V)'])
else:
raise ValueError('invalid dust extinction model: '+
dustParams['DustExtinctionModel'])
if dustParams['DustModelName'] == 'SMC':
dustVar = grids.SMCDustVar(sampler)
elif dustParams['DustModelName'] == 'CalzettiSB':
dustVar = grids.CalzettiDustVar(sampler)
else:
raise ValueError('invalid dust extinction model: '+
dustParams['DustModelName'])
# XXX
# fraction=dustParams.get('DustLOSfraction',1.0))
qsoGrid.addVar(dustVar)
def buildFeatures(qsoGrid,wave,simParams,forest=None,verbose=0):
buildContinuumModels(qsoGrid,simParams,verbose=verbose)
qsoParams = simParams['QuasarModelParams']
if 'EmissionLineParams' in qsoParams:
buildEmissionLineGrid(qsoGrid,simParams)
if 'IronEmissionParams' in qsoParams:
# only option for now is the VW01 template
scalings = qsoParams['IronEmissionParams'].get('FeScalings')
feGrid = grids.VW01FeTemplateGrid(qsoGrid.z,wave,scales=scalings)
qsoGrid.addVar(grids.FeTemplateVar(feGrid))
if 'DustExtinctionParams' in qsoParams:
buildDustGrid(qsoGrid,simParams,verbose=verbose)
if forest is not None:
if isinstance(forest,hiforest.CachedIGMTransmissionGrid):
losMap = forest.losMap
else:
losMap = None
if isinstance(forest,hiforest.GridForest):
forestVar = grids.SightlineVar(forest,losMap=losMap)
else:
forestVar = grids.HIAbsorptionVar(forest,losMap=losMap)
qsoGrid.addVar(forestVar)
def _getpar(feature,obj):
if feature is None:
return None
elif isinstance(feature.sampler,grids.NullSampler):
return None
elif isinstance(feature.sampler,grids.IndexSampler):
return obj.index
else:
return obj[feature.name]
def buildQsoSpectrum(wave,cosmo,specFeatures,obj,iterNum=1,
save_components=False):
spec = sqbase.Spectrum(wave,z=obj['z'])
if save_components:
base = sqbase.Spectrum(spec.wave,spec.f_lambda.copy(),spec.z)
components = {}
# start with continuum
if cosmo is None:
fluxNorm = None
else:
distmod = lambda z: cosmo.distmod(z).value
fluxNorm = {'wavelength':1450.,'M_AB':obj['absMag'],'DM':distmod}
for feature in specFeatures:
if isinstance(feature,grids.ContinuumVar):
assocvals = _getpar(feature.get_associated_var(),obj)
spec = feature.add_to_spec(spec,_getpar(feature,obj),
assocvals=assocvals,
fluxNorm=fluxNorm)
if save_components:
components[feature.name] = spec - base
base.f_lambda[:] = spec.f_lambda
# add emission (multiplicative) features
emspec = sqbase.Spectrum(wave,z=obj['z'])
if save_components:
base = sqbase.Spectrum(emspec.wave,emspec.f_lambda.copy(),emspec.z)
for feature in specFeatures:
if isinstance(feature,grids.EmissionFeatureVar):
assocvals = _getpar(feature.get_associated_var(),obj)
emspec = feature.add_to_spec(emspec,_getpar(feature,obj),
assocvals=assocvals)
if save_components:
components[feature.name] = emspec - base
base.f_lambda[:] = emspec.f_lambda
spec *= emspec + 1
# add any remaining features
for feature in specFeatures:
if isinstance(feature,grids.ContinuumVar) or \
isinstance(feature,grids.EmissionFeatureVar):
continue
assocvals = _getpar(feature.get_associated_var(),obj)
spec = feature.add_to_spec(spec,_getpar(feature,obj),
assocvals=assocvals,
advance=(iterNum==1))
if save_components:
components[feature.name] = spec - base
base.f_lambda[:] = spec.f_lambda
if save_components:
return spec,components
else:
return spec
def buildGrpSpectra(wave,cosmo,specFeatures,photoCache,saveSpectra,
fluxBand,nIter,verbose,objGroup):
n = len(objGroup)
if verbose and verbose > 0:
losGrp = objGroup['igmlos'][0]
if losGrp % verbose == 0:
print('processing ',n,' obj in group ',losGrp)
rv = dict()
if photoCache:
nb = len(photoCache)
rv['synMag'] = np.zeros((n,nb),dtype=np.float32)
rv['synFlux'] = np.zeros((n,nb),dtype=np.float32)
if saveSpectra:
nw = len(wave)
rv['spectra'] = np.zeros((n,nw),dtype=np.float32)
zi = objGroup['z'].argsort()
for i in zi:
for iterNum in range(1,nIter+1):
sp = buildQsoSpectrum(wave,cosmo,specFeatures,objGroup[i],iterNum)
if photoCache is not None:
synMag,synFlux = sqphoto.calcSynPhot(sp,photoCache=photoCache)
if nIter > 1:
dm = synMag[fluxBand] - objGroup['appMag'][i]
objGroup['absMag'][i] -= dm
# resample features with updated absolute mags
for var in specFeatures:
if var.dependentVars is not None:
var.resample(objGroup[var.dependentVars][i],ii=i)
# pass index as 1d-array to preserve correct shape
objGroup[var.name][i] = var(None,ii=np.array([i]))
if np.abs(dm) < 0.005:
break
if photoCache is not None:
rv['synMag'][i] = synMag
rv['synFlux'][i] = synFlux
if saveSpectra:
rv['spectra'][i] = sp.f_lambda
rv['absMag'] = objGroup['absMag'].copy()
return rv
def _regroup(spOut):
# XXX tell me there's a better way to do this
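# (a zip-based transpose would also do it:
#  return [ np.array(v) for v in zip(*spOut) ])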
n = len(spOut[0])
rv = [ [] for i in range(n) ]
for sp in spOut:
for j in range(n):
rv[j].append(sp[j])
return [ np.array(v) for v in rv ]
def buildSpectraBySightLine(wave,qsoGrid,procMap=map,
maxIter=1,verbose=0,saveSpectra=False):
'''Assemble the spectral components of QSOs from the input parameters.
Parameters
----------
wave : `~numpy.ndarray`
Input wavelength grid.
'''
photoCache = qsoGrid.getPhotoCache(wave)
if verbose > 0:
print('simulating ',qsoGrid.nObj,' quasar spectra')
print('units are ',qsoGrid.units)
print('max number iterations: ',maxIter)
verby = 0 if not verbose else qsoGrid.nObj//(5*verbose)
if qsoGrid.units == 'luminosity' or photoCache is None:
nIter = 1
fluxBand = None
else:
nIter = maxIter
fluxBand = qsoGrid.getObsBandIndex()
#
# extract the feature lists, group by sightline, and run
specFeatures = qsoGrid.getVars(grids.SpectralFeatureVar)
build_grp_spec = partial(buildGrpSpectra,wave,qsoGrid.cosmo,
specFeatures,photoCache,saveSpectra,
fluxBand,nIter,verby)
qsoGroups = qsoGrid.group_by('igmlos',with_index=True)
# pool.map() doesn't like the iterable produced by table.group_by(), so
# forcing resolution of the elements here with list() -- not that much
# memory anyway
specOut = list(procMap(build_grp_spec,list(qsoGroups)))
if qsoGrid.photoMap:
bands = qsoGrid.photoBands
def newarr():
return np.zeros((qsoGrid.nObj,len(bands)),dtype=np.float32)
qsoGrid.addVar(grids.SynMagVar(grids.FixedSampler(newarr())))
qsoGrid.addVar(grids.SynFluxVar(grids.FixedSampler(newarr())))
# the output needs to be remapped to the input locations
for objgrp,out in zip(qsoGroups,specOut):
for k in ['absMag','synMag','synFlux']:
qsoGrid.data[k][objgrp['_ii']] = out[k]
if saveSpectra:
spectra = np.vstack([s['spectra'] for s in specOut])
spectra = spectra[qsoGroups.parent['_ii'].argsort()]
else:
spectra = None
return qsoGrid,spectra
def buildSpecWithPhot(wave,cosmo,specFeatures,photoCache,
objData,iterNum=None,saveSpectra=False):
sp = buildQsoSpectrum(wave,cosmo,specFeatures,objData,
iterNum=iterNum)
if photoCache is None:
rv = (None,None)
else:
rv = sqphoto.calcSynPhot(sp,photoCache=photoCache)
if saveSpectra:
rv = rv + (sp.f_lambda,)
else:
rv = rv + (None,)
return rv
def buildSpectraBulk(wave,qsoGrid,procMap=map,
maxIter=1,verbose=0,saveSpectra=False):
'''Assemble the spectral components of QSOs from the input parameters.
Parameters
----------
wave : `~numpy.ndarray`
Input wavelength grid.
'''
photoCache = qsoGrid.getPhotoCache(wave)
if verbose > 0:
print('simulating ',qsoGrid.nObj,' quasar spectra')
print('units are ',qsoGrid.units)
if qsoGrid.units == 'luminosity' or photoCache is None:
nIter = 1
fluxBand = None
else:
nIter = maxIter
fluxBand = qsoGrid.getObsBandIndex()
#
for iterNum in range(1,nIter+1):
specFeatures = qsoGrid.getVars(grids.SpectralFeatureVar)
samplers = []
for f in specFeatures:
samplers.append(f.sampler)
if not ( isinstance(f.sampler,grids.NullSampler) or
isinstance(f.sampler,grids.IndexSampler) ):
f.sampler = None
build_one_spec = partial(buildSpecWithPhot,wave,qsoGrid.cosmo,
specFeatures,photoCache,iterNum=iterNum,
saveSpectra=saveSpectra)
if verbose > 1:
print('buildSpectra iteration ',iterNum,' out of ',nIter)
specOut = list(procMap(build_one_spec,qsoGrid))
specOut = _regroup(specOut)
synMag,synFlux,spectra = specOut
v = qsoGrid.getVars(grids.SightlineVar)
if len(v) > 0 and isinstance(v[0].forest,hiforest.GridForest):
jj,dm,df = v[0].forest.get(qsoGrid.data['igmlos'],
qsoGrid.data['z'])
synMag[:,jj] += dm
synFlux[:,jj] *= df
for f,s in zip(specFeatures,samplers):
f.sampler = s
if nIter > 1:
# find the largest mag offset
dm = synMag[:,fluxBand] - qsoGrid.appMag
if verbose > 1:
print('--> delta mag mean = %.7f, rms = %.7f, |max| = %.7f' % \
(dm.mean(),dm.std(),np.abs(dm).max()))
qsoGrid.absMag[:] -= dm
dmagMax = np.abs(dm).max()
# resample features with updated absolute mags
for var in specFeatures:
if var.dependentVars is not None:
var.resample(qsoGrid.data[var.dependentVars])
qsoGrid.data[var.name][:] = var(None)
if dmagMax < 0.01:
break
if qsoGrid.photoMap is not None:
qsoGrid.addVar(grids.SynMagVar(grids.FixedSampler(synMag)))
qsoGrid.addVar(grids.SynFluxVar(grids.FixedSampler(synFlux)))
return qsoGrid,spectra
def readSimulationData(fileName,outputDir,retParams=False,clean=False):
qsoGrid = grids.QsoSimObjects()
qsoGrid.read(os.path.join(outputDir,fileName+'.fits'),clean=clean)
simPars = qsoGrid.simPars
gridPars = simPars['GridParams']
try:
mSampler = grids.FixedSampler(qsoGrid.appMag)
m = grids.AppMagVar(mSampler,gridPars['ObsBand'])
except:
mSampler = grids.FixedSampler(qsoGrid.absMag)
m = grids.AbsMagVar(mSampler,restWave=gridPars['LumUnits'])
z = grids.RedshiftVar(grids.FixedSampler(qsoGrid.z))
qsoGrid.addVars([m,z])
if retParams:
return qsoGrid,simPars
return qsoGrid
def restore_qso_grid(fileName,wave,outputDir='.',**kwargs):
qsoGrid = grids.QsoSimObjects()
if not fileName.endswith('.fits'):
fileName += '.fits'
qsoGrid.read(os.path.join(outputDir,fileName),**kwargs)
# IGM transmission spectra depend on a (possibly) pre-computed grid,
# which must be regenerated
try:
hiVar = qsoGrid.getVars(grids.HIAbsorptionVar)[0]
fmodel,nlos,kwargs = hiVar.varmeta
igmGrid = hiforest.IGMTransmissionGrid(wave,fmodel,nlos,**kwargs)
hiVar.set_forest_grid(igmGrid)
except IndexError:
# no forest
pass
# Fe template spectra depend on a (possibly) pre-computed grid,
# which must be regenerated
try:
feVar = qsoGrid.getVars(grids.FeTemplateVar)[0]
kwargs = feVar.varmeta
fetempl = grids.VW01FeTemplateGrid(qsoGrid.z,wave,**kwargs)
feVar.set_template_grid(fetempl)
except IndexError:
# no Fe template
pass
#
return qsoGrid
def qsoSimulation(simParams,**kwargs):
'''
Run a complete simulation.
1. Construct grid of QSOs.
2. Generate Lyman forest transmission spectra from a subsample of
random LOSs (optional).
3. Sample QSO spectral features (continuum, emission lines, dust).
4. Build simulated spectra and derive photometry (photometry is optional).
5. Transfer the simulated photometry to observed photometry by
calculating errors and folding them in (optional).
Parameters
----------
saveSpectra : bool
save the simulated spectra, not just the photometry.
Beware! result may be quite large (Nqso x Npixels). [default:False]
forestOnly : bool
Only generate the forest transmission spectra. [default:False]
noPhotoMap : bool
skip the simulation of observed photometry [default:False]
outputDir : str
write files to this directory [default:'./']
nproc : int
number of processes to use [default: 1]
'''
saveSpectra = kwargs.get('saveSpectra',False)
forestOnly = kwargs.get('forestOnly',False)
noPhotoMap = kwargs.get('noPhotoMap',False)
noWriteOutput = kwargs.get('noWriteOutput',False)
outputDir = kwargs.get('outputDir','./')
nproc = kwargs.get('nproc',1)
verbose = kwargs.get('verbose',0)
#
# build or restore the grid of (M,z) for each QSO
#
wave = buildWaveGrid(simParams)
reseed(simParams)
if nproc > 1:
pool = multiprocessing.Pool(nproc)
procMap = pool.map
else:
procMap = map
timerLog = sqbase.TimerLog()
try:
qsoGrid,simParams = readSimulationData(simParams['FileName'],
outputDir,retParams=True,
clean=True)
except IOError:
if verbose > 0:
print(simParams['FileName']+' output not found')
if 'GridFileName' in simParams:
if verbose > 0:
print('restoring grid from ',simParams['GridFileName'])
try:
qsoGrid = readSimulationData(simParams['GridFileName'],
outputDir)
except IOError:
if verbose > 0:
print(simParams['GridFileName'],' not found, generating')
qsoGrid = buildQsoGrid(simParams)
qsoGrid.write(simParams,outputDir,
simParams['GridFileName']+'.fits')
else:
if verbose > 0:
print('generating QSO grid')
qsoGrid = buildQsoGrid(simParams)
if not forestOnly:
if not noWriteOutput and 'GridFileName' in simParams:
qsoGrid.write(simParams,outputDir,
simParams['GridFileName']+'.fits')
qsoGrid.setCosmology(simParams.get('Cosmology'))
timerLog('Initialize Grid')
#
# configure the IGM transmission spectra grid (load if cached)
#
if 'ForestParams' in simParams:
forest = buildForest(wave,qsoGrid.z,simParams,outputDir)
else:
forest = None
if forestOnly:
timerLog.dump()
return
#
if isinstance(forest,hiforest.IGMTransmissionGrid):
# build sightlines on-the-fly
buildSpec = buildSpectraBySightLine
# if the user specified a file name, save the forest spectra in it
fpar = simParams.get('ForestParams',{})
forestFn = fpar.get('FileName')
if forestFn:
# map the objects to sightlines and save the forest spectra grid
losSampler = grids.RandomSubSampler(forest.numSightLines)
losMap = losSampler.sample(qsoGrid.nObj)
forest.write(forestFn,outputDir,losMap=losMap,
z_em=qsoGrid.z,**fpar)
# now use the cached forest
forest = hiforest.CachedIGMTransmissionGrid(forestFn,outputDir)
if not np.allclose(wave[:len(forest.specWave)],forest.specWave):
raise ValueError("Input wavegrid doesn't match stored wave")
timerLog('Generate Forest')
else:
# else no forest or cached forest
buildSpec = buildSpectraBulk
#
qsoGrid.loadPhotoMap(simParams['PhotoMapParams']['PhotoSystems'])
if 'GridForestFile' in simParams:
forest = hiforest.GridForest(simParams['GridForestFile'],
qsoGrid.photoBands)
#
# add the quasar model variables to the grid (does the random sampling)
#
buildFeatures(qsoGrid,wave,simParams,forest,verbose=verbose)
timerLog('Generate Features')
#
# Use continuum and emission line distributions to build the components
# of the intrinsic QSO spectrum, then calculate photometry
#
_,spectra = buildSpec(wave,qsoGrid,procMap,
maxIter=simParams.get('maxFeatureIter',5),
verbose=verbose,saveSpectra=saveSpectra)
timerLog('Build Quasar Spectra')
#
# map the simulated photometry to observed values with uncertainties
#
if not noPhotoMap:
if verbose > 0:
print('mapping photometry')
reseed(simParams['PhotoMapParams'])
photoData = sqphoto.calcObsPhot(qsoGrid.synFlux,qsoGrid.photoMap)
qsoGrid.addData(photoData)
timerLog('PhotoMap')
timerLog.dump()
if nproc > 1:
pool.close()
if not noWriteOutput:
qsoGrid.write(simPars=simParams,outputDir=outputDir)
if saveSpectra:
spfn = simParams['FileName']+'_spectra'
save_spectra(wave,spectra,spfn,outputDir)
return qsoGrid,spectra
else:
return qsoGrid
def load_sim_output(simFileName,outputDir='.',with_spec=True):
simdat,par = readSimulationData(simFileName,outputDir,retParams=True)
if with_spec:
sp = fits.getdata(os.path.join(outputDir,simFileName+'_spectra.fits'))
wave = buildWaveGrid(par)
qsos = hstack([simdat.data,Table(dict(spec=sp))])
return wave,qsos
else:
return simdat.data
def save_spectra(wave,spectra,fileName,outputDir='.',overwrite=True):
logwave = np.log(wave[:2])
dloglam = np.diff(logwave)
hdr = fits.Header()
hdr['CD1_1'] = float(dloglam)
hdr['CRPIX1'] = 1
hdr['CRVAL1'] = logwave[0]
hdr['CRTYPE1'] = 'LOGWAVE'
hdr['SPECSCAL'] = (1e-17,'erg/s/cm^2/A')
spectra = (spectra*1e17).astype(np.float32)
if not fileName.endswith('.fits'):
fileName += '.fits'
fits.writeto(os.path.join(outputDir,fileName),spectra,header=hdr,
overwrite=overwrite)
def load_spectra(fileName,outputDir='.'):
if not fileName.endswith('.fits'):
fileName += '.fits'
spec,hdr = fits.getdata(os.path.join(outputDir,fileName),header=True)
wi = np.arange(spec.shape[-1])
logwave = hdr['CRVAL1'] + hdr['CD1_1']*(wi-(hdr['CRPIX1']-1))
wave = np.exp(logwave)
return wave,spec
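# Example round trip (illustrative): spectra are stored as float32 in units of
# 1e-17 erg/s/cm^2/A on a log-lambda WCS, so
#   save_spectra(wave, spectra, 'sim_spectra', outputDir)
#   wave2, spec2 = load_spectra('sim_spectra', outputDir)
# recovers the wavelength grid from the CRVAL1/CD1_1/CRPIX1 keywords.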
def generate_default_binned_forest(fileName,outputDir='.',**kwargs):
nlos = kwargs.pop('numSightlines',1000)
zbins = kwargs.pop('zBins',np.arange(0.1,4.6,0.025))
waverange = kwargs.pop('waverange',(1300.,7000))
R = kwargs.pop('R',300)
hiforest.generate_binned_forest(fileName,sqmodels.WP11_model,
nlos,zbins,waverange,R,
outputDir=outputDir,**kwargs)
|
bsd-3-clause
| -7,249,735,259,691,682,000
| 38.634807
| 79
| 0.610711
| false
| 3.651945
| false
| false
| false
|
lrei/canonical_urls
|
urlhelpers.py
|
1
|
1775
|
"""
"""
import re
import logging
import rfc3987
import urlparse
def url_encode_non_ascii(b):
return re.sub('[\x80-\xFF]', lambda c: '%%%02x' % ord(c.group(0)), b)
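# e.g. (illustrative): url_encode_non_ascii('caf\xc3\xa9') -> 'caf%c3%a9'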
def ensure_url(iri):
'''If given an IRI, convert it to a URL.
If there is a fragment (#), remove it.
http://stackoverflow.com/posts/4391299/revisions
'''
# if it's not unicode, it must be utf8, otherwise fail
if not isinstance(iri, unicode):
try:
uri = iri.decode('utf8') # noqa - we check if decoding works here
except Exception as e:
logging.exception(e)
return None
parts = urlparse.urlparse(iri)
url_parts = []
for index, part in enumerate(parts):
if index == 1:
url_parts.append(part.lower().encode('idna'))
else:
url_parts.append(url_encode_non_ascii(part.encode('utf-8')))
url = urlparse.urlunparse(url_parts)
url = urlparse.urldefrag(url)[0]
return url
def validate_url(url):
'''
Validates URL (actually, IRIs).
'''
try:
rfc3987.parse(url, rule='IRI')
except:
return False
return True
def url_or_error(url):
"""Return a valid url or None
"""
# if it's not unicode, it must be utf8, otherwise fail
if not isinstance(url, unicode):
try:
url = url.decode('utf8') # noqa - we check if decoding works here
except Exception as e:
logging.exception(e)
return None
# Convert URI to URL if necessary
try:
url = ensure_url(url)
except Exception as e:
logging.exception(e)
return None
# Validate URL
if not validate_url(url):
msg = 'bad url: {} '.format(url)
logging.error(msg)
return None
return url
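# Example usage (illustrative):
#   url_or_error(u'http://example.com/caf\xe9')  # -> 'http://example.com/caf%c3%a9'
#   url_or_error(u'not a url')                   # -> None (logged as an error)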
|
mit
| 2,921,001,869,390,411,000
| 21.75641
| 78
| 0.579155
| false
| 3.705637
| false
| false
| false
|
thruflo/pyramid_redis
|
src/pyramid_redis/hooks.py
|
1
|
4492
|
# -*- coding: utf-8 -*-
"""Provides a ``RedisFactory`` to get a configured redis client from a
settings dictionary, e.g.::
>>> factory = RedisFactory()
>>> client = factory({'redis.url': 'redis://localhost:6379'})
And ``GetRedisClient`` which wraps the factory so it can be used as a
Pyramid request method.
"""
__all__ = [
'GetRedisClient',
'RedisFactory',
]
import logging
logger = logging.getLogger(__name__)
import pyramid.exceptions
import redis
try:
import urlparse
except ImportError: # py3
import urllib.parse as urlparse
from zope.component import getGlobalSiteManager
from zope.interface import Interface
from zope.interface import directlyProvides
class IRedisClientConfiguration(Interface):
"""Marker interface provided by RedisClientConfiguration"""
class RedisClientConfiguration(dict):
"""Parse the application settings into connection pool kwargs."""
def __init__(self, **kwargs):
self.parse_url = kwargs.get('parse_url', urlparse.urlparse)
self.pool_cls = kwargs.get('pool_cls', redis.BlockingConnectionPool)
def __call__(self, settings):
"""Unpack the settings. Parse the url into components and build
a dict to return. As an alternative, you may also provide a
unix_socket_path.
"""
self.clear() # make sure you can reconfigure the client
db = settings.get('redis.db', 0)
config = {'db': int(db)}
if ('redis.unix_socket_path' in settings and
settings['redis.unix_socket_path'] is not None):
config['unix_socket_path'] = settings['redis.unix_socket_path']
elif ('redis.url' in settings and
settings['redis.url'] is not None): # should default to
# `redis://localhost:6379`
# Unpack.
url = settings['redis.url']
# Parse into a config dict.
o = self.parse_url(url)
config.update({
'host': o.hostname,
'port': o.port,
})
if o.password:
config['password'] = o.password
max_connections = settings.get('redis.max_connections', None)
if max_connections is not None:
config['max_connections'] = int(max_connections)
config = {'connection_pool': self.pool_cls(**config)}
else:
raise pyramid.exceptions.ConfigurationError(
"""To use redis with pyramid, redis.url or
redis.unix_socket_path should be provided"""
)
self.update(config)
return self
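# Example (illustrative): settings of
#   {'redis.url': 'redis://:secret@localhost:6379', 'redis.db': '1'}
# reduce to a single 'connection_pool' entry wrapping a BlockingConnectionPool
# built with host='localhost', port=6379, db=1, password='secret'.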
class RedisFactory(object):
def __init__(self, **kwargs):
self.get_registry = kwargs.get('get_registry', getGlobalSiteManager)
self.config = kwargs.get('parse_config', RedisClientConfiguration())
self.provides = kwargs.get('provides', directlyProvides)
self.redis_cls = kwargs.get('redis_cls', redis.StrictRedis)
def __call__(self, settings, registry=None):
"""Returns a ``redis`` client that uses a client configuration
registered in the ``registry`` provided that is, in turn,
configured with the ``settings`` provided.
"""
# If called without a registry, i.e.: not within the context of a
# Pyramid application, then register the connection pool in a
# zope.component registry.
if registry is None:
registry = self.get_registry()
# Query the registry for a client_configuration. If it doesn't exist,
# instantiate and register one for next time.
redis_client_conf = registry.queryUtility(IRedisClientConfiguration)
if not redis_client_conf:
redis_client_conf = self.config(settings) # update RedisClientConf
self.provides(self.config, IRedisClientConfiguration)
registry.registerUtility(self.config,
IRedisClientConfiguration)
# And use it to instantiate a redis client.
return self.redis_cls(**redis_client_conf)
class GetRedisClient(object):
"""Provide the redis factory as a Pyramid request method."""
def __init__(self, **kwargs):
self.redis_factory = kwargs.get('redis_factory', RedisFactory())
def __call__(self, request):
registry = request.registry
return self.redis_factory(registry.settings, registry=registry)
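# Example wiring (a sketch; assumes a Pyramid ``Configurator`` named ``config``):
#   config.add_request_method(GetRedisClient(), 'redis', reify=True)
# after which views can use ``request.redis`` directly.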
|
unlicense
| 5,676,939,344,290,679,000
| 34.370079
| 79
| 0.61821
| false
| 4.460775
| true
| false
| false
|
YufeiZhang/Principles-of-Programming-Python-3
|
Preparing/words.py
|
1
|
1255
|
# words.py
def main():
try:
#txt = open("test_1.txt")
#txt = open("test_2.txt")
#txt = open("test_3.txt")
txt = open("test_4.txt")
#target = input("Enter characters (spaces will be ignored): ")
#target = "cluuud IN DeD 23*"
target = "NSCRT - oooe+*"
except OSError:
print("OSError: Cannot find the file.")
string = target.lower()
lines = [line.strip() for line in txt]
all_words = []
for line in lines:
words = line.split()
for word in words:
if not word[-1].isalpha(): word = word[:-1]
if word.lower() not in all_words:
all_words.append(word.lower())
all_words = sorted(all_words)
#print(all_words)
is_in = {}
for word in all_words:
flag = 1
for char in word:
if char == '.':
pass
else:
if char in string:
pass
else:
flag = 0
break
if flag:
if len(word) not in is_in:
is_in[len(word)] = [word]
else:
is_in[len(word)].append(word)
is_in = sorted(is_in.items(), key = lambda x:x[0])
for key in is_in:
print("Words of length {:d} built from these characters, in lexicographic order:".format(key[0]))
for ch in key[1]:
print('\t', ch)
if __name__ == '__main__':
main()
|
gpl-3.0
| -474,582,302,238,988,500
| 18.936508
| 99
| 0.588048
| false
| 2.687366
| false
| false
| false
|
NERC-CEH/jules-jasmin
|
majic/joj/tests/functional/test_model_run_pre_create.py
|
1
|
3962
|
# Majic
# Copyright (C) 2014 CEH
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
from urlparse import urlparse
from hamcrest import *
from joj.tests import *
from joj.services.model_run_service import ModelRunService
from joj.utils import constants
from joj.model import session_scope, Session, User
from joj.services.user import UserService
class TestModelRunControllerPreCreate(TestController):
def setUp(self):
super(TestModelRunControllerPreCreate, self).setUp()
self.clean_database()
def test_GIVEN_nothing_WHEN_navigate_to_create_or_redirect_THEN_create_run_page_shown(self):
self.login()
response = self.app.get(
url(controller='model_run', action='pre_create'))
assert_that(response.status_code, is_(302), "Response is redirect")
assert_that(urlparse(response.response.location).path, is_(url(controller='model_run', action='create')), "url")
def test_GIVEN_user_over_quota_WHEN_navigate_to_precreate_THEN_index_shown(self):
user = self.login()
self.create_run_model(storage_in_mb=user.storage_quota_in_gb * 1024 + 1, name="big_run", user=user)
response = self.app.get(
url(controller='model_run', action='pre_create'))
assert_that(response.status_code, is_(302), "Response is redirect")
assert_that(urlparse(response.response.location).path, is_(url(controller='model_run', action='index')), "url")
def test_GIVEN_model_created_and_user_not_seen_page_WHEN_navigate_to_create_or_redirect_THEN_create_run_page_shown(self):
user = self.login()
self.create_run_model(storage_in_mb=0, name="big_run", user=user, status=constants.MODEL_RUN_STATUS_CREATED)
response = self.app.get(
url(controller='model_run', action='pre_create'))
assert_that(response.status_code, is_(302), "Response is redirect")
assert_that(urlparse(response.response.location).path, is_(url(controller='model_run', action='create')), "url")
def test_GIVEN_model_created_and_user_action_set_WHEN_navigate_to_create_or_redirect_THEN_user_action_page_shown(self):
user = self.login()
user_service = UserService()
user_service.set_current_model_run_creation_action(user, "driving_data")
self.create_run_model(storage_in_mb=0, name="big_run", user=user, status=constants.MODEL_RUN_STATUS_CREATED)
response = self.app.get(
url(controller='model_run', action='pre_create'))
assert_that(response.status_code, is_(302), "Response is redirect")
assert_that(urlparse(response.response.location).path, is_(url(controller='model_run', action='driving_data')), "url")
def test_GIVEN_no_model_created_and_user_action_set_WHEN_navigate_to_create_or_redirect_THEN_create_page_shown(self):
user = self.login()
user_service = UserService()
user_service.set_current_model_run_creation_action(user, "driving_data")
response = self.app.get(
url(controller='model_run', action='pre_create'))
assert_that(response.status_code, is_(302), "Response is redirect")
assert_that(urlparse(response.response.location).path, is_(url(controller='model_run', action='create')), "url")
|
gpl-2.0
| -5,171,435,831,803,058,000
| 44.022727
| 126
| 0.693337
| false
| 3.556553
| true
| false
| false
|
google/mirandum
|
alerts/streamjar/models.py
|
1
|
1874
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.db import models
import main.models
import json
import iso8601
class StreamjarUpdate(main.models.Updater):
access_token = models.CharField(max_length=255)
class StreamjarEvent(main.models.UpdaterEvent):
details = models.TextField()
updater = models.ForeignKey(StreamjarUpdate)
def as_dict(self):
details = json.loads(self.details)
name = details.get("name", "Anonymous")
amount = " ".join([str(details['amount']), details['currency']])
timestamp = iso8601.parse_date(details['created_at'])
info = {
'name': name,
'amount': amount,
'comment': details['message'],
'donation_amount': float(details['amount']),
'currency': details['currency'],
'timestamp': timestamp,
}
return info
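# Example (illustrative): a details payload of
#   {"name": "Ada", "amount": "5.00", "currency": "USD",
#    "message": "Hi!", "created_at": "2016-01-01T00:00:00Z"}
# yields amount "5.00 USD", donation_amount 5.0 and a parsed UTC timestamp.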
class StreamjarAlertConfig(main.models.AlertConfig):
blacklist = models.TextField(blank=True, null=True)
filter_type = models.CharField(max_length=20, choices=(
('1equal', 'Equals'),
('2gt', 'Greater than'),
('3default', 'Default'),
), default='3default', help_text="When filtering for specific amounts, comparison to use.")
filter_amount = models.FloatField(blank=True, null=True)
|
apache-2.0
| -5,070,340,945,048,618,000
| 37.244898
| 95
| 0.670224
| false
| 4.012848
| false
| false
| false
|