| column | dtype | range |
|---|---|---|
| repo_name | string | lengths 5–100 |
| path | string | lengths 4–231 |
| language | string | 1 class |
| license | string | 15 classes |
| size | int64 | 6–947k |
| score | float64 | 0–0.34 |
| prefix | string | lengths 0–8.16k |
| middle | string | lengths 3–512 |
| suffix | string | lengths 0–8.17k |

quecolectivo/server | djangoserver/quecolectivo/api/admin.py | Python | gpl-3.0 | 211 | 0.009479

from django.contrib.gis import admin
from .models import Line, Point, Polygon, Roads
@admin.register(Line, Point, Polygon, Roads)
class OSMAdmin(admin.OSMGeoAdmin):
fields = ('way', 'osm_id', 'ref', 'name')
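
Note on the sample above (editorial): @admin.register accepts several models at
once, so Line, Point, Polygon and Roads all share the same OSMGeoAdmin options,
and OSMGeoAdmin renders each model's geometry field on an OpenStreetMap widget.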

VeeSot/blog | auth/views.py | Python | gpl-2.0 | 2,792 | 0.0015

import asjson
from flask.views import MethodView
from functools import wraps
from flask.ext.mongoengine.wtf import model_form
from flask import request, render_template, Blueprint, redirect, abort, session, make_response
from .models import User, SessionStorage
from mongoengine import DoesNotExist
auth = Blueprint('auth', __name__, template_folder='templates')
class UserAuth(MethodView):
@staticmethod
def get():
form = model_form(User)(request.form)
return render_template('auth/index.html', form=form)
@staticmethod
def post():
if request.form:
try:
username = request.form['name']
password = request.form['password']
user = User.objects.get(name=username)
if user and user.password == password:
# prepare response/redirect
response = make_response(redirect('/panel_control'))
if 'session' in request.cookies:
session_id = request.cookies['session']
else:
session_id = session['csrf_token']
# Setting user-cookie
response.set_cookie('session_id', value=session_id)
# Afterwards we update the session storage (remove the old record, add a new one)
record = SessionStorage()
record.remove_old_session(username)
record.user = username
record.session_key = session_id
record.save()
# And redirect to admin-panel
return response
else:
raise DoesNotExist
except DoesNotExist:
return abort(401)
@staticmethod
def is_admin():
# Fish the cookies out of several places, since they may also be sent as a header attribute
cookies = request.cookies
if not cookies:  # Nothing found on the first pass; try to pull them from the header
try:
cookies = asjson.loads(request.headers['Set-Cookie'])
except KeyError:
pass
if 'session_id' in cookies:
session_id = cookies['session_id']
return bool(SessionStorage.objects.filter(session_key=session_id))
else:
return False
def requires_auth(f):
@wraps(f)
def decorated(*args, **kwargs):
if not UserAuth.is_admin():
return redirect('auth')
return f(*args, **kwargs)
return decorated
auth.add_url_rule('/auth/', view_func=UserAuth.as_view('auth'))
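
A minimal usage sketch (editorial addition; the view name and template are
hypothetical, not part of the sample): any view wrapped with requires_auth
redirects anonymous visitors to the login view registered above.

@auth.route('/panel_control')
@requires_auth
def panel_control():
    # Only reached when UserAuth.is_admin() finds a valid session.
    return render_template('panel_control/index.html')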

raeeschachar/edx-e2e-mirror | regression/pages/studio/studio_home.py | Python | agpl-3.0 | 2,009 | 0

"""
Dashboard page for Studio
"""
from edxapp_acceptance.pages.studio.index import DashboardPage
from bok_choy.promise import BrokenPromise
from regression.pages.studio import BASE_URL
from regression.pages.lms import BASE_URL_LMS
class DashboardPageExtended(DashboardPage):
"""
This class is an extended class of Studio Dashboard Page,
where we add methods that are different or not used in DashboardPage
"""
url = BASE_URL + '/home'
def is_browser_on_page(self):
"""
Verifies if the browser is on the correct page
"""
return self.q(css='.courses-tab.active').present
def select_course(self, course_title):
"""
Selects the course we want to perform tests on
"""
course_names = self.q(css='.course-link h3')
for vals in course_names:
if course_title in vals.text:
vals.click()
return
raise BrokenPromise('Course title not found')
def click_logout_button(self):
"""
Clicks the username drop down, then the logout button
"""
self.q(css='.account-username').click()
self.wait_for_element_visibility(
'.action-signout', 'Sign out button visibility')
self.q(css='.action-signout').click()
def click_view_live_button(self):
"""
Clicks view live button
"""
self.browser.execute_script(
"document.querySelectorAll('[data-course-key = \"course-v1:"
"ArbiRaees+AR-1000+fall\"] .view-button')[0].click();")
self.browser.switch_to_window(self.browser.window_handles[-1])
def click_terms_of_service(self):
"""
Clicks Terms of Service link
"""
self.q(css='a[href="' + BASE_URL_LMS + '/edx-terms-service"]').click()
def click_privacy_policy(self):
"""
Clicks Privacy Policy link
"""
self.q(
css='a[href="' + BASE_URL_LMS + '/edx-privacy-policy"]').click()

dhp-denero/LibrERP | report_aeroo_ooo/installer.py | Python | agpl-3.0 | 6,702 | 0.008207

# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2008-2012 Alistek Ltd (http://www.alistek.com) All Rights Reserved.
# General contacts <info@alistek.com>
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs.
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company.
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This module is GPLv3 or newer and incompatible
# with OpenERP SA "AGPL + Private Use License"!
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from osv import fields
from osv import osv
import netsvc
import tools
from xml.dom import minidom
import os, base64
import urllib2
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
from tools.translate import _
from report_aeroo_ooo.DocumentConverter import DocumentConversionException
from report_aeroo_ooo.report import OpenOffice_service
from report_aeroo.report_aeroo import aeroo_lock
_url = 'http://www.alistek.com/aeroo_banner/v6_1_report_aeroo_ooo.png'
class aeroo_config_installer(osv.osv_memory):
_name = 'aeroo_config.installer'
_inherit = 'res.config.installer'
_rec_name = 'host'
_logo_image = None
def _get_image(self, cr, uid, context=None):
if self._logo_image:
return self._logo_image
try:
im = urllib2.urlopen(_url.encode("UTF-8"))
if im.headers.maintype!='image':
raise TypeError(im.headers.maintype)
except Exception, e:
path = os.path.join('report_aeroo','config_pixmaps','module_banner.png')
image_file = file_data = tools.file_open(path,'rb')
try:
file_data = image_file.read()
self._logo_image = base64.encodestring(file_data)
return self._logo_image
finally:
image_file.close()
else:
self._logo_image = base64.encodestring(im.read())
return self._logo_image
def _get_image_fn(self, cr, uid, ids, name, args, context=None):
image = self._get_image(cr, uid, context)
return dict.fromkeys(ids, image) # ok to use .fromkeys() as the image is same for all
_columns = {
'host':fields.char('Host', size=64, required=True),
'port':fields.integer('Port', required=True),
'ooo_restart_cmd': fields.char('OOO restart command', size=256, \
help='Enter the shell command that will be executed to restart the LibreOffice/OpenOffice background process.'+ \
'The command will be executed as the user of the OpenERP server process,'+ \
'so you may need to prefix it with sudo and configure your sudoers file to have this command executed without password.'),
'state':fields.selection([
('init','Init'),
('error','Error'),
('done','Done'),
],'State', select=True, readonly=True),
'msg': fields.text('Message', readonly=True),
'error_details': fields.text('Error Details', readonly=True),
'link':fields.char('Installation Manual', size=128, help='Installation (Dependencies and Base system setup)', readonly=True),
'config_logo': fields.function(_get_image_fn, string='Image', type='binary', method=True),
}
def default_get(self, cr, uid, fields, context=None):
config_obj = self.pool.get('oo.config')
data = super(aeroo_config_installer, self).default_get(cr, uid, fields, context=context)
ids = config_obj.search(cr, 1, [], context=context)
if ids:
res = config_obj.read(cr, 1, ids[0], context=context)
del res['id']
data.update(res)
return data
def check(self, cr, uid, ids, context=None):
config_obj = self.pool.get('oo.config')
data = self.read(cr, uid, ids, ['host','port','ooo_restart_cmd'])[0]
del data['id']
config_id = config_obj.search(cr, 1, [], context=context)
if config_id:
config_obj.write(cr, 1, config_id, data, context=context)
else:
config_id = config_obj.create(cr, 1, data, context=context)
try:
fp = tools.file_open('report_aeroo_ooo/test_temp.odt', mode='rb')
file_data = fp.read()
DC = netsvc.Service._services.setdefault('openoffice', \
OpenOffice_service(cr, data['host'], data['port']))
with aeroo_lock:
DC.putDocument(file_data)
DC.saveByStream()
fp.close()
DC.closeDocument()
del DC
except DocumentConversionException, e:
netsvc.Service.remove('openoffice')
error_details = str(e)
state = 'error'
except Exception, e:
error_details = str(e)
state = 'error'
else:
error_details = ''
state = 'done'
if state=='error':
msg = _('Connection to OpenOffice.org instance was not established or conversion to PDF unsuccessful!')
else:
msg = _('Connection to the OpenOffice.org instance was successfully established and PDF conversion is working.')
return self.write(cr, uid, ids, {'msg':msg,'error_details':error_details,'state':state})
_defaults = {
'config_logo': _get_image,
'host':'localhost',
'port':8100,
'ooo_restart_cmd': 'sudo /etc/init.d/libreoffice restart',
'state':'init',
'link':'http://www.alistek.com/wiki/index.php/Aeroo_Reports_Linux_server#Installation_.28Dependencies_and_Base_system_setup.29',
}
aeroo_config_installer()

github/codeql | python/ql/test/query-tests/Classes/overwriting-attribute/overwriting_attribute.py | Python | mit | 451 | 0.015521

#Attribute set in both superclass and subclass
class C(object):
def __init__(self):
self.var = 0
class D(C):
def __init__(self):
self.var = 1 # self.var will be overwritten
C.__init__(self)
#Attribute set in both superclass and subclass
class E(object):
def __init__(self):
self.var = 0 # self.var will be overwritten
class F(E):
def __init__(self):
E.__init__(self)
self.var = 1
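
A sketch of the usual fix for the overwriting-attribute alert (editorial
addition; class G is hypothetical): initialize the superclass first, as class F
does, so the subclass assignment is the one that survives.

class G(C):
    def __init__(self):
        # Superclass sets self.var = 0 first; the line below then wins.
        C.__init__(self)
        self.var = 1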

b0nk/botxxy | src/tpb.py | Python | gpl-2.0 | 241 | 0.016598

import feedparser
def getLatest():
feed = feedparser.parse("http://rss.thepiratebay.se/0")
title = feed['entries'][0]['title']
link = feed['entries'][0]['comments'].replace('http://', 'https://')
return "%s - %s" % (title, link)

MiracleWong/PythonBasic | PythonExcel/testExcel.py | Python | mit | 471 | 0.012739

#!/usr/bin/python
#-*- coding: utf-8 -*-
from xlrd import open_workbook
x_data1=[]
y_data1=[]
wb = open_workbook('phase_detector.xlsx')
for s in wb.sheets():
print 'Sheet:',s.name
for row in range(s.nrows):
print 'the row is:',row+1
values = []
for col in range(s.ncols):
values.append(s.cell(row,col).value)
print values
x_data1.append(values[0])
y_data1.append(values[1])
print x_data1
print y_data1

MilkyWeb/dyndns | install.py | Python | mit | 881 | 0.010216

#!/usr/bin/python3
import sys
import os
def printUsage():
sys.exit('Usage: %s server|client' % sys.argv[0])
if ((len(sys.argv)!=2) or (sys.argv[1] != 'client') and (sys.argv[1] != 'server')):
printUsage()
print("Generating daemon script\n")
fileContents = open('dyndns.sh').read( os.path.getsize('dyndns.sh') )
fileContents = fileContents.replace('{DYNDNS_PATH}', os.getcwd())
fileContents = fileContents.replace('{VERSION}', sys.argv[1])
fileContents = fileContents.replace('{USER}', os.getlogin())
print("Writing daemon script in /etc/init.d\n")
daemonPath = '/etc/init.d/dyndns'
daemon = open(daemonPath, 'w')
daemon.write(fileContents)
daemon.close()
print('Changing permissions\n')
os.chmod(daemonPath, 0o755)
print('Installing the init script')
os.system('update-rc.d dyndns defaults')
print('done.\nYou can start the service by using:\nsudo service dyndns start')

leprikon-cz/leprikon | leprikon/views/journals.py | Python | bsd-3-clause | 5,762 | 0.001736

from django.http import Http404
from django.shortcuts import get_object_or_404
from django.urls.base import reverse_lazy as reverse
from django.utils.translation import ugettext_lazy as _
from ..forms.journals import JournalEntryForm, JournalForm, JournalLeaderEntryForm
from ..models.journals import Journal, JournalEntry, JournalLeaderEntry, Subject
from .generic import CreateView, DeleteView, DetailView, TemplateView, UpdateView
class AlternatingView(TemplateView):
template_name = "leprikon/alternating.html"
def get_title(self):
return _("Alternating in school year {}").format(self.request.school_year)
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context["alternate_leader_entries"] = self.request.leader.get_alternate_leader_entries(self.request.school_year)
return context
class JournalQuerySetMixin:
def get_queryset(self):
qs = super().get_queryset()
if not self.request.user.is_staff:
qs = qs.filter(leaders=self.request.leader)
return qs
class JournalView(JournalQuerySetMixin, DetailView):
model = Journal
template_name_suffix = "_journal"
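
Note on the mixin above (editorial): because JournalQuerySetMixin precedes
DetailView in JournalView's bases, its get_queryset() wraps the generic
implementation via super(), so non-staff users only ever see journals they lead.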
class JournalCreateView(CreateView):
model = Journal
form_class = JournalForm
template_name = "leprikon/journal_form.html"
title = _("New journal")
def dispatch(self, request, subject):
kwargs = {"id": subject}
if not self.request.user.is_staff:
kwargs["leaders"] = self.request.leader
self.subject = get_object_or_404(Subject, **kwargs)
self.success_url = reverse("leprikon:subject_journals", args=(self.subject.subject_type.slug, self.subject.id))
return super().dispatch(request)
def get_form_kwargs(self):
kwargs = super().get_form_kwargs()
kwargs["subject"] = self.subject
return kwargs
def get_message(self):
return _("New journal {} has been created.").format(self.object)
class JournalUpdateView(JournalQuerySetMixin, UpdateView):
model = Journal
form_class = JournalForm
success_url = reverse("leprikon:summary")
template_name = "leprikon/
|
journal_form.html"
title = _("Change journal")
class JournalDeleteView(DeleteView):
model = Journal
title = _("Delete journal")
message = _("Journal has been deleted.")
def get_queryset(self):
qs = super().get_queryset()
if not self.request.user.is_staff:
qs = qs.filter(subject__leaders=self.request.leader)
return qs
def get_object(self):
obj = super().get_object()
if obj.all_journal_entries:
raise Http404()
return obj
def get_question(self):
return _("Do You really want to delete the journal {}?").format(self.object)
class JournalEntryCreateView(CreateView):
model = JournalEntry
form_class = JournalEntryForm
template_name = "leprikon/journalentry_form.html"
title = _("New journal entry")
message = _("The journal entry has been created.")
def dispatch(self, request, *args, **kwargs):
if self.request.user.is_staff:
self.journal = get_object_or_404(Journal, id=int(kwargs.pop("journal")))
else:
self.journal = get_object_or_404(Journal, id=int(kwargs.pop("journal")), leaders=self.request.leader)
return super().dispatch(request, *args, **kwargs)
def get_form_kwargs(self):
kwargs = super().get_form_kwargs()
kwargs["journal"] = self.journal
return kwargs
class JournalEntryUpdateView(UpdateView):
model = JournalEntry
form_class = JournalEntryForm
template_name = "leprikon/journalentry_form.html"
title = _("Change journal entry")
message = _("The journal entry has been updated.")
def get_object(self):
obj = super().get_object()
if self.request.user.is_staff or self.request.leader in obj.journal.all_leaders + obj.all_alternates:
return obj
else:
raise Http404()
class JournalEntryDeleteView(DeleteView):
model = JournalEntry
title = _("Delete journal entry")
message = _("The journal entry has been deleted.")
def get_queryset(self):
qs = super().get_queryset()
if not self.request.user.is_staff:
qs = qs.filter(journal__leaders=self.request.leader)
return qs
def get_object(self):
obj = super().get_object()
if obj.affects_submitted_timesheets:
raise Http404()
return obj
def get_question(self):
return _("Do You really want to delete journal entry?")
class JournalLeaderEntryUpdateView(UpdateView):
model = JournalLeaderEntry
form_class = JournalLeaderEntryForm
template_name = "leprikon/journalleaderentry_form.html"
title = _("Change timesheet entry")
message = _("The timesheet entry has been updated.")
def get_object(self):
obj = super().get_object()
if (
self.request.user.is_staff
or obj.timesheet.leader == self.request.leader
or self.request.leader in obj.journal_entry.journal.all_leaders
):
return obj
else:
raise Http404()
class JournalLeaderEntryDeleteView(DeleteView):
model = JournalLeaderEntry
title = _("Delete timesheet entry")
message = _("The timesheet entry has been deleted.")
def get_queryset(self):
return (
super()
.get_queryset()
.filter(
timesheet__leader=self.request.leader,
timesheet__submitted=False,
)
)
def get_question(self):
return _("Do You really want to delete timesheet entry?")

shrekshao/Polyhedron3D | assets/models/test/txt2json_parser.py | Python | mit | 8,122 | 0.006156

import json
from sets import Set
from sys import maxint
import math
# tmp hacky functions for vec3
def norm2 (a):
return dot(a, a)
def dot ( a, b ):
return a[0] * b[0] + a[1] * b[1] + a[2] * b[2]
def area (a, b, c):
u = [ b[0] - a[0], b[1] - a[1], b[2] - a[2] ]
v = [ c[0] - a[0], c[1] - a[1], c[2] - a[2] ]
dot_uv = dot(u, v)
cross2 = norm2(u) * norm2(v) - dot_uv * dot_uv
return math.sqrt(cross2) * 0.5
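
The cross2 expression above is Lagrange's identity, |u x v|**2 = |u|**2 * |v|**2 - (u . v)**2,
so area() returns half the cross-product magnitude. A quick sanity check
(editorial addition): a unit right triangle in the xy-plane has area 0.5.

assert abs(area([0, 0, 0], [1, 0, 0], [0, 1, 0]) - 0.5) < 1e-12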
class DiagramJson:
def __init__(self):
self.json = {
'form': {
'vertices': {},
'vertices_2_force_faces': {}, # face array
'vertices_2_force_cells': {},
'vertices_external': None, # converted from set: vid: 1
'edges': {}
},
'force': {
'vertices': {},
'edges': {},
'faces_e': {},
'faces_v': {},
'cells': {}
},
'strength_scaler': {
'min': maxint,
'max': 0
},
'force_face_2_strength': {}
}
class Txt2JsonParser:
def __init__(self):
self.diagramJson = DiagramJson()
# # tmp data structures used only when parsing
# self.form_edge_2_vertex = {}
self.force_face_2_form_edge = {}  # inverse index, used to calculate edge width, i.e. the area of force faces (strength)
# self.form_vertex_external_count = {} # vid: count - 0, 1, 2
def readFormVertex(self, filename):
f = open(filename)
v = self.diagramJson.json['form']['vertices']
v2fa = self.diagramJson.json['form']['vertices_2_force_faces']
for line in f:
vertex = line.strip().split('\t')
# print vertex
v[vertex[0]] = map(float, vertex[1:])
# create array for form_vertices to force_face array (cells)
v2fa[vertex[0]] = []
# print self.diagramJson.json
f.close()
def readFormEdge(self, filename_edge_vertex, filename_edge_to_force_face, filename_edge_ex):
f_edge_vertex = open(filename_edge_vertex)
edges = self.diagramJson.json['form']['edges']
for line in f_edge_vertex:
edge = line.strip().split('\t')
e = edges[edge[0]] = {}
e['vertex'] = edge[1:]
# e['external'] = False
# print edge[0], e['vertex']
# print edges
f_edge_vertex.close()
v2fa = self.diagramJson.json['form']['vertices_2_force_faces']
f_edge_to_force_face = open(filename_edge_to_force_face)
for line in f_edge_to_force_face:
edge = line.strip().split('\t')
f = edge[1] if edge[1] != "Null" else None
edges[edge[0]]['force_face'] = f
edge_vertex = edges[edge[0]]['vertex']
for v in edge_vertex:
v2fa[v].append(f)
# force_face_2_form_edge (tmp structure) for compute strength
if f != None:
self.force_face_2_form_edge[f] = edge[0]
f_edge_to_force_face.close()
vertex_ex_set = Set()
f_edge_ex = open(filename_edge_ex)
for line in f_edge_ex:
edge = line.strip().split('\t')
for e in edge:
edges[e]['external'] = True
vertex_ex_set.add(edges[e]['vertex'][0])
vertex_ex_set.add(edges[e]['vertex'][1])
f_edge_ex.close()
self.diagramJson.json['form']['vertices_external'] = dict.fromkeys(vertex_ex_set, 1)
# label external force edge
for e in edges:
is_ex_vertex_0 = edges[e]['vertex'][0] in vertex_ex_set
is_ex_vertex_1 = edges[e]['vertex'][1] in vertex_ex_set
if is_ex_vertex_0 != is_ex_vertex_1:
# print edges[e]['vertex'][0], ':', is_ex_vertex_0, ' , ', edges[e]['vertex'][1], ':', is_ex_vertex_1
# force vector: from v0 to v1
edges[e]['ex_force'] = True
# print edges
# print self.diagramJson.json
def readForceVertex(self, filename):
f = open(filename)
v = self.diagramJson.json['force']['vertices']
for line in f:
vertex = line.strip().split('\t')
# print vertex
v[vertex[0]] = map(float, vertex[1:])
# print self.diagramJson.json
f.close()
def readForceEdge(self, filename_edge_vertex):
f_edge_vertex = open(filename_edge_vertex)
edges = self.diagramJson.json['force']['edges']
for line in f_edge_vertex:
edge = line.strip().split('\t')
edges[edge[0]] = edge[1:]
# print edges
f_edge_vertex.close()
# print self.diagramJson.json
def readForceFaceEdge(self, filename_face_edge):
f_face_edge = open(filename_face_edge)
edges = self.diagramJson.json['force']['edges']
faces_e = self.diagramJson.json['force']['faces_e']
# faces_v = self.diagramJson.json['force']['faces_v']
for line in f_face_edge:
face = line.strip().split('\t')
faces_e[face[0]] = face[1:]
# # convert face edge to face vertex
# cur_face_vertex = Set()
# for e in face[1:]:
# # extend vertex array
# # cur_face_vertex.extend(edges[e])
# for v in edges[e]:
# cur_face_vertex.add(v)
# faces_v[face[0]] = list(cur_face_vertex)
# print faces_v[face[0]]
f_face_edge.close()
# print self.diagramJson.json
def readForceFaceVertex(self, filename_face_vertex):
f_face_vertex = open(filename_face_vertex)
# fan shape order
faces_v = self.diagramJson.json['force']['faces_v']
strengthScaler = self.diagramJson.json['strength_scaler']
force_face_2_strength = self.diagramJson.json['force_face_2_strength']
v = self.diagramJson.json['force']['vertices']
e = self.diagramJson.json['form']['edges']
for line in f_face_vertex:
face = line.strip().split('\t')
faces_v[face[0]] = face[1:]
strength = 0
if len(face) == 4:
# tri
strength = area( v[face[1]], v[face[2]], v[face[3]] )
elif len(face) == 5:
# quad
strength = area( v[face[1]], v[face[2]], v[face[3]] ) + area( v[face[1]], v[face[3]], v[face[4]] )
else:
print 'Error: face ', face[0], ' is not tri or quad!!'
# if face[0] == '17f' or face[0] == '19f':
# print face[0], face[1:], map( lambda vid: v[vid], face[1:] ), area(v[face[1]], v[face[2]], v[face[3]]), strength
# e[ self.force_face_2_form_edge[face[0]] ]['strength'] = strength
force_face_2_strength[ face[0] ] = strength
curEdge = e[ self.force_face_2_form_edge[face[0]] ]
if 'external' not in curEdge and 'ex_force' not in curEdge:
strengthScaler['max'] = max(strength, strengthScaler['max'])
strengthScaler['min'] = min(strength, strengthScaler['min'])
f_face_vertex.close()
if __name__ == "__main__":
# foldername = "example_01"
# foldername = "example_02"
# foldername = "example_03"
foldername = "example_04"
parser = Txt2JsonParser()
parser.readFormVertex(foldername + "/form_v.txt")
parser.readFormEdge(foldername + "/form_e_v.txt", \
foldername + "/form_e_to_force_f.txt", \
foldername + "/form_e_ex.txt")
parser.readForceVertex(foldername + "/force_v.txt")
parser.readForceEdge(foldername + "/force_e_v.txt")
# parser.readForceFaceEdge(foldername + "/force_f_e.txt")
parser.readForceFaceVertex(foldername + "/force_f_v.txt")
with open(foldername + '/diagram.json', 'w') as out:
json.dump(parser.diagramJson.json, out)

ssinger/skytools-cvs | python/setadm.py | Python | isc | 159 | 0.012579

#! /usr/bin/env python
import sys, pgq.setadmin
if __name__ == '__main__':
script = pgq.setadmin.SetAdmin('set_admin', sys.argv[1:])
script.start()

wheldom01/privacyidea | privacyidea/lib/auth.py | Python | agpl-3.0 | 4,375 | 0

# -*- coding: utf-8 -*-
#
# 2015-11-03 Cornelius Kölbel <cornelius@privacyidea.org>
# Add check if an admin user exists
# 2014-12-15 Cornelius Kölbel, info@privacyidea.org
# Initial creation
#
# (c) Cornelius Kölbel
# Info: http://www.privacyidea.org
#
# This code is free software; you can redistribute it and/or
# modify it under the terms of the GNU AFFERO GENERAL PUBLIC LICENSE
# License as published by the Free Software Foundation; either
# version 3 of the License, or any later version.
#
# This code is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU AFFERO GENERAL PUBLIC LICENSE for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from privacyidea.models import Admin
from privacyidea.lib.token import check_user_pass
from privacyidea.lib.policydecorators import libpolicy, login_mode
from privacyidea.lib.crypto import hash_with_pepper, verify_with_pepper
class ROLE(object):
ADMIN = "admin"
USER = "user"
VALIDATE = "validate"
def verify_db_admin(username, password):
"""
This function is used to verify the username and the password against the
database table "Admin".
:param username: The administrator username
:param password: The password
:return: True if password is correct for the admin
:rtype: bool
"""
success = False
qa = Admin.query.filter(Admin.username == username).first()
if qa:
success = verify_with_pepper(qa.password, password)
return success
def db_admin_exist(username):
"""
Checks if a local admin in the database exists
:param username: The username of the admin
:return: True, if exist
"""
return bool(get_db_admin(username))
def create_db_admin(app, username, email=None, password=None):
pw_dig = None
if password:
pw_dig = hash_with_pepper(password)
user = Admin(email=email, username=username, password=pw_dig)
user.save()
def list_db_admin():
admins = Admin.query.all()
print("Name \t email")
print(30*"=")
for admin in admins:
print("{0!s} \t {1!s}".format(admin.username, admin.email))
def get_db_admins():
admins = Admin.query.all()
return admins
def get_db_admin(username):
return Admin.query.filter(Admin.username == username).first()
def delete_db_admin(username):
print("Deleting admin {0!s}".format(username))
Admin.query.filter(Admin.username == username).first().delete()
@libpolicy(login_mode)
def check_webui_user(user_obj,
password,
options=None,
superuser_realms=None,
check_otp=False):
"""
This function is used to authenticate the user at the web ui.
It checks against the userstore or against OTP/privacyidea (check_otp).
It returns a tuple of
* true/false if the user authenticated successfully
* the role of the user
* the "detail" dictionary of the response
:param user_obj: The user who tries to authenticate
:type user_obj: User Object
:param password: Password, static and or OTP
:param options: additional options like g and clientip
:type options: dict
:param superuser_realms: list of realms, that contain admins
:type superuser_realms: list
:param check_otp: If set, the user is not authenticated against the
userstore but against privacyidea
:return: tuple of bool, string and dict/None
"""
options = options or {}
superuser_realms = superuser_realms or []
user_auth = False
role = ROLE.USER
details = None
if check_otp:
# check if the given password matches an OTP token
check, details = check_user_pass(user_obj, password, options=options)
if check:
user_auth = True
else:
# check the password of the user against the userstore
if user_obj.check_password(password):
user_auth = True
# If the realm is in the SUPERUSER_REALM then the authorization role
# is raised to "admin".
if user_obj.realm in superuser_realms:
role = ROLE.ADMIN
return user_auth, role, details
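
A hypothetical call site (editorial sketch; the password, options and realm
name are assumptions, not from this file), using user_obj as in the signature
above:

authenticated, role, details = check_webui_user(
    user_obj, 'secret-or-otp',
    options={'clientip': '10.0.0.1'},
    superuser_realms=['adminrealm'])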

TAXIIProject/django-taxii-services | tests/test_query_handler.py | Python | bsd-3-clause | 3,576 | 0.003356

# Copyright (c) 2014, The MITRE Corporation. All rights reserved.
# For license information, see the LICENSE.txt file
from __future__ import absolute_import
from django.conf import settings
from django.test import Client, TestCase
class TETestObj(object):
def __init__(self, target, expected_stubs, expected_operand=None, expected_nsmap=None):
self.target = target
self.expected_stub_set = set(expected_stubs)
self.expected_operand = expected_operand
self.expected_nsmap = expected_nsmap
def check_result(self, xpath_builders, operand=None, nsmap=None):
xpath_stubs = ['/'.join(xb.xpath_parts) for xb in xpath_builders]
xpath_stub_set = set(xpath_stubs)
if self.expected_stub_set != xpath_stub_set:
raise ValueError('Expected XPath Stubs failure!\n'
'Expected: %s\n'
'Actual : %s\n' % (self.expected_stub_set, xpath_stub_set))
if self.expected_operand is not None:
if self.expected_operand != operand:
raise ValueError('Expected operand failure!\n'
'Expected: %s\n'
'Actual : %s\n' % (self.expected_operand, operand))
if self.expected_nsmap is not None:
if self.expected_nsmap != nsmap:
raise ValueError('Expected nsmap failure!\n'
'Expected: %s\n'
'Actual : %s\n' % (self.expected_nsmap, nsmap))
no_wc_001 = TETestObj(target='STIX_Package/STIX_Header/Handling/Marking/Marking_Structure/Terms_Of_Use',
expected_stubs=[
'/stix:STIX_Package/stix:STIX_Header/stix:Handling/marking:Marking/marking:Marking_Structure/'
'terms:Terms_Of_Use',
])
# l_wc_001 = TETestObj(target='**/NameElement',
#                      expected_stubs=['//xal:NameElement', ])
l_wc_002 = TETestObj(target='*/STIX_Header/Title',
expected_stubs=['/*/stix:STIX_Header/stix:Title', ])
l_wc_003 = TETestObj(target='**/@cybox_major_version',
expected_stubs=['//@cybox_major_version',])
m_wc_001 = TETestObj(target='STIX_Package/*/Title',
expected_stubs=['/stix:STIX_Package/*/stix:Title'])
# m_wc_002 = TETestObj(target='STIX_Package/**/NameElement',
# expected_stubs=['/stix:STIX_Package//xal:NameElement'])
t_wc_001 = TETestObj(target='STIX_Package/STIX_Header/*',
expected_stubs=['/stix:STIX_Package/stix:STIX_Header/*',
'/stix:STIX_Package/stix:STIX_Header/@*'])
t_wc_002 = TETestObj(target='STIX_Package/TTPs/**',
expected_stubs=['/stix:STIX_Package/stix:TTPs//*',
'/stix:STIX_Package/stix:TTPs//@*'])
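
Reading of the wildcard cases above (editorial note): '*' matches exactly one
path level, '**' matches any depth, and a trailing wildcard expands to both an
element stub (*) and an attribute stub (@*), which is why t_wc_001 and t_wc_002
each expect two stubs.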
class BaseXmlQueryHandlerTests(TestCase):
def test_01(self):
"""
Test the target_to_xpath_stubs2() function
:return:
"""
test_tes = (no_wc_001,
# l_wc_001, l_wc_002,
l_wc_002,
m_wc_001, # m_wc_002,
t_wc_001, t_wc_002)
from taxii_services.query_handlers.stix_xml_111_handler import StixXml111QueryHandler
for test_te in test_tes:
xpath_builders, nsmap = StixXml111QueryHandler.target_to_xpath_builders(None, test_te.target)
test_te.check_result(xpath_builders, nsmap=nsmap)

rcgee/oq-hazardlib | openquake/hazardlib/gsim/akkar_bommer_2010.py | Python | agpl-3.0 | 21,696 | 0.003134

# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (C) 2012-2016 GEM Foundation
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
"""
Module exports :class:`AkkarBommer2010`,
:class:`AkkarBommer2010SWISS01`,
:class:`AkkarBommer2010SWISS04`,
:class:`AkkarBommer2010SWISS08`.
"""
from __future__ import division
import numpy as np
from scipy.constants import g
from openquake.hazardlib.gsim.base import GMPE, CoeffsTable
from openquake.hazardlib import const
from openquake.hazardlib.imt import PGA, PGV, SA
from openquake.hazardlib.gsim.akkar_bommer_2010_swiss_coeffs import (
COEFFS_FS_ROCK_SWISS01,
COEFFS_FS_ROCK_SWISS04,
COEFFS_FS_ROCK_SWISS08
)
from openquake.hazardlib.gsim.utils_swiss_gmpe import _apply_adjustments
class AkkarBommer2010(GMPE):
"""
Implements GMPE developed by Sinan Akkar and Julian J. Bommer
and published as "Empirical Equations for the Prediction of PGA, PGV,
and Spectral Accelerations in Europe, the Mediterranean Region, and
the Middle East", Seismological Research Letters, 81(2), 195-206.
SA at 4 s (not supported by the original equations) has been added in the
context of the SHARE project and assumed to be equal to SA at 3 s but
scaled with proper factor.
Equation coefficients for PGA and SA periods up to 0.05 seconds have been
taken from updated model as described in 'Extending ground-motion
prediction equations for spectral accelerations to higher response
frequencies', Julian J. Bommer, Sinan Akkar, Stephane Drouet,
Bull. Earthquake Eng. (2012) volume 10, pages 379 - 399.
Coefficients for PGV and SA above 0.05 seconds are taken from the
original 2010 publication.
"""
#: Supported tectonic region type is 'active shallow crust' because the
#: equations have been derived from data from Southern Europe, North
#: Africa, and active areas of the Middle East, as explained in the
# 'Introduction', page 195.
DEFINED_FOR_TECTONIC_REGION_TYPE = const.TRT.ACTIVE_SHALLOW_CRUST
#: Set of :mod:`intensity measure types <openquake.hazardlib.imt>`
#: this GSIM can calculate. A set should contain classes from module
#: :mod:`openquake.hazardlib.imt`.
DEFINED_FOR_INTENSITY_MEASURE_TYPES = set([
PGA,
PGV,
SA
])
#: Supported intensity measure component is the geometric mean of two
#: horizontal components
#: :attr:`~openquake.hazardlib.const.IMC.AVERAGE_HORIZONTAL`, see page 196.
DEFINED_FOR_INTENSITY_MEASURE_COMPONENT = const.IMC.AVERAGE_HORIZONTAL
#: Supported standard deviation types are inter-event, intra-event
#: and total, see equation 2, page 199.
DEFINED_FOR_STANDARD_DEVIATION_TYPES = set([
const.StdDev.TOTAL,
const.StdDev.INTER_EVENT,
const.StdDev.INTRA_EVENT
])
#: Required site parameter is only Vs30 (used to distinguish rock
#: and stiff and soft soil).
REQUIRES_SITES_PARAMETERS = set(('vs30', ))
#: Required rupture parameters are magnitude and rake (eq. 1, page 199).
REQUIRES_RUPTURE_PARAMETERS = set(('rake', 'mag'))
#: Required distance measure is Rjb (eq. 1, page 199).
REQUIRES_DISTANCES = set(('rjb', ))
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
"""
See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values.
"""
# extracting dictionary of coefficients specific to required
# intensity measure type.
C = self.COEFFS[imt]
imean = (self._compute_magnitude(rup, C) +
self._compute_distance(rup, dists, imt, C) +
self._get_site_amplification(sites, imt, C) +
self._get_mechanism(sites, rup, imt, C))
# Convert units to g,
# but only for PGA and SA (not PGV):
if isinstance(imt, (PGA, SA)):
mean = np.log((10.0 ** (imean - 2.0)) / g)
else:
# PGV:
mean = np.log(10.0 ** imean)
# apply scaling factor for SA at 4 s
if isinstance(imt, SA) and imt.period == 4.0:
mean /= 0.8
istddevs = self._get_stddevs(
C, stddev_types, num_sites=len(sites.vs30)
)
stddevs = np.log(10 ** np.array(istddevs))
return mean, stddevs
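# Unit note (editorial addition): imean is log10 of acceleration in
# cm/s**2, so 10 ** (imean - 2) rescales to m/s**2 and dividing by g
# converts to units of g; e.g. imean = 2.0 gives 1.0 / 9.81 ~ 0.102 g.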
def _get_stddevs(self, C, stddev_types, num_sites):
"""
Return standard deviations as defined in table 1, p. 200.
"""
stddevs = []
for stddev_type in stddev_types:
assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
if stddev_type == const.StdDev.TOTAL:
stddevs.append(C['SigmaTot'] + np.zeros(num_sites))
elif stddev_type == const.StdDev.INTRA_EVENT:
stddevs.append(C['Sigma1'] + np.zeros(num_sites))
elif stddev_type == const.StdDev.INTER_EVENT:
stddevs.append(C['tau'] + np.zeros(num_sites))
return stddevs
def _compute_magnitude(self, rup, C):
"""
Compute the first term of the equation described on p. 199:
``b1 + b2 * M + b3 * M**2``
"""
return C['b1'] + (C['b2'] * rup.mag) + (C['b3'] * (rup.mag ** 2))
def _compute_distance(self, rup, dists, imt, C):
"""
Compute the second term of the equation described on p. 199:
``(b4 + b5 * M) * log(sqrt(Rjb ** 2 + b6 ** 2))``
"""
return (((C['b4'] + C['b5'] * rup.mag)
* np.log10((np.sqrt(dists.rjb ** 2.0 + C['b6'] ** 2.0)))))
def _get_site_amplification(self, sites, imt, C):
"""
Compute the third term of the equation described on p. 199:
``b7 * Ss + b8 * Sa``
"""
Ss, Sa = self._get_site_type_dummy_variables(sites)
return (C['b7'] * Ss) + (C['b8'] * Sa)
def _get_site_type_dummy_variables(self, sites):
"""
Get site type dummy variables, ``Ss`` (for soft and stiff soil sites)
and ``Sa`` (for rock sites).
"""
Ss = np.zeros((len(sites.vs30),))
Sa = np.zeros((len(sites.vs30),))
# Soft soil; Vs30 < 360 m/s. Page 199.
idxSs = (sites.vs30 < 360.0)
# Stiff soil Class A; 360 m/s <= Vs30 <= 750 m/s. Page 199.
idxSa = (sites.vs30 >= 360.0) & (sites.vs30 <= 750.0)
Ss[idxSs] = 1
Sa[idxSa] = 1
return Ss, Sa
def _get_mechanism(self, sites, rup, imt, C):
"""
Compute the fourth term of the equation described on p. 199:
``b9 * Fn + b10 * Fr``
"""
Fn, Fr = self._get_fault_type_dummy_variables(sites, rup, imt)
return (C['b9'] * Fn) + (C['b10'] * Fr)
def _get_fault_type_dummy_variables(self, sites, rup, imt):
"""
Same classification of SadighEtAl1997. Akkar and Bommer 2010 is based
on Akkar and Bommer 2007b; read Strong-Motion Dataset and Record
Processing on p. 514 (Akkar and Bommer 2007b).
"""
Fn, Fr = 0, 0
if rup.rake >= -135 and rup.rake <= -45:
# normal
Fn = 1
elif rup.rake >= 45 and rup.rake <= 135:
# reverse
Fr = 1
return Fn, Fr
#: For PGA and SA up to 0.05 seconds, coefficients are taken from table 5,
#: page 385 of 'Extending ground-motion prediction equations for spectral
#: accelerations to higher response frequencies', while for PGV and SA with
#: periods greater than 0.05 coeffi

robrocker7/h1z1map | server/players/migrations/0002_player_last_updated.py | Python | apache-2.0 | 442 | 0

# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('players', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='player',
name='last_updated',
field=models.DateTimeField(null=True, blank=True),
preserve_default=True,
),
]

Snergster/virl-salt | openstack/nova/files/mitaka/nova+network+neutronv2+api.py | Python | gpl-2.0 | 90,885 | 0.000275

# Copyright 2012 OpenStack Foundation
# All Rights Reserved
# Copyright (c) 2012 NEC Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import copy
import time
import uuid
from keystoneauth1 import loading as ks_loading
from neutronclient.common import exceptions as neutron_client_exc
from neutronclient.v2_0 import client as clientv20
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import excutils
from oslo_utils import uuidutils
import six
from nova.api.openstack import extensions
from nova.compute import utils as compute_utils
from nova import exception
from nova.i18n import _, _LE, _LI, _LW
from nova.network import base_api
from nova.network import model as network_model
from nova.network.neutronv2 import constants
from nova import objects
from nova.pci import manager as pci_manager
from nova.pci import request as pci_request
from nova.pci import whitelist as pci_whitelist
neutron_opts = [
cfg.StrOpt('url',
default='http://127.0.0.1:9696',
help='URL for connecting to neutron'),
cfg.StrOpt('region_name',
help='Region name for connecting to neutron in admin context'),
cfg.StrOpt('ovs_bridge',
default='br-int',
help='Default OVS bridge name to use if not specified '
'by Neutron'),
cfg.IntOpt('extension_sync_interval',
default=600,
help='Number of seconds before querying neutron for'
' extensions'),
]
NEUTRON_GROUP = 'neutron'
CONF = cfg.CONF
CONF.register_opts(neutron_opts, NEUTRON_GROUP)
deprecations = {'cafile': [cfg.DeprecatedOpt('ca_certificates_file',
group=NEUTRON_GROUP)],
'insecure': [cfg.DeprecatedOpt('api_insecure',
group=NEUTRON_GROUP)],
'timeout': [cfg.DeprecatedOpt('url_timeout',
group=NEUTRON_GROUP)]}
_neutron_options = ks_loading.register_session_conf_options(
CONF, NEUTRON_GROUP, deprecated_opts=deprecations)
ks_loading.register_auth_conf_options(CONF, NEUTRON_GROUP)
CONF.import_opt('default_floating_pool', 'nova.network.floating_ips')
CONF.import_opt('flat_injected', 'nova.network.manager')
LOG = logging.getLogger(__name__)
soft_external_network_attach_authorize = extensions.soft_core_authorizer(
'network', 'attach_external_network')
_SESSION = None
_ADMIN_AUTH = None
DEFAULT_SECGROUP = 'default'
def list_opts():
opts = copy.deepcopy(_neutron_options)
opts.insert(0, ks_loading.get_auth_common_conf_options()[0])
# NOTE(dims): There are a lot of auth plugins, we just generate
# the config options for a few common ones
plugins = ['password', 'v2password', 'v3password']
for name in plugins:
plugin = ks_loading.get_plugin_loader(name)
for plugin_option in ks_loading.get_auth_plugin_conf_options(plugin):
for option in opts:
if option.name == plugin_option.name:
break
else:
opts.append(plugin_option)
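# for/else note (editorial addition): the inner loop's else branch runs
# only when no existing option matched plugin_option.name, so each plugin
# option is appended at most once.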
opts.sort(key=lambda x: x.name)
return [(NEUTRON_GROUP, opts)]
def reset_state():
global _ADMIN_AUTH
global _SESSION
_ADMIN_AUTH = None
_SESSION = None
def _load_auth_plugin(conf):
auth_plugin = ks_loading.load_auth_from_conf_options(conf, NEUTRON_GROUP)
if auth_plugin:
return auth_plugin
err_msg = _('Unknown auth type: %s') % conf.neutron.auth_type
raise neutron_client_exc.Unauthorized(message=err_msg)
def get_client(context, admin=False):
# NOTE(dprince): In the case where no auth_token is present we allow use of
# neutron admin tenant credentials if it is an admin context. This is to
# support some services (metadata API) where an admin context is used
# without an auth token.
global _ADMIN_AUTH
global _SESSION
auth_plugin = None
if not _SESSION:
_SESSION = ks_loading.load_session_from_conf_options(
CONF, NEUTRON_GROUP)
if admin or (context.is_admin and not context.auth_token):
if not _ADMIN_AUTH:
_ADMIN_AUTH = _load_auth_plugin(CONF)
auth_plugin = _ADMIN_AUTH
elif context.auth_token:
auth_plugin = context.get_auth_plugin()
if not auth_plugin:
# We did not get a user token and we should not be using
# an admin token so log an error
raise neutron_client_exc.Unauthorized()
return clientv20.Client(session=_SESSION,
auth=auth_plugin,
endpoint_override=CONF.neutron.url,
region_name=CONF.neutron.region_name)
def _is_not_duplicate(item, items, items_list_name, instance):
present = item in items
# The expectation from this function's perspective is that the
# item is not part of the items list so if it is part of it
# we should at least log it as a warning
if present:
LOG.warning(_LW("%(item)s already exists in list: %(list_name)s "
"containing: %(items)s. ignoring it"),
{'item': item,
'list_name': items_list_name,
'items': items},
instance=instance)
return not present
class API(base_api.NetworkAPI):
"""API for interacting with the neutron 2.x API."""
def __init__(self, skip_policy_check=False):
super(API, self).__init__(skip_policy_check=skip_policy_check)
self.last_neutron_extension_sync = None
self.extensions = {}
def setup_networks_on_host(self, context, instance, host=None,
teardown=False):
"""Setup or teardown the network structures."""
def _get_available_networks(self, context, project_id,
net_ids=None, neutron=None):
"""Return a network list available for the tenant.
The list contains networks owned by the tenant and public networks.
If net_ids specified, it searches networks with requested IDs only.
"""
if not neutron:
neutron = get_client(context)
if net_ids:
# If user has specified to attach instance only to specific
# networks then only add these to **search_opts. This search will
# also include 'shared' networks.
search_opts = {'id': net_ids}
nets = neutron.list_networks(**search_opts).get('networks', [])
else:
# (1) Retrieve non-public network list owned by the tenant.
search_opts = {'tenant_id': project_id, 'shared': False}
nets = neutron.list_networks(**search_opts).get('networks', [])
# (2) Retrieve public network list.
search_opts = {'shared': True}
nets += neutron.list_networks(**search_opts).get('networks', [])
_ensure_requested_network_ordering(
lambda x: x['id'],
nets,
net_ids)
return nets
def _create_port(self, port_client, instance, network_id, port_req_body,
fixed_ip=None, security_group_ids=None,
available_macs=None, dhcp_opts=None):
"""Attempts to create a port for the instance on the given network.
:param port_client: The client to use to create the port.
:param instance: Create the port for the given instance.
:param network_id: Create the port on the given network.
:param port_req_body: Pre-populated port reque

JarnoRFB/qtpyvis | qtgui/panels/__init__.py | Python | mit | 122 | 0

from .activations import ActivationsPanel
from .experiments import ExperimentsPanel
from .occlusion import OcclusionPanel

chimkentec/KodiMODo_rep | script.module.libtorrent/python_libtorrent/python_libtorrent/functions.py | Python | gpl-3.0 | 5,308 | 0.008855

#-*- coding: utf-8 -*-
'''
python-libtorrent for Kodi (script.module.libtorrent)
Copyright (C) 2015-2016 DiMartino, srg70, RussakHH, aisman
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
'''
import os
import xbmc, xbmcgui, xbmcvfs, xbmcaddon
from net import HTTP
__libbaseurl__ = "https://github.com/DiMartinoXBMC/script.module.libtorrent/raw/master/python_libtorrent"
__settings__ = xbmcaddon.Addon(id='script.module.libtorrent')
__version__ = __settings__.getAddonInfo('version')
__plugin__ = __settings__.getAddonInfo('name') + " v." + __version__
__icon__=os.path.join(xbmc.translatePath('special://home'), 'addons',
'script.module.libtorrent', 'icon.png')
__language__ = __settings__.getLocalizedString
from python_libtorrent.platform_pulsar import get_platform, get_libname
def log(msg):
try:
xbmc.log("### [%s]: %s" % (__plugin__,msg,), level=xbmc.LOGNOTICE )
except UnicodeEncodeError:
xbmc.log("### [%s]: %s" % (__plugin__,msg.encode("utf-8", "ignore"),), level=xbmc.LOGNOTICE )
except:
xbmc.log("### [%s]: %s" % (__plugin__,'ERROR LOG',), level=xbmc.LOGNOTICE )
def getSettingAsBool(setting):
return __settings__.getSetting(setting).lower() == "true"
class LibraryManager():
def __init__(self, dest_path, platform):
self.dest_path = dest_path
self.platform = platform
self.root=os.path.dirname(os.path.dirname(__file__))
def check_exist(self):
for libname in get_libname(self.platform):
if not xbmcvfs.exists(os.path.join(self.dest_path,libname)):
return False
return True
def check_update(self):
need_update=False
for libname in get_libname(self.platform):
if libname!='liblibtorrent.so':
self.libpath = os.path.join(self.dest_path, libname)
self.sizepath=os.path.join(self.root, self.platform['system'], self.platform['version'], libname+'.size.txt')
size=str(os.path.getsize(self.libpath))
size_old=open( self.sizepath, "r" ).read()
if size_old!=size:
need_update=True
return need_update
def update(self):
if self.check_update():
for libname in get_libname(self.platform):
self.libpath = os.path.join(self.dest_path, libname)
xbmcvfs.delete(self.libpath)
self.download()
def download(self):
xbmcvfs.mkdirs(self.dest_path)
for libname in get_libname(self.platform):
dest = os.path.join(self.dest_path, libname)
log("try to fetch %s" % libname)
url = "%s/%s/%s/%s.zip" % (__libbaseurl__, self.platform['system'], self.platform['version'], libname)
if libname!='liblibtorrent.so':
try:
self.http = HTTP()
self.http.fetch(url, download=dest + ".zip", progress=True)
log("%s -> %s" % (url, dest))
xbmc.executebuiltin('XBMC.Extract("%s.zip","%s")' % (dest, self.dest_path), True)
xbmcvfs.delete(dest + ".zip")
except:
text = 'Failed download %s!' % libname
xbmc.executebuiltin("XBMC.Notification(%s,%s,%s,%s)" % (__plugin__,text,750,__icon__))
else:
xbmcvfs.copy(os.path.join(self.dest_path, 'libtorrent.so'), dest)
return True
def android_workaround(self, new_dest_path):
for libname in get_libname(self.platform):
libpath=os.path.join(self.dest_path, libname)
size=str(os.path.getsize(libpath))
new_libpath=os.path.join(new_dest_path, libname)
if not xbmcvfs.exists(new_libpath):
xbmcvfs.copy(libpath, new_libpath)
log('Copied %s -> %s' %(libpath, new_libpath))
else:
new_size=str(os.path.getsize(new_libpath))
if size!=new_size:
xbmcvfs.delete(new_libpath)
xbmcvfs.copy(libpath, new_libpath)
log('Deleted and copied (%s) %s -> (%s) %s' %(size, libpath, new_size, new_libpath))
return new_dest_path

varenius/salsa | USRP/usrp_gnuradio_dev/plot_array_file.py | Python | mit | 2,472 | 0.016181

import matplotlib.pyplot as plt
import numpy as np
import sys
import time
import scipy.signal as sig
infile = sys.argv[1]
indata = np.load(infile)
spec = indata[0]
samp_rate = indata[1]
fftsize = indata[2]
center_freq = indata[3] # MHz
halffft = int(0.5*fftsize)
freqs = 0.5*samp_rate*np.array(range(-halffft,halffft))/(halffft)
#plt.plot(spec)
delta_nu = samp_rate/fftsize
plt.plot(freqs,spec)
plt.xlabel('relative to center [Mhz]')
#plt.figure()
RFI = [[1419.4-0.210, 0.02],
[1419.4-1.937, 0.015],
[1419.4-4.4, 0.015],
[1419.4+3.0, 0.01],
[center_freq, 4*delta_nu], # remove dip in the center of band, always about 4 fft points wide. Use 8, else errors
[1416.4-0.8, 0.04],
[1420.4-2, 0.01],
[1425, 0.01],
[1424.4-1.8, 0.01],
[1424.4+0.5845, 0.01],
[1424.4+0.483, 0.005],
]
flags = []
#plt.plot(spec)
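# RFI excision note (editorial addition): each [frequency, width] pair above is
# mapped to channel indices via index = (freq - ch0_freq) / delta_nu; a straight
# line is then fitted to the clean channels on either side of the window (the
# window itself gets weight 0) and the contaminated channels are replaced by
# the fitted values.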
for item in RFI:
RFI_freq = item[0]
RFI_width = item[1]
ch0_freq = center_freq - 0.5*samp_rate
ind_low = int(np.floor((RFI_freq-0.5*RFI_width - ch0_freq)/delta_nu))
ind_high = int(np.ceil((RFI_freq+0.5*RFI_width - ch0_freq)/delta_nu))
if ind_low>0 and ind_high<len(spec):
margin = min(ind_high-ind_low, ind_low, len(spec)-ind_high)
RFI_part = spec[ind_low-margin:ind_high+margin]
xdata = np.arange(len(RFI_part))
weights = np.ones_like(RFI_part)
weights[margin:-margin] = 0.0 # Ignore RFI when fitting
pf = np.polyfit(xdata, RFI_part, deg=1, w=weights)
interpdata = np.polyval(pf, xdata)
#plt.figure()
#plt.plot(xdata, interpdata)
spec[ind_low:ind_high] = interpdata[margin:-margin]
else:
print 'Ignoring', item
plt.figure()
calspec = spec * 750/1.6
plt.plot(calspec)
plt.ylabel('Roughly [K]')
#plt.figure()
#fftsize = 0.8*fftsize
#halffft = int(0.5*fftsize)
#freqs = 0.5*samp_rate*np.array(range(-halffft,halffft))/(halffft)
#l = len(spec)
#lind = 0.1*l
#hind = 0.9*l
#newspec = spec[lind:hind-1]
#print np.shape(newspec), np.shape(freqs)
#plt.plot(freqs, newspec)
#xdata = np.arange(len(newspec))
#weights = np.ones_like(newspec)
#margin = 0.25*len(newspec)
#weights[margin:-margin] = 0.0 # Ignore RFI when fitting
#pf = np.polyfit(xdata, newspec, w=weights, deg=8)
#interpdata = np.polyval(pf, xdata)
#plt.plot(freqs,interpdata)
#plt.figure()
#plt.plot(freqs, newspec-interpdata)
#plt.figure()
#dec = sig.decimate(spec, 8, axis=0)
#plt.plot(dec)
plt.show()

Spiderlover/Toontown | otp/distributed/OtpDoGlobals.py | Python | mit | 3,475 | 0.000288

from direct.distributed.MsgTypes import *
OTP_DO_ID_SERVER_ROOT = 4007
OTP_DO_ID_FRIEND_MANAGER = 4501
OTP_DO_ID_LEADERBOARD_MANAGER = 4502
OTP_DO_ID_SERVER = 4600
OTP_DO_ID_UBER_DOG = 4601
OTP_CHANNEL_AI_AND_UD_BROADCAST = 4602
OTP_CHANNEL_UD_BROADCAST = 4603
OTP_CHANNEL_AI_BROADCAST = 4604
OTP_NET_MSGR_CHANNEL_ID_ALL_AI = 4605
OTP_NET_MSGR_CHANNEL_ID_UBER_DOG = 4606
OTP_NET_MSGR_CHANNEL_ID_AI_ONLY = 4607
OTP_DO_ID_COMMON = 4615
OTP_DO_ID_GATEWAY = 4616
OTP_DO_ID_PIRATES = 4617
OTP_DO_ID_TOONTOWN = 4618
OTP_DO_ID_FAIRIES = 4619
OTP_DO_ID_CARS = 4620
OTP_DO_ID_AVATARS = 4630
OTP_DO_ID_FRIENDS = 4640
OTP_DO_ID_GUILDS = 4650
OTP_DO_ID_ESCROW = 4660
OTP_DO_ID_CLIENT_SERVICES_MANAGER = 4665
OTP_DO_ID_TTI_FRIENDS_MANAGER = 4666
OTP_DO_ID_GLOBAL_PARTY_MANAGER = 4477
OTP_DO_ID_PIRATES_AVATAR_MANAGER = 4674
OTP_DO_ID_PIRATES_CREW_MANAGER = 4675
OTP_DO_ID_PIRATES_INVENTORY_MANAGER = 4677
OTP_DO_ID_PIRATES_SPEEDCHAT_RELAY = 4711
OTP_DO_ID_PIRATES_SHIP_MANAGER = 4678
OTP_DO_ID_PIRATES_TRAVEL_AGENT = 4679
OTP_DO_ID_PIRATES_FRIENDS_MANAGER = 4680
OTP_DO_ID_CHAT_MANAGER = 4681
OTP_DO_ID_TOONTOWN_AVATAR_MANAGER = 4682
OTP_DO_ID_TOONTOWN_DELIVERY_MANAGER = 4683
OTP_DO_ID_TOONTOWN_TEMP_STORE_MANAGER = 4684
OTP_DO_ID_TOONTOWN_SPEEDCHAT_RELAY = 4712
OTP_DO_ID_SWITCHBOARD_MANAGER = 4685
OTP_DO_ID_AVATAR_FRIENDS_MANAGER = 4686
OTP_DO_ID_PLAYER_FRIENDS_MANAGER = 4687
OTP_DO_ID_CENTRAL_LOGGER = 4688
OTP_DO_ID_CARS_AVATAR_MANAGER = 4689
OTP_DO_ID_TOONTOWN_MAIL_MANAGER = 4690
OTP_DO_ID_TOONTOWN_PARTY_MANAGER = 4691
OTP_DO_ID_TOONTOWN_RAT_MANAGER = 4692
OTP_DO_ID_STATUS_DATABASE = 4693
OTP_DO_ID_TOONTOWN_AWARD_MANAGER = 4694
OTP_DO_ID_TOONTOWN_CODE_REDEMPTION_MANAGER = 4695
OTP_DO_ID_TOONTOWN_IN_GAME_NEWS_MANAGER = 4696
OTP_DO_ID_TOONTOWN_NON_REPEATABLE_RANDOM_SOURCE = 4697
OTP_DO_ID_AI_TRADE_AVATAR = 4698
OTP_DO_ID_TOONTOWN_WHITELIST_MANAGER = 4699
OTP_DO_ID_PIRATES_MATCH_MAKER = 4700
OTP_DO_ID_PIRATES_GUILD_MANAGER = 4701
OTP_DO_ID_PIRATES_AWARD_MAKER = 4702
OTP_DO_ID_PIRATES_CODE_REDEMPTION = 4703
OTP_DO_ID_PIRATES_SETTINGS_MANAGER = 4704
OTP_DO_ID_PIRATES_HOLIDAY_MANAGER = 4705
OTP_DO_ID_PIRATES_CREW_MATCH_MANAGER = 4706
OTP_DO_ID_PIRATES_AVATAR_ACCESSORIES_MANAGER = 4710
OTP_DO_ID_TOONTOWN_CPU_INFO_MANAGER = 4713
OTP_DO_ID_TOONTOWN_SECURITY_MANAGER = 4714
OTP_DO_ID_SNAPSHOT_DISPATCHER = 4800
OTP_DO_ID_SNAPSHOT_RENDERER = 4801
OTP_DO_ID_SNAPSHOT_RENDERER_01 = 4801
OTP_DO_ID_SNAPSHOT_RENDERER_02 = 4802
OTP_DO_ID_SNAPSHOT_RENDERER_03 = 4803
OTP_DO_ID_SNAPSHOT_RENDERER_04 = 4804
OTP_DO_ID_SNAPSHOT_RENDERER_05 = 4805
OTP_DO_ID_SNAPSHOT_RENDERER_06 = 4806
OTP_DO_ID_SNAPSHOT_RENDERER_07 = 4807
OTP_DO_ID_SNAPSHOT_RENDERER_08 = 4808
OTP_DO_ID_SNAPSHOT_RENDERER_09 = 4809
OTP_DO_ID_SNAPSHOT_RENDERER_10 = 4810
OTP_DO_ID_SNAPSHOT_RENDERER_11 = 4811
OTP_DO_ID_SNAPSHOT_RENDERER_12 = 4812
OTP_DO_ID_SNAPSHOT_RENDERER_13 = 4813
OTP_DO_ID_SNAPSHOT_RENDERER_14 = 4814
OTP_DO_ID_SNAPSHOT_RENDERER_15 = 4815
OTP_DO_ID_SNAPSHOT_RENDERER_16 = 4816
OTP_DO_ID_SNAPSHOT_RENDERER_17 = 4817
OTP_DO_ID_SNAPSHOT_RENDERER_18 = 4818
OTP_DO_ID_SNAPSHOT_RENDERER_19 = 4819
OTP_DO_ID_SNAPSHOT_RENDERER_20 = 4820
OTP_DO_ID_PIRATES_INVENTORY_MANAGER_BASE = 5001
OTP_ZONE_ID_INVALID = 0
OTP_ZONE_ID_OLD_QUIET_ZONE = 1
OTP_ZONE_ID_MANAGEMENT = 2
OTP_ZONE_ID_DISTRICTS = 3
OTP_ZONE_ID_DISTRICTS_STATS = 4
OTP_ZONE_ID_ELEMENTS = 5
OTP_NET_MESSENGER_CHANNEL = (OTP_DO_ID_UBER_DOG << 32) + OTP_ZONE_ID_MANAGEMENT
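# Channel packing note (editorial addition): a channel id holds the 32-bit doId
# in the high word and the zone in the low word, so the net messenger channel
# above is (4601 << 32) + 2.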
OTP_MOD_CHANNEL = 6200
OTP_ADMIN_CHANNEL = 6400
OTP_SYSADMIN_CHANNEL = 6500

hkociemba/RubiksCube-TwophaseSolver | package_src/twophase/client_gui2.py | Python | gpl-3.0 | 12,351 | 0.00421

# ################ A simple graphical interface which communicates with the server #####################################
# While client_gui only allows setting the facelets with the mouse, this file (client_gui2) also takes input from the
# webcam and includes sliders for some opencv parameters.
from tkinter import *
import socket
import twophase.cubie as cubie
import twophase.vision_params as vision_params
# ################################## some global variables and constants ###############################################
DEFAULT_HOST = 'localhost'
DEFAULT_PORT = '8080'
width = 60 # width of a facelet in pixels
facelet_id = [[[0 for col in range(3)] for row in range(3)] for fc in range(6)]
colorpick_id = [0 for i in range(6)]
curcol = None
t = ("U", "R", "F", "D", "L", "B")
cols = ("yellow", "green", "red", "white", "blue", "orange")
########################################################################################################################
# ################################################ Diverse functions ###################################################
def show_text(txt):
"""Display messages."""
print(txt)
display.insert(INSERT, txt)
root.update_idletasks()
def create_facelet_rects(a):
"""Initialize the facelet grid on the canvas."""
offset = ((1, 0), (2, 1), (1, 1), (1, 2), (0, 1), (3, 1))
for f in range(6):
for row in range(3):
y = 10 + offset[f][1] * 3 * a + row * a
for col in range(3):
x = 10 + offset[f][0] * 3 * a + col * a
facelet_id[f][row][col] = canvas.create_rectangle(x, y, x + a, y + a, fill="grey")
if row == 1 and col == 1:
canvas.create_text(x + width // 2, y + width // 2, font=("", 14), text=t[f], state=DISABLED)
for f in range(6):
canvas.itemconfig(facelet_id[f][1][1], fill=cols[f])
def create_colorpick_rects(a):
"""Initialize the "paintbox" on the canvas."""
global curcol
global cols
for i in range(6):
x = (i % 3) * (a + 5) + 7 * a
y = (i // 3) * (a + 5) + 7 * a
colorpick_id[i] = canvas.create_rectangle(x, y, x + a, y + a, fill=cols[i])
canvas.itemconfig(colorpick_id[0], width=4)
curcol = cols[0]
def get_definition_string():
"""Generate the cube definition string from the facelet colors."""
color_to_facelet = {}
for i in range(6):
color_to_facelet.update({canvas.itemcget(facelet_id[i][1][1], "fill"): t[i]})
s = ''
for f in range(6):
for row in range(3):
for col in range(3):
s += color_to_facelet[canvas.itemcget(facelet_id[f][row][col], "fill")]
return s
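# Added note (illustration, not in the original file): the string built above is
# the 54-character facelet representation expected by the two-phase solver, faces
# serialized in the order U, R, F, D, L, B. A quick sanity check one could apply:
def _looks_like_definition_string(s):
    """Sketch: 54 facelets, exactly 9 of each face letter."""
    return len(s) == 54 and all(s.count(c) == 9 for c in "URFDLB")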
########################################################################################################################
# ############################### Solve the displayed cube with a local or remote server ###############################
def solve():
"""Connect to the server and return the solving maneuver."""
display.delete(1.0, END) # clear output window
try:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
except socket.error:
show_text('Failed to create socket')
return
# host = 'f9f0b2jt6zmzyo6b.myfritz.net' # my RaspberryPi, if online
host = txt_host.get(1.0, END).rstrip() # default is localhost
port = int(txt_port.get(1.0, END)) # default is port 8080
try:
remote_ip = socket.gethostbyname(host)
except socket.gaierror:
show_text('Hostname could not be resolved.')
return
try:
s.connect((remote_ip, port))
except BaseException as e:
show_text('Cannot connect to server! ' + e.__doc__)
return
show_text('Connected with ' + remote_ip + '\n')
try:
defstr = get_definition_string() + '\n'
except BaseException as e:
show_text('Invalid facelet configuration.\nWrong or missing colors. ' + e.__doc__)
return
show_text(defstr)
try:
s.sendall((defstr + '\n').encode())
except BaseException as e:
show_text('Cannot send cube configuration to server. ' + e.__doc__)
return
show_text(s.recv(2048).decode())
########################################################################################################################
# ################################# Functions to change the facelet colors #############################################
def clean():
"""Restore the cube to a clean cube."""
for f in range(6):
for row in range(3):
for col in range(3):
canvas.itemconfig(facelet_id[f][row][col], fill=canvas.itemcget(facelet_id[f][1][1], "fill"))
def empty():
"""Remove the facelet colors except the center facelets colors."""
for f in range(6):
for row in range(3):
for col in range(3):
if row != 1 or col != 1:
canvas.itemconfig(facelet_id[f][row][col], fill="grey")
def random():
"""Generate a random cube and sets the corresponding facelet colors."""
cc = cubie.CubieCube()
cc.randomize()
fc = cc.to_facelet_cube()
idx = 0
for f in range(6):
for row in range(3):
for col in range(3):
canvas.itemconfig(facelet_id[f][row][col], fill=cols[fc.f[idx]])
idx += 1
########################################################################################################################
# ################################### Edit the facelet colors ##########################################################
def click(_event):
"""Define how to react on left mouse clicks"""
global curcol
idlist = canvas.find_withtag("current")
if len(idlist) > 0:
if idlist[0] in colorpick_id:
curcol = canvas.itemcget("current", "fill")
for i in range(6):
canvas.itemconfig(colorpick_id[i], width=1)
canvas.itemconfig("current", width=5)
else:
canvas.itemconfig("current", fill=curcol)
########################################################################################################################
# ######################################### functions to set the slider values #########################################
def set_rgb_L(val):
vision_params.rgb_L = int(val)
def set_orange_L(val):
vision_params.orange_L = int(val)
def set_orange_H(val):
vision_params.orange_H = int(val)
def set_yellow_H(val):
vision_params.yellow_H = int(val)
def set_green_H(val):
vision_params.green_H = int(val)
def set_blue_H(val):
    vision_params.blue_H = int(val)
def set_sat_W(val):
vision_params.sat_W = int(val)
def set_val_W(val):
vision_params.val_W = int(val)
def set_sigma_C(val):
vision_params.sigma_C = int(val)
def set_delta_C(val):
vision_params.delta_C = int(val)
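# Wiring sketch (assumption; the Scale widgets themselves are created in a part of
# the file not shown here): each setter above is intended as the command callback
# of a tkinter Scale, e.g.
#   Scale(root, from_=0, to=255, orient=HORIZONTAL, command=set_rgb_L)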
def transfer():
"""Transfer the facelet colors detected by the opencv vision to the GUI editor."""
if len(vision_params.face_col) == 0:
return
centercol = vision_params.face_col[1][1]
vision_params.cube_col[centercol] = vision_params.face_col
vision_params.cube_hsv[centercol] = vision_params.face_hsv
dc = {}
for i in range(6):
dc[canvas.itemcget(facelet_id[i][1][1], "fill")] = i # map color to face number
for i in range(3):
for j in range(3):
canvas.itemconfig(facelet_id[dc[centercol]][i][j], fill=vision_params.face_col[i][j])
# ######################################################################################################################
# ###################################### Generate and display the TK_widgets ##########################################
root = Tk()
root.wm_title("Solver Client")
canvas = Canvas(root, width=12 * width + 20, height=9 * width + 20)
canvas.pack()
bsolve = Button(text="Solve", height=2, width=10, relief=RAISED, command=solve)
bsolve_window = canvas.create_window(10 + 10.5 * width, 10 + 6.5 * width, anchor=NW, window=bsolve)
bclean = Button(text="Clean", height=1, width=10, relief=RAISED, command=clean)
bclean_w
|
fake-name/ReadableWebProxy
|
WebMirror/management/rss_parser_funcs/feed_parse_extractDreamstlTumblrCom.py
|
Python
|
bsd-3-clause
| 761
| 0.027595
|
def extractDreamstlTumblrCom(item):
'''
Parser for 'dreamstl.tumblr.com'
'''
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or "preview" in item['title'].lower():
return None
tagmap = [
                    ('the s ranks that i raised', 'The S-Ranks that I Raised', 'translated'),
('the s ranks that i\'ve raised', 'The S-Ranks that I Raised', 'translated'),
('PRC', 'PRC', 'translated'),
('Loiterous', 'Loiterous', 'oel'),
]
for tagname, name, tl_type in tagmap:
if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False
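# Dispatch illustration (hypothetical item dict; extractVolChapterFragmentPostfix
# and buildReleaseMessageWithType are supplied by the surrounding framework):
#   extractDreamstlTumblrCom({'title': 'The S-Ranks that I Raised chapter 12',
#                             'tags': ['the s ranks that i raised']})
# would, assuming the title parser extracts chapter 12, build a release with
# name='The S-Ranks that I Raised' and tl_type='translated'.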
|
mbourqui/django-publications-bootstrap
|
publications_bootstrap/admin/__init__.py
|
Python
|
mit
| 358
| 0
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from .catalogadmin import CatalogAdmin
from .publicationadmin import PublicationAdmin
from .typeadmin import TypeAdmin
from ..models import Type, Catalog, Publication
admin.site.register(Type, TypeAdmin)
admin.site.register(Catalog, CatalogAdmin)
admin.site.register(Publication, PublicationAdmin)
|
bitmovin/bitcodin-python
|
bitcodin/test/input/testcase_get_non_existent_input.py
|
Python
|
unlicense
| 614
| 0
|
__author__ = 'Dominic Miglar <dominic.miglar@bitmovin.net>'
import unittest
from bitcodin import get_input
from bitcodin.exceptions import BitcodinNotFoundError
from bitcodin.test.bitcodin_test_case import BitcodinTestCase
class GetNonExistentInputTestCase(BitcodinTestCase):
def setUp(self):
super(GetNonExistentInputTestCase, self).setUp()
def runTest(self):
with self.assertRaises(BitcodinNotFoundError):
encoding_profile = get_input(0)
def tearDown(self):
        super(GetNonExistentInputTestCase, self).tearDown()
if __name__ == '__main__':
unittest.main()
|
olympiag3/olypy
|
tests/unit/test_olymap_item.py
|
Python
|
apache-2.0
| 5,307
| 0.000942
|
import olymap.item
def test_get_animal():
tests = (
({}, None),
        ({'IT': {'an': ['1']}}, True),
({'IT': {'an': ['0']}}, None),
({'IT': {'de': ['1']}}, None),
        ({'IM': {'an': ['1']}}, None),
)
for box, answer in tests:
assert olymap.item.get_animal(box) == answer
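# Reference sketch inferred from the table above (hypothetical name, not the
# actual olymap.item implementation):
def _get_animal_sketch(box):
    return True if box.get('IT', {}).get('an') == ['1'] else None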
def test_get_attack_bonus():
tests = (
({}, 0),
({'IM': {'ab': ['60']}}, 60),
({'IM': {'ab': ['0']}}, 0),
({'IM': {'de': ['60']}}, 0),
({'IT': {'ab': ['60']}}, 0),
)
for box, answer in tests:
assert olymap.item.get_attack_bonus(box) == answer
def test_get_aura_bonus():
tests = (
({}, None),
({'IM': {'ba': ['60']}}, '60'),
({'IM': {'ba': ['0']}}, '0'),
({'IM': {'de': ['60']}}, None),
({'IT': {'ba': ['60']}}, None),
)
for box, answer in tests:
assert olymap.item.get_aura_bonus(box) == answer
def test_get_defense_bonus():
tests = (
({}, 0),
({'IM': {'db': ['60']}}, 60),
({'IM': {'db': ['0']}}, 0),
({'IM': {'de': ['60']}}, 0),
({'IT': {'db': ['60']}}, 0),
)
for box, answer in tests:
assert olymap.item.get_defense_bonus(box) == answer
def test_get_fly_capacity():
tests = (
({}, None),
({'IT': {'fc': ['100']}}, '100'),
({'IT': {'fc': ['0']}}, '0'),
({'IT': {'de': ['60']}}, None),
({'IM': {'fc': ['60']}}, None),
)
for box, answer in tests:
assert olymap.item.get_fly_capacity(box) == answer
def test_get_item_attack():
tests = (
({}, None),
({'IT': {'at': ['60']}}, '60'),
({'IT': {'at': ['0']}}, '0'),
({'IT': {'de': ['60']}}, None),
({'IM': {'at': ['60']}}, None),
)
for box, answer in tests:
assert olymap.item.get_item_attack(box) == answer
def test_get_item_bonuses():
tests = (
({}, {'attack_bonus': 0, 'defense_bonus': 0, 'missile_bonus': 0, 'aura_bonus': None}),
({'IM': {'ab': ['60'], 'mb': ['61'], 'db': ['62'], 'ba': ['63']}}, {'attack_bonus': 60, 'defense_bonus': 62, 'missile_bonus': 61, 'aura_bonus': '63'}),
({'IM': {'ab': ['60']}}, {'attack_bonus': 60, 'defense_bonus': 0, 'missile_bonus': 0, 'aura_bonus': None}),
)
for box, answer in tests:
assert olymap.item.get_item_bonuses(box) == answer
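# Composition sketch inferred from the expectations above (hypothetical name,
# not the actual olymap.item implementation):
def _get_item_bonuses_sketch(box):
    im = box.get('IM', {})
    return {
        'attack_bonus': int(im.get('ab', ['0'])[0]),
        'defense_bonus': int(im.get('db', ['0'])[0]),
        'missile_bonus': int(im.get('mb', ['0'])[0]),
        'aura_bonus': im.get('ba', [None])[0],
    }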
def test_get_item_defense():
tests = (
({}, None),
({'IT': {'de': ['60']}}, '60'),
({'IT': {'de': ['0']}}, '0'),
({'IT': {'at': ['60']}}, None),
({'IM': {'de': ['60']}}, None),
)
for box, answer in tests:
assert olymap.item.get_item_defense(box) == answer
def test_get_item_missile():
tests = (
({}, None),
({'IT': {'mi': ['60']}}, '60'),
({'IT': {'mi': ['0']}}, '0'),
({'IT': {'de': ['60']}}, None),
({'IM': {'mi': ['60']}}, None),
)
for box, answer in tests:
assert olymap.item.get_item_missile(box) == answer
def test_get_land_capacity():
tests = (
({}, None),
({'IT': {'lc': ['100']}}, '100'),
({'IT': {'lc': ['0']}}, '0'),
({'IT': {'de': ['60']}}, None),
({'IM': {'lc': ['60']}}, None),
)
for box, answer in tests:
assert olymap.item.get_land_capacity(box) == answer
def test_get_lore():
tests = (
({}, None),
({'IM': {'lo': ['100']}}, '100'),
({'IM': {'lo': ['0']}}, '0'),
({'IM': {'de': ['60']}}, None),
({'IT': {'lo': ['60']}}, None),
)
for box, answer in tests:
assert olymap.item.get_lore(box) == answer
def test_get_man_item():
tests = (
({}, None),
({'IT': {'mu': ['1']}}, True),
({'IT': {'mu': ['0']}}, None),
({'IT': {'de': ['1']}}, None),
({'IM': {'mu': ['1']}}, None),
)
for box, answer in tests:
assert olymap.item.get_man_item(box) == answer
def test_get_missile_bonus():
tests = (
({}, 0),
({'IM': {'mb': ['60']}}, 60),
({'IM': {'mb': ['0']}}, 0),
({'IM': {'mi': ['60']}}, 0),
({'IT': {'mb': ['60']}}, 0),
)
for box, answer in tests:
assert olymap.item.get_missile_bonus(box) == answer
def test_get_plural():
tests = (
({}, None),
({'na': ['single'], 'IT': {'pl': ['plural']}}, 'plural'),
({'na': ['single']}, 'single'),
({'na': ['single'], 'IT': {'de': ['plural']}}, 'single'),
({'na': ['single'], 'IM': {'pl': ['plural']}}, 'single'),
)
for box, answer in tests:
assert olymap.item.get_plural(box) == answer
def test_get_prominent():
tests = (
({}, None),
({'IT': {'pr': ['1']}}, True),
({'IT': {'pr': ['0']}}, None),
({'IT': {'de': ['1']}}, None),
({'IM': {'pr': ['1']}}, None),
)
for box, answer in tests:
assert olymap.item.get_prominent(box) == answer
def test_get_ride_capacity():
tests = (
({}, None),
({'IT': {'rc': ['100']}}, '100'),
({'IT': {'rc': ['0']}}, '0'),
({'IT': {'de': ['60']}}, None),
({'IM': {'rc': ['60']}}, None),
)
for box, answer in tests:
assert olymap.item.get_ride_capacity(box) == answer
|
ooici/eeagent
|
eeagent/eeagent_exceptions.py
|
Python
|
apache-2.0
| 337
| 0.002967
|
# Copyright 2013 University of Chicago
class EEAgentParameterException(Exception):
def __init__(self, message):
Exception.__init__(self, message)
class EEAgentUnauthorizedException(Exception):
pass
class EEAgentSupDException(Exception):
def __init__(self, message):
Exception.__init__(self, message)
|
gangadhar-kadam/verve_live_frappe
|
frappe/model/base_document.py
|
Python
|
mit
| 15,292
| 0.029362
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe, json, sys
from frappe import _
from frappe.utils import cint, flt, now, cstr, strip_html
from frappe.model import default_fields
from frappe.model.naming import set_new_name
from frappe.modules import load_doctype_module
_classes = {}
def get_controller(doctype):
"""Returns the **class** object of the given DocType.
For `custom` type, returns `frappe.model.document.Document`.
:param doctype: DocType name as string."""
from frappe.model.document import Document
if not doctype in _classes:
module_name, custom = frappe.db.get_value("DocType", doctype, ["module", "custom"]) \
or ["Core", False]
if custom:
_class = Document
else:
module = load_doctype_module(doctype, module_name)
classname = doctype.replace(" ", "").replace("-", "")
if hasattr(module, classname):
_class = getattr(module, classname)
if issubclass(_class, BaseDocument):
_class = getattr(module, classname)
else:
raise ImportError, doctype
else:
raise ImportError, doctype
_classes[doctype] = _class
return _classes[doctype]
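# Usage sketch (assumes an initialized frappe site; "ToDo" is a stock doctype):
#   todo_class = get_controller("ToDo")   # module resolved once, then cached in _classes
#   todo = todo_class({"doctype": "ToDo", "description": "write docs"})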
class BaseDocument(object):
ignore_in_getter = ("doctype", "_meta", "meta", "_table_fields", "_valid_columns")
def __init__(self, d):
self.update(d)
self.dont_update_if_missing = []
if hasattr(self, "__setup__"):
self.__setup__()
@property
def meta(self):
if not hasattr(self, "_meta"):
self._meta = frappe.get_meta(self.doctype)
return self._meta
def update(self, d):
if "doctype" in d:
self.set("doctype", d.get("doctype"))
# first set default field values of base document
for key in default_fields:
if key in d:
self.set(key, d.get(key))
for key, value in d.iteritems():
self.set(key, value)
return self
def update_if_missing(self, d):
if isinstance(d, BaseDocument):
d = d.get_valid_dict()
if "doctype" in d:
self.set("doctype", d.get("doctype"))
for key, value in d.iteritems():
# dont_update_if_missing is a list of fieldnames, for which, you don't want to set default value
if (self.get(key) is None) and (value is not None) and (key not in self.dont_update_if_missing):
self.set(key, value)
def get_db_value(self, key):
return frappe.db.get_value(self.doctype, self.name, key)
def get(self, key=None, filters=None, limit=None, default=None):
if key:
if isinstance(key, dict):
return _filter(self.get_all_children(), key, limit=limit)
if filters:
if isinstance(filters, dict):
value = _filter(self.__dict__.get(key, []), filters, limit=limit)
else:
default = filters
filters = None
value = self.__dict__.get(key, default)
else:
value = self.__dict__.get(key, default)
if value is None and key not in self.ignore_in_getter \
and key in (d.fieldname for d in self.meta.get_table_fields()):
self.set(key, [])
value = self.__dict__.get(key)
return value
else:
return self.__dict__
def getone(self, key, filters=None):
return self.get(key, filters=filters, limit=1)[0]
def set(self, key, value, as_value=False):
if isinstance(value, list) and not as_value:
self.__dict__[key] = []
self.extend(key, value)
else:
self.__dict__[key] = value
def delete_key(self, key):
if key in self.__dict__:
del self.__dict__[key]
def append(self, key, value=None):
if value==None:
value={}
if isinstance(value, (dict, BaseDocument)):
if not self.__dict__.get(key):
self.__dict__[key] = []
value = self._init_child(value, key)
self.__dict__[key].append(value)
# reference parent document
value.parent_doc = self
return value
else:
raise ValueError, "Document attached to child table must be a dict or BaseDocument, not " + str(type(value))[1:-1]
def extend(self, key, value):
if isinstance(value, list):
for v in value:
self.append(key, v)
else:
raise ValueError
def remove(self, doc):
self.get(doc.parentfield).remove(doc)
def _init_child(self, value, key):
if not self.doctype:
return value
if not isinstance(value, BaseDocument):
if "doctype" not in value:
value["doctype"] = self.get_table_field_doctype(key)
if not value["doctype"]:
raise AttributeError, key
value = get_controller(value["doctype"])(value)
value.init_valid_columns()
value.parent = self.name
value.parenttype = self.doctype
value.parentfield = key
if not getattr(value, "idx", None):
value.idx = len(self.get(key) or []) + 1
if not getattr(value, "name", None):
value.__dict__['__islocal'] = 1
return value
def get_valid_dict(self):
d = {}
for fieldname in self.meta.get_valid_columns():
d[fieldname] = self.get(fieldname)
return d
def init_valid_columns(self):
for key in default_fields:
if key not in self.__dict__:
self.__dict__[key] = None
if self.doctype in ("DocField", "DocPerm") and self.parent in ("DocType", "DocField", "DocPerm"):
from frappe.model.meta import get_table_columns
valid = get_table_columns(self.doctype)
else:
valid = self.meta.get_valid_columns()
for key in valid:
if key not in self.__dict__:
self.__dict__[key] = None
def is_new(self):
return self.get("__islocal")
def as_dict(self, no_nulls=False):
doc = self.get_valid_dict()
doc["doctype"] = self.doctype
for df in self.meta.get_table_fields():
children = self.get(df.fieldname) or []
doc[df.fieldname] = [d.as_dict(no_nulls=no_nulls) for d in children]
if no_nulls:
for k in doc.keys():
if doc[k] is None:
del doc[k]
for key in ("_user_tags", "__islocal", "__onload", "_starred_by"):
if self.get(key):
doc[key] = self.get(key)
return frappe._dict(doc)
def as_json(self):
from frappe.utils.response import json_handler
return json.dumps(self.as_dict(), indent=1, sort_keys=True, default=json_handler)
def get_table_field_doctype(self, fieldname):
return self.meta.get_field(fieldname).options
def get_parentfield_of_doctype(self, doctype):
fieldname = [df.fieldname for df in self.meta.get_table_fields() if df.options==doctype]
return fieldname[0] if fieldname else None
def db_insert(self):
"""INSERT the document (with valid columns) in the database."""
if not self.name:
# name will be set by document class in most cases
set_new_name(self)
d = self.get_valid_dict()
columns = d.keys()
try:
frappe.db.sql("""insert into `tab{doctype}`
({columns}) values ({values})""".format(
doctype = self.doctype,
columns = ", ".join(["`"+c+"`" for c in columns]),
                    values = ", ".join(["%s"] * len(columns))
), d.values())
except Exception, e:
if e.args[0]==1062:
if self.meta.autoname=="hash":
self.name = None
self.db_insert()
return
type, value, traceback = sys.exc_info()
frappe.msgprint(_("Duplicate name {0} {1}").format(self.doctype, self.name))
raise frappe.NameError, (self.doctype, self.name, e), traceback
else:
raise
self.set("__islocal", False)
def db_update(self):
if self.get("__islocal") or not self.name:
self.db_insert()
return
d = self.get_valid_dict()
columns = d.keys()
try:
frappe.db.sql("""update `tab{doctype}`
set {values} where name=%s""".format(
doctype = self.doctype,
values = ", ".join(["`"+c+"`=%s" for c in columns])
), d.values() + [d.get("name")])
except Exception, e:
if e.args[0]==1062:
type, value, traceback = sys.exc_info()
fieldname = str(e).split("'")[-2]
frappe.msgprint(_("{0} must be unique".format(self.meta.get_label(fieldname))))
raise frappe.ValidationError, (self.doctype, self.name, e), traceback
else:
raise
def db_set(self, fieldname, value, update_modified=True):
self.set(fieldname, value)
self.set("modified", now())
self.set("modified_by", frappe.session.user)
frappe.db.set_value(self.doctype, self.name, fieldname, value,
self.modified, self.modified_by, update_modified=update_modified)
def _fix_numeric_types(self):
for df in self.meta.get("fields"):
if df.fieldtype == "Check":
self.set(df.fieldn
|
ljx0305/ice
|
python/test/Ice/facets/TestI.py
|
Python
|
gpl-2.0
| 1,190
| 0.006723
|
# **********************************************************************
#
# Copyright (c) 2003-2017 ZeroC, Inc. All rights reserved.
#
# This copy of Ice is licensed to you under the terms described in the
# ICE_LICENSE file included in this distribution.
#
# **********************************************************************
import Test
class AI(Test.A):
def callA(self, current=None):
return "A"
class BI(Test.B, AI):
def callB(self, current=None):
return "B"
class CI(Test.C, AI):
def callC(self, current=None):
return "C"
class DI(Test.D, BI, CI):
def callD(self, current=None):
return "D"
class EI(Test.E):
def callE(self, current=None):
return "E"
class FI(Test.F, EI):
def callF(self, current=None):
return "F"
class GI(Test.G):
def __init__(self, communicator):
self._communicator = communicator
def shutdown(self, current=None):
self._communicator.shutdown()
def callG(self, current=None):
return "G"
class HI(Test.H, GI):
def __init__(self, communicator):
GI.__init__(self, communicator)
def callH(self, current=None):
return "H"
|
mfraezz/osf.io
|
website/project/views/register.py
|
Python
|
apache-2.0
| 10,365
| 0.002605
|
# -*- coding: utf-8 -*-
from rest_framework import status as http_status
import itertools
from flask import request
from framework import status
from framework.exceptions import HTTPError
from framework.flask import redirect # VOL-aware redirect
from framework.auth.decorators import must_be_signed
from website.archiver import ARCHIVER_SUCCESS, ARCHIVER_FAILURE
from addons.base.views import DOWNLOAD_ACTIONS
from website import settings
from osf.exceptions import NodeStateError
from website.project.decorators import (
must_be_valid_project, must_be_contributor_or_public,
must_have_permission, must_be_contributor_and_not_group_member,
must_not_be_registration, must_be_registration,
must_not_be_retracted_registration
)
from osf import features
from osf.models import Identifier, RegistrationSchema
from website.project.utils import serialize_node
from osf.utils.permissions import ADMIN
from website import language
from website.ember_osf_web.decorators import ember_flag_is_active
from website.project import signals as project_signals
from website.project.metadata.schemas import _id_to_name
from website import util
from website.project.metadata.utils import serialize_meta_schema
from website.project.model import has_anonymous_link
from website.archiver.decorators import fail_archive_on_error
from .node import _view_project
from api.waffle.utils import flag_is_active
@must_be_valid_project
@must_not_be_retracted_registration
@must_be_contributor_or_public
def node_register_page(auth, node, **kwargs):
"""Display the registration metadata for a registration.
:return: serialized Node
"""
if node.is_registration:
return serialize_node(node, auth)
else:
status.push_status_message(
'You have been redirected to the project\'s registrations page. From here you can initiate a new Draft Registration to complete the registration process',
trust=False,
id='redirected_to_registrations',
)
return redirect(node.web_url_for('node_registrations', view='draft', _guid=True))
@must_be_valid_project
@must_have_permission(ADMIN)
@must_be_contributor_and_not_group_member
def node_registration_retraction_redirect(auth, node, **kwargs):
return redirect(node.web_url_for('node_registration_retraction_get', _guid=True))
@must_be_valid_project
@must_not_be_retracted_registration
@must_have_permission(ADMIN)
@must_be_contributor_and_not_group_member
def node_registration_retraction_get(auth, node, **kwargs):
"""Prepares node object for registration retraction page.
:return: serialized Node to be retracted
:raises: 400: BAD_REQUEST if registration already pending retraction
"""
if not node.is_registration:
raise HTTPError(http_status.HTTP_400_BAD_REQUEST, data={
'message_short': 'Invalid Request',
'message_long': 'Withdrawal of non-registrations is not permitted.'
})
if node.is_pending_retraction:
raise HTTPError(http_status.HTTP_400_BAD_REQUEST, data={
'message_short': 'Invalid Request',
'message_long': 'This registration is already pending withdrawal.'
})
return serialize_node(node, auth, primary=True)
@must_be_valid_project
@must_have_permission(ADMIN)
@must_be_contributor_and_not_group_member
def node_registration_retraction_post(auth, node, **kwargs):
"""Handles retraction of public registrations
:param auth: Authentication object for User
:return: Redirect URL for successful POST
"""
if node.is_pending_retraction:
raise HTTPError(http_status.HTTP_400_BAD_REQUEST, data={
'message_short': 'Invalid Request',
'message_long': 'This registration is already pending withdrawal'
})
if not node.is_registration:
raise HTTPError(http_status.HTTP_400_BAD_REQUEST, data={
'message_short': 'Invalid Request',
'message_long': 'Withdrawal of non-registrations is not permitted.'
})
if node.root_id != node.id:
raise HTTPError(http_status.HTTP_400_BAD_REQUEST, data={
'message_short': 'Invalid Request',
'message_long': 'Withdrawal of non-parent registrations is not permitted.'
})
data = request.get_json()
try:
node.retract_registration(auth.user, data.get('justification', None))
node.save()
node.retraction.ask(node.get_active_contributors_recursive(unique_users=True))
except NodeStateError as err:
raise HTTPError(http_status.HTTP_403_FORBIDDEN, data=dict(message_long=str(err)))
return {'redirectUrl': node.web_url_for('view_project')}
@must_be_valid_project
@must_not_be_retracted_registration
@must_be_contributor_or_public
@ember_flag_is_active(features.EMBER_REGISTRATION_FORM_DETAIL)
def node_register_template_page(auth, node, metaschema_id, **kwargs):
if flag_is_active(request, features.EMBER_REGISTRIES_DETAIL_PAGE):
# Registration meta page obviated during redesign
return redirect(node.url)
if node.is_registration and bool(node.registered_schema):
try:
meta_schema = RegistrationSchema.objects.get(_id=metaschema_id)
except RegistrationSchema.DoesNotExist:
# backwards compatability for old urls, lookup by name
meta_schema = RegistrationSchema.objects.filter(name=_id_to_name(metaschema_id)).order_by('-schema_version').first()
if not meta_schema:
raise HTTPError(http_status.HTTP_404_NOT_FOUND, data={
'message_short': 'Invalid schema name',
'message_long': 'No registration schema with that name could be found.'
})
ret = _view_project(node, auth, primary=True)
my_meta = serialize_meta_schema(meta_schema)
if has_anonymous_link(node, auth):
for indx, schema_page in enumerate(my_meta['schema']['pages']):
for idx, schema_question in enumerate(schema_page['questions']):
if schema_question['title'] in settings.ANONYMIZED_TITLES:
del my_meta['schema']['pages'][indx]['questions'][idx]
ret['node']['registered_schema'] = serialize_meta_schema(meta_schema)
return ret
else:
status.push_status_message(
'You have been redirected to the project\'s registrations page. From here you can initiate a new Draft Registration to complete the registration process',
trust=False,
id='redirected_to_registrations',
)
return redirect(node.web_url_for('node_registrations', view=kwargs.get('template'), _guid=True))
@must_be_valid_project # returns project
@must_have_permission(ADMIN)
@must_be_contributor_and_not_group_member
@must_not_be_registration
def project_before_register(auth, node, **kwargs):
"""Returns prompt informing user that addons, if any, won't be registered."""
# TODO: Avoid generating HTML code in Python; all HTML should be in display layer
messages = {
'full': {
'addons': set(),
'message': 'The content and version history of <strong>{0}</strong> will be copied to the registration.',
},
'partial': {
'addons': set(),
'message': 'The current version of the content in <strong>{0}</strong> will be copied to the registration, but version history will be lost.'
},
'none': {
'addons': set(),
'message': 'The contents of <strong>{0}</strong> cannot be registered at this time, and will not be included as part of this registration.',
},
}
errors = {}
addon_set = [n.get_addons() for n in itertools.chain([node], node.get_descendants_recursive(primary_only=True))]
for addon in itertools.chain(*addon_set):
if not addon.complete:
continue
archive_errors = getattr(addon, 'archive_errors', None)
error = None
if archive_errors:
error = archive_errors()
if error:
errors[addon.config.short_name] = error
continue
|
GreenBlast/Linger
|
LingerActions/StopProcessAndChildrenAction.py
|
Python
|
mit
| 1,242
| 0.005636
|
import LingerActions.LingerBaseAction as lingerActions
class StopProcessAndChildrenAction(lingerActions.LingerBaseAction):
"""Logging that there was a change in a file"
|
""
def __init__(self, configuration):
super(StopProcessAndChildrenAction, self).__init__(configuration)
# Fields
self.process_adapter = self.configuration['process_adapter']
def get_process_adapter(self):
return self.get_adapter_by_uuid(self.process_adapter)
def act(self, configuration):
self.logger.debug("In Stop Children action")
        self.get_process_adapter().stop_with_all_children()
class StopProcessAndChildrenActionFactory(lingerActions.LingerBaseActionFactory):
"""StopProcessAndChildrenActionFactory generates StopProcessAndChildrenAction instances"""
def __init__(self):
super(StopProcessAndChildrenActionFactory, self).__init__()
self.item = StopProcessAndChildrenAction
def get_instance_name(self):
return "StopProcessAndChildrenAction"
def get_fields(self):
fields, optional_fields = super(StopProcessAndChildrenActionFactory, self).get_fields()
fields += [('process_adapter','uuid')]
return (fields, optional_fields)
|
protochron/aurora
|
src/main/python/apache/thermos/cli/commands/status.py
|
Python
|
apache-2.0
| 4,163
| 0.010569
|
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import print_function
import os
import pwd
import re
import sys
import time
from twitter.common import app
from apache.thermos.cli.common import get_path_detector
from apache.thermos.common.ckpt import CheckpointDispatcher
from apache.thermos.monitoring.detector import TaskDetector
from gen.apache.thermos.ttypes import ProcessState, TaskState
@app.command
@app.command_option("--verbosity", default=0, dest='verbose', type='int',
help="Display more verbosity")
@app.command_option("--only", default=None, dest='only', type='choice',
choices=('active', 'finished'), help="Display only tasks of this type.")
def status(args, options):
"""Get the status of task(s).
Usage: thermos status [options] [task_name(s) or task_regexp(s)]
"""
path_detector = get_path_detector()
def format_task(detector, task_id):
checkpoint_filename = detector.get_checkpoint(task_id)
checkpoint_stat = os.stat(checkpoint_filename)
try:
checkpoint_owner = pwd.getpwuid(checkpoint_stat.st_uid).pw_name
except KeyError:
checkpoint_owner = 'uid:%s' % checkpoint_stat.st_uid
print(' %-20s [owner: %8s]' % (task_id, checkpoint_owner), end='')
if options.verbose == 0:
print()
if options.verbose > 0:
state = CheckpointDispatcher.from_file(checkpoint_filename)
if state is None or state.header is None:
print(' - checkpoint stream CORRUPT or outdated format')
return
print(' state: %8s' % TaskState._VALUES_TO_NAMES.get(state.statuses[-1].state, 'Unknown'),
end='')
print(' start: %25s' % time.asctime(time.localtime(state.header.launch_time_ms / 1000.0)))
if options.verbose > 1:
print(' user: %s' % state.header.user, end='')
if state.header.ports:
print(' ports: %s' % ' '.join('%s -> %s' % (key, val)
for key, val in state.header.ports.items()))
else:
print(' ports: None')
print(' sandbox: %s' % state.header.sandbox)
if options.verbose > 2:
print(' process table:')
for process, process_history in state.processes.items():
print(' - %s runs: %s' % (process, len(process_history)), end='')
last_run = process_history[-1]
print(' last: pid=%s, rc=%s, finish:%s, state:%s' % (
last_run.pid or 'None',
last_run.return_code if last_run.return_code is not None else '',
time.asctime(time.localtime(last_run.stop_time)) if last_run.stop_time else 'None',
          ProcessState._VALUES_TO_NAMES.get(last_run.state, 'Unknown')))
print()
matchers = map(re.compile, args or ['.*'])
active = []
finished = []
for root in path_detector.get_paths():
detector = TaskDetector(root)
active.extend((detector, t_id) for _, t_id in detector.get_task_ids(state='active')
if any(pattern.match(t_id) for pattern in matchers))
    finished.extend((detector, t_id) for _, t_id in detector.get_task_ids(state='finished')
if any(pattern.match(t_id) for pattern in matchers))
found = False
if options.only is None or options.only == 'active':
if active:
print('Active tasks:')
found = True
for detector, task_id in active:
format_task(detector, task_id)
print()
if options.only is None or options.only == 'finished':
if finished:
print('Finished tasks:')
found = True
for detector, task_id in finished:
format_task(detector, task_id)
print()
if not found:
print('No tasks found.')
sys.exit(1)
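# Invocation sketch (hypothetical task ids; assumes a thermos checkpoint root is
# configured): matching active tasks against a regexp and dumping full detail,
# including the per-process table enabled at verbosity > 2:
#   thermos status --verbosity=3 --only=active 'my_service_.*'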
|
IronLanguages/ironpython3
|
Tests/compat/sbs_simple_ops.py
|
Python
|
apache-2.0
| 17,059
| 0.013424
|
# Licensed to the .NET Foundation under one or more agreements.
# The .NET Foundation licenses this file to you under the Apache 2.0 License.
# See the LICENSE file in the project root for more information.
from common import *
import testdata
class oldstyle:
def __init__(self, value): self.value = value
def __repr__(self): return "oldstyle(%s)" % self.value
def __add__(self, other): return self.value + other
def __sub__(self, other): return self.value - other
def __mul__(self, other): return self.value * other
def __div__(self, other): return self.value / other
def __floordiv__(self, other): return self.value // other
def __mod__(self, other): return self.value % other
def __divmod__(self, other): return divmod(self.value, other)
def __pow__(self, other): return self.value ** other
def __lshift__(self, other): return self.value << other
def __rshift__(self, other): return self.value >> other
def __and__(self, other): return self.value & other
def __xor__(self, other): return self.value ^ other
def __or__(self, other): return self.value | other
class oldstyle_reflect:
def __init__(self, value): self.value = value
def __repr__(self): return "oldstyle_reflect(%s)" % self.value
def __radd__(self, other): return other + self.value
def __rsub__(self, other): return other - self.value
def __rmul__(self, other):
print("\toldstyle_reflect.__rmul__")
return other * self.value
def __rdiv__(self, other): return other / self.value
def __rfloordiv__(self, other): return other // self.value
def __rmod__(self, other): return other % self.value
def __rdivmod__(self, other): return divmod(other, self.value)
def __rpow__(self, other): return other ** self.value
def __rlshift__(self, other): return other << self.value
def __rrshift__(self, other): return other >> self.value
def __rand__(self, other): return self.value & other
def __rxor__(self, other): return self.value ^ other
def __ror__(self, other): return self.value | other
class oldstyle_inplace:
def __init__(self, value): self.value = value
def __repr__(self): return "oldstyle_inplace(%s)" % self.value
def __iadd__(self, other): return self.value + other
def __isub__(self, other): return self.value - other
def __imul__(self, other): return self.value * other
def __idiv__(self, other): return self.value / other
def __ifloordiv__(self, other): return self.value // other
def __imod__(self, other): return self.value % other
def __idivmod__(self, other): return divmod(self.value, other)
def __ipow__(self, other): return self.value ** other
def __ilshift__(self, other): return self.value << other
def __irshift__(self, other): return self.value >> other
def __iand__(self, other): return self.value & other
def __ixor__(self, other): return self.value ^ other
def __ior__(self, other): return self.value | other
class oldstyle_notdefined:
def __init__(self, value): self.value = value
def __repr__(self): return "oldstyle_notdefined(%s)" % self.value
class newstyle(object):
def __init__(self, value): self.value = value
def __repr__(self): return "newstyle(%s, %r)" % (self.value, type(self.value))
def __add__(self, other): return self.value + other
def __sub__(self, other): return self.value - other
def __mul__(self, other): return self.value * other
def __div__(self, other): return self.value / other
def __floordiv__(self, other): return self.value // other
def __mod__(self, other): return self.value % other
def __divmod__(self, other): return divmod(self.value, other)
def __pow__(self, other): return self.value ** other
def __lshift__(self, other): return self.value << other
def __rshift__(self, other): return self.value >> other
def __and__(self, other): return self.value & other
def __xor__(self, other): return self.value ^ other
def __or__(self, other): return self.value | other
class newstyle_reflect(object):
def __init__(self, value): self.value = value
def __repr__(self): return "newstyle_reflect(%s, %r)" % (self.value, type(self.value))
def __radd__(self, other): return other + self.value
def __rsub__(self, other): return other - self.value
def __rmul__(self, other):
print("\tnewstyle_reflect.__rmul__")
return other * self.value
def __rdiv__(self, other): return other / self.value
def __rfloordiv__(self, other): return other // self.value
def __rmod__(self, other): return other % self.value
def __rdivmod__(self, other): return divmod(other, self.value)
def __rpow__(self, other): return other ** self.value
def __rlshift__(self, other): return other << self.value
def __rrshift__(self, other): return other >> self.value
def __rand__(self, other): return self.value & other
def __rxor__(self, other): return self.value ^ other
def __ror__(self, other): return self.value | other
class newstyle_inplace(object):
def __init__(self, value): self.value = value
def __repr__(self): return "newstyle_inplace(%s, %r)" % (self.value, type(self.value))
def __iadd__(self, other): return self.value + other
def __isub__(self, other): return self.value - other
def __imul__(self, other): return self.value * other
def __idiv__(self, other): return self.value / other
    def __ifloordiv__(self, other): return self.value // other
def __imod__(self, other): return self.value % other
def __idivmod__(self, other): return divmod(self.value, other)
def __ipow__(self, other): return self.value ** other
def __ilshift__(self, other): return self.value << other
def __irshift__(self, other): return self.value >> other
def __iand__(self, other): return self.value & other
def __ixor__(self, other): return self.value ^ other
def __ior__(self, other): return self.value | other
class newstyle_notdefined(object):
def __init__(self, value): self.value = value
def __repr__(self): return "newstyle_notdefined(%s, %r)" % (self.value, type(self.value))
import sys
class common(object):
def normal(self, leftc, rightc):
for a in leftc:
for b in rightc:
try:
printwith("case", a, "+", b, type(a), type(b))
printwithtype(a + b)
except:
printwith("same", sys.exc_info()[0])
try:
printwith("case", a, "-", b, type(a), type(b))
printwithtype(a - b)
except:
printwith("same", sys.exc_info()[0])
try:
printwith("case", a, "*", b, type(a), type(b))
printwithtype(a * b)
except:
printwith("same", sys.exc_info()[0])
try:
printwith("case", a, "/", b, type(a), type(b))
printwithtype(a / b)
except:
printwith("same", sys.exc_info()[0])
try:
printwith("case", a, "//", b, type(a), type(b))
printwithtype(a // b)
except:
|
tooringanalytics/pyambiguity
|
m2py.py
|
Python
|
mit
| 1,433
| 0.001396
|
#!/usr/bin/env python
''' Debug & test support for MATLAB-to-Python conversion.
'''
import os
import numpy as np
from scipy.io import loadmat
def dmpdat(s, e):
""" Dump a data structure with its name & shape.
Params:
-------
s: str. The name of the structure
e: expression. An expression to dump. Implicitly assumes e is
array_like
"""
print("%s:" % s)
print(e)
print("%s.shape:" % s)
print(e.shape)
print("%s.dtype:" % s)
print(e.dtype)
print("-------------------------------------------")
def hbrk(msg=None):
if msg is not None:
print(msg)
exit(-1)
def brk(s, e):
""" Used for debugging, just break the script, dumping data.
"""
dmpdat(s, e)
exit(-1)
def chkdat(t, s, e, rtol=1e-05, atol=1e-08):
""" Check this matrix against data dumped by octave, with
given tolerance
"""
mat = loadmat(os.path.join('check_data', t, s) + '.mat')['ex']
is_equal = np.allclose(e, mat, rtol=rtol, atol=atol)
#is_equal = np.array_equal(e, mat)
print("%s:%s:iEqual=%d" % (t, s, is_equal))
if not is_equal:
dmpdat(s + '<python>', e)
dmpdat(s + '<matlab>', mat)
np.savetxt(os.path.join("check_data", t, s) + '_py
|
thon_err', e)
np.savetxt(os.path.join("check_d
|
ata", t, s) + '_matlab_err', mat)
print("FAILED check on expr: %s, signal: %s" % (s, t))
#hbrk()
return is_equal
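# Added illustration of the np.allclose tolerances used above (numpy is already
# imported at the top of this file): a and b compare equal when
# |a - b| <= atol + rtol * |b|.
assert np.allclose(1.0, 1.0 + 5e-6, rtol=1e-05, atol=1e-08)        # within rtol
assert not np.allclose(1.0, 1.0 + 2e-5, rtol=1e-05, atol=1e-08)    # outside rtol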
|
antoinecarme/pyaf
|
tests/model_control/detailed/transf_RelativeDifference/model_control_one_enabled_RelativeDifference_PolyTrend_Seasonal_Minute_ARX.py
|
Python
|
bsd-3-clause
| 167
| 0.047904
|
import tests.model_control.test_ozone_custom_models_enabled as testmod
testmod.build_model( ['RelativeDifference'] , ['PolyTrend'] , ['Seasonal_Minute'] , ['ARX'] );
|
edmorley/django
|
tests/admin_widgets/test_autocomplete_widget.py
|
Python
|
bsd-3-clause
| 5,005
| 0.000999
|
from django import forms
from django.contrib.admin.widgets import AutocompleteSelect
from django.forms import ModelChoiceField
from django.test import TestCase, override_settings
from django.utils import translation
from .models import Album, Band
class AlbumForm(forms.ModelForm):
class Meta:
model = Album
fields = ['band', 'featuring']
widgets = {
'band': AutocompleteSelect(
Album._meta.get_field('band').remote_field,
attrs={'class': 'my-class'},
),
'featuring': AutocompleteSelect(
Album._meta.get_field('featuring').remote_field,
)
}
class NotRequiredBandForm(forms.Form):
band = ModelChoiceField(
queryset=Album.objects.all(),
widget=AutocompleteSelect(Album._meta.get_field('band').remote_field),
required=False,
)
class RequiredBandForm(forms.Form):
band = ModelChoiceField(
queryset=Album.objects.all(),
widget=AutocompleteSelect(Album._meta.get_field('band').remote_field),
required=True,
)
@override_settings(ROOT_URLCONF='admin_widgets.urls')
class AutocompleteMixinTests(TestCase):
empty_option = '<option value=""></option>'
maxDiff = 1000
def test_build_attrs(self):
form = AlbumForm()
attrs = form['band'].field.widget.get_context(name='my_field', value=None, attrs={})['widget']['attrs']
self.assertEqual(attrs, {
            'class': 'my-class admin-autocomplete',
'data-ajax--cache': 'true',
'data-ajax--type': 'GET',
'data-ajax--url': '/admin_widgets/band/autocomplete/',
'data-theme': 'admin-autocomplete',
'data-allow-clear': 'false',
'data-placeholder': ''
})
def test_build_attrs_not_required_field(self):
form = NotRequiredBandForm()
attrs = form['band'].field.widget.build_attrs({})
self.assertJSONEqual(attrs['data-allow-clear'], True)
def test_build_attrs_required_field(self):
form = RequiredBandForm()
attrs = form['band'].field.widget.build_attrs({})
self.assertJSONEqual(attrs['data-allow-clear'], False)
def test_get_url(self):
rel = Album._meta.get_field('band').remote_field
w = AutocompleteSelect(rel)
url = w.get_url()
self.assertEqual(url, '/admin_widgets/band/autocomplete/')
def test_render_options(self):
beatles = Band.objects.create(name='The Beatles', style='rock')
who = Band.objects.create(name='The Who', style='rock')
# With 'band', a ForeignKey.
form = AlbumForm(initial={'band': beatles.pk})
output = form.as_table()
selected_option = '<option value="%s" selected>The Beatles</option>' % beatles.pk
option = '<option value="%s">The Who</option>' % who.pk
self.assertIn(selected_option, output)
self.assertNotIn(option, output)
# With 'featuring', a ManyToManyField.
form = AlbumForm(initial={'featuring': [beatles.pk, who.pk]})
output = form.as_table()
selected_option = '<option value="%s" selected>The Beatles</option>' % beatles.pk
option = '<option value="%s" selected>The Who</option>' % who.pk
self.assertIn(selected_option, output)
self.assertIn(option, output)
    def test_render_options_not_required_field(self):
"""Empty option is present if the field isn't required."""
form = NotRequiredBandForm()
output = form.as_table()
self.assertIn(self.empty_option, output)
    def test_render_options_required_field(self):
        """Empty option isn't present if the field is required."""
form = RequiredBandForm()
output = form.as_table()
self.assertNotIn(self.empty_option, output)
def test_media(self):
rel = Album._meta.get_field('band').remote_field
base_files = (
'admin/js/vendor/jquery/jquery.min.js',
'admin/js/vendor/select2/select2.full.min.js',
# Language file is inserted here.
'admin/js/jquery.init.js',
'admin/js/autocomplete.js',
)
languages = (
('de', 'de'),
# Language with code 00 does not exist.
('00', None),
# Language files are case sensitive.
('sr-cyrl', 'sr-Cyrl'),
('zh-cn', 'zh-CN'),
)
for lang, select_lang in languages:
with self.subTest(lang=lang):
if select_lang:
expected_files = (
base_files[:2] +
(('admin/js/vendor/select2/i18n/%s.js' % select_lang),) +
base_files[2:]
)
else:
expected_files = base_files
with translation.override(lang):
self.assertEqual(AutocompleteSelect(rel).media._js, expected_files)
|
Andrew-Dickinson/FantasyFRC
|
customMechanize/_googleappengine.py
|
Python
|
gpl-2.0
| 26,831
| 0.001565
|
"""HTTP related handlers.
Note that some other HTTP handlers live in more specific modules: _auth.py,
_gzip.py, etc.
Copyright 2002-2006 John J Lee <jjl@pobox.com>
This code is free software; you can redistribute it and/or modify it
under the terms of the BSD or ZPL 2.
|
1 licenses (see the file
COPYING.txt included with the distribution).
"""
import time, htmlentitydefs, logging, \
fakesocket, urllib2, urllib, httplib, sgmllib
from urllib2 import URLError, HTTPError, BaseHandler
from cStringIO import StringIO
from _clientcookie import CookieJar
from _headersutil import is_html
from _html import unescape, unescape_charref
from _request import Request
from _response import closeable_response, response_seek_wrapper
import _rfc3986
import _sockettimeout
debug = logging.getLogger("mechanize").debug
debug_robots = logging.getLogger("mechanize.robots").debug
# monkeypatch urllib2.HTTPError to show URL
## def urllib2_str(self):
## return 'HTTP Error %s: %s (%s)' % (
## self.code, self.msg, self.geturl())
## urllib2.HTTPError.__str__ = urllib2_str
CHUNK = 1024 # size of chunks fed to HTML HEAD parser, in bytes
DEFAULT_ENCODING = 'latin-1'
#try:
# socket._fileobject("fake socket", close=True)
#except TypeError:
# python <= 2.4
# create_readline_wrapper = socket._fileobject
#else:
def create_readline_wrapper(fh):
return fakesocket._fileobject(fh, close=True)
# This adds "refresh" to the list of redirectables and provides a redirection
# algorithm that doesn't go into a loop in the presence of cookies
# (Python 2.4 has this new algorithm, 2.3 doesn't).
class HTTPRedirectHandler(BaseHandler):
# maximum number of redirections to any single URL
# this is needed because of the state that cookies introduce
max_repeats = 4
# maximum total number of redirections (regardless of URL) before
# assuming we're in a loop
max_redirections = 10
# Implementation notes:
# To avoid the server sending us into an infinite loop, the request
# object needs to track what URLs we have already seen. Do this by
# adding a handler-specific attribute to the Request object. The value
# of the dict is used to count the number of times the same URL has
# been visited. This is needed because visiting the same URL twice
# does not necessarily imply a loop, thanks to state introduced by
# cookies.
# Always unhandled redirection codes:
# 300 Multiple Choices: should not handle this here.
# 304 Not Modified: no need to handle here: only of interest to caches
# that do conditional GETs
# 305 Use Proxy: probably not worth dealing with here
# 306 Unused: what was this for in the previous versions of protocol??
def redirect_request(self, newurl, req, fp, code, msg, headers):
"""Return a Request or None in response to a redirect.
This is called by the http_error_30x methods when a redirection
response is received. If a redirection should take place, return a
new Request to allow http_error_30x to perform the redirect;
otherwise, return None to indicate that an HTTPError should be
raised.
"""
if code in (301, 302, 303, "refresh") or \
(code == 307 and not req.has_data()):
# Strictly (according to RFC 2616), 301 or 302 in response to
# a POST MUST NOT cause a redirection without confirmation
# from the user (of urllib2, in this case). In practice,
# essentially all clients do redirect in this case, so we do
# the same.
# XXX really refresh redirections should be visiting; tricky to
# fix, so this will wait until post-stable release
new = Request(newurl,
headers=req.headers,
origin_req_host=req.get_origin_req_host(),
unverifiable=True,
visit=False,
)
new._origin_req = getattr(req, "_origin_req", req)
return new
else:
raise HTTPError(req.get_full_url(), code, msg, headers, fp)
def http_error_302(self, req, fp, code, msg, headers):
# Some servers (incorrectly) return multiple Location headers
# (so probably same goes for URI). Use first header.
if headers.has_key('location'):
newurl = headers.getheaders('location')[0]
elif headers.has_key('uri'):
newurl = headers.getheaders('uri')[0]
else:
return
newurl = _rfc3986.clean_url(newurl, "latin-1")
newurl = _rfc3986.urljoin(req.get_full_url(), newurl)
# XXX Probably want to forget about the state of the current
# request, although that might interact poorly with other
# handlers that also use handler-specific request attributes
new = self.redirect_request(newurl, req, fp, code, msg, headers)
if new is None:
return
# loop detection
# .redirect_dict has a key url if url was previously visited.
if hasattr(req, 'redirect_dict'):
visited = new.redirect_dict = req.redirect_dict
if (visited.get(newurl, 0) >= self.max_repeats or
len(visited) >= self.max_redirections):
raise HTTPError(req.get_full_url(), code,
self.inf_msg + msg, headers, fp)
else:
visited = new.redirect_dict = req.redirect_dict = {}
visited[newurl] = visited.get(newurl, 0) + 1
# Don't close the fp until we are sure that we won't use it
# with HTTPError.
fp.read()
fp.close()
return self.parent.open(new)
http_error_301 = http_error_303 = http_error_307 = http_error_302
http_error_refresh = http_error_302
inf_msg = "The HTTP server returned a redirect error that would " \
"lead to an infinite loop.\n" \
"The last 30x error message was:\n"
# XXX would self.reset() work, instead of raising this exception?
class EndOfHeadError(Exception): pass
class AbstractHeadParser:
# only these elements are allowed in or before HEAD of document
head_elems = ("html", "head",
"title", "base",
"script", "style", "meta", "link", "object")
_entitydefs = htmlentitydefs.name2codepoint
_encoding = DEFAULT_ENCODING
def __init__(self):
self.http_equiv = []
def start_meta(self, attrs):
http_equiv = content = None
for key, value in attrs:
if key == "http-equiv":
http_equiv = self.unescape_attr_if_required(value)
elif key == "content":
content = self.unescape_attr_if_required(value)
if http_equiv is not None and content is not None:
self.http_equiv.append((http_equiv, content))
def end_head(self):
raise EndOfHeadError()
def handle_entityref(self, name):
#debug("%s", name)
self.handle_data(unescape(
'&%s;' % name, self._entitydefs, self._encoding))
def handle_charref(self, name):
#debug("%s", name)
self.handle_data(unescape_charref(name, self._encoding))
def unescape_attr(self, name):
#debug("%s", name)
return unescape(name, self._entitydefs, self._encoding)
def unescape_attrs(self, attrs):
#debug("%s", attrs)
escaped_attrs = {}
for key, val in attrs.items():
escaped_attrs[key] = self.unescape_attr(val)
return escaped_attrs
def unknown_entityref(self, ref):
self.handle_data("&%s;" % ref)
def unknown_charref(self, ref):
self.handle_data("&#%s;" % ref)
try:
import HTMLParser
except ImportError:
pass
else:
class XHTMLCompatibleHeadParser(AbstractHeadParser,
HTMLParser.HTMLParser):
def __init__(self):
HTMLParser.HTMLParser.__init__(self)
AbstractHeadParser.__init__(self)
def handle_starttag(self, tag, attrs):
if tag not in self.head_elems:
|
JunctionAt/JunctionWWW
|
config/badges.py
|
Python
|
agpl-3.0
| 862
| 0
|
BADGES = [
{
'badge_id': 'tech',
        'img_path': '/static/img/badges/wrench.svg',
'name': 'Junction Technical Staff',
'description': 'Actively serves on Junction staff',
'priority': 2000
},
{
'badge_id': 'staff',
'img_path': '/static/img/badges/award_fill.svg',
'name': 'Junction Staff',
'description': 'Actively serves on Junction staff',
'priority': 1000
},
{
'badge_id': 'staff_emeritus',
'img_path': '/static/img/badges/heart_fill.svg',
'name': 'Staff Emeritus',
'description': 'Served on Junction staff once',
'priority': 0
},
{
'badge_id': 'butts',
'img_path': '/static/img/badges/heart_stroke.svg',
'name': 'BUTTS',
'description': 'Butts are nice',
'priority': 0
}
]
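# Lookup sketch (hypothetical helper, not part of the original config file):
BADGES_BY_ID = {badge['badge_id']: badge for badge in BADGES}
# e.g. BADGES_BY_ID['staff']['name'] -> 'Junction Staff'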
|
AustereCuriosity/astropy
|
astropy/samp/web_profile.py
|
Python
|
bsd-3-clause
| 5,804
| 0.000689
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from ..extern.six.moves.urllib.parse import parse_qs
from ..extern.six.moves.urllib.request import urlopen
from ..extern.six.moves import input
from ..utils.data import get_pkg_data_contents
from .standard_profile import (SAMPSimpleXMLRPCRequestHandler,
ThreadingXMLRPCServer)
__all__ = []
CROSS_DOMAIN = get_pkg_data_contents('data/crossdomain.xml')
CLIENT_ACCESS_POLICY = get_pkg_data_contents('data/clientaccesspolicy.xml')
class WebProfileRequestHandler(SAMPSimpleXMLRPCRequestHandler):
"""
Handler of XMLRPC requests performed through the Web Profile.
"""
def _send_CORS_header(self):
if self.headers.get('Origin') is not None:
method = self.headers.get('Access-Control-Request-Method')
if method and self.command == "OPTIONS":
# Preflight method
self.send_header('Content-Length', '0')
self.send_header('Access-Control-Allow-Origin',
self.headers.get('Origin'))
self.send_header('Access-Control-Allow-Methods', method)
self.send_header('Access-Control-Allow-Headers', 'Content-Type')
self.send_header('Access-Control-Allow-Credentials', 'true')
else:
# Simple method
self.send_header('Access-Control-Allow-Origin',
self.headers.get('Origin'))
self.send_header('Access-Control-Allow-Headers', 'Content-Type')
self.send_header('Access-Control-Allow-Credentials', 'true')
def end_headers(self):
self._send_CORS_header()
SAMPSimpleXMLRPCRequestHandler.end_headers(self)
def _serve_cross_domain_xml(self):
cross_domain = False
if self.path == "/crossdomain.xml":
# Adobe standard
response = CROSS_DOMAIN
self.send_response(200, 'OK')
self.send_header('Content-Type', 'text/x-cross-domain-policy')
self.send_header("Content-Length", "{0}".format(len(response)))
self.end_headers()
self.wfile.write(response.encode('utf-8'))
self.wfile.flush()
cross_domain = True
elif self.path == "/clientaccesspolicy.xml":
# Microsoft standard
response = CLIENT_ACCESS_POLICY
self.send_response(200, 'OK')
self.send_header('Content-Type', 'text/xml')
self.send_header("Content-Length", "{0}".format(len(response)))
self.end_headers()
self.wfile.write(response.encode('utf-8'))
self.wfile.flush()
cross_domain = True
return cross_domain
def do_POST(self):
if self._serve_cross_domain_xml():
return
return SAMPSimpleXMLRPCRequestHandler.do_POST(self)
def do_HEAD(self):
if not self.is_http_path_valid():
self.report_404()
return
if self._serve_cross_domain_xml():
return
def do_OPTIONS(self):
self.send_response(200, 'OK')
self.end_headers()
def do_GET(self):
if not self.is_http_path_valid():
self.report_404()
return
split_path = self.path.split('?')
if split_path[0] in ['/translator/{}'.format(clid) for clid in self.server.clients]:
# Request of a file proxying
urlpath = parse_qs(split_path[1])
try:
proxyfile = urlopen(urlpath["ref"][0])
self.send_response(200, 'OK')
self.end_headers()
self.wfile.write(proxyfile.read())
proxyfile.close()
except IOError:
self.report_404()
return
if self._serve_cross_domain_xml():
return
def is_http_path_valid(self):
valid_paths = (["/clientaccesspolicy.xml", "/crossdomain.xml"] +
['/translator/{}'.format(clid) for clid in self.server.clients])
return self.path.split('?')[0] in valid_paths
class WebProfileXMLRPCServer(ThreadingXMLRPCServer):
"""
XMLRPC server supporting the SAMP Web Profile.
"""
def __init__(self, addr, log=None, requestHandler=WebProfileRequestHandler,
logRequests=True, allow_none=True, encoding=None):
self.clients = []
ThreadingXMLRPCServer.__init__(self, addr, log, requestHandler,
logRequests, allow_none, encoding)
def add_client(self, client_id):
self.clients.append(client_id)
def remove_client(self, client_id):
try:
self.clients.remove(client_id)
except ValueError:
# No warning here because this method gets called for all clients,
# not just web clients, and we expect it to fail for non-web
# clients.
pass
def web_profile_text_dialog(request, queue):
samp_name = "unknown"
if isinstance(request[0], str):
# To support the old protocol version
samp_name = request[0]
else:
samp_name = request[0]["samp.name"]
text = \
"""A Web application which declares to be
Name: {}
Origin: {}
is requesting to be registered with the SAMP Hub.
Pay attention that if you permit its registration, such
application will acquire all current user privileges, like
file read/write.
Do you give your consent? [yes|no]""".format(samp_name, request[2])
print(text)
answer = input(">>> ")
queue.put(answer.lower() in ["yes", "y"])
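# Editor's sketch (not from the original module): driving the consent prompt
# above with a standard-library queue. Interactive -- answer yes/no at the
# prompt; the request tuple below is made up.
if __name__ == '__main__':
    try:
        from Queue import Queue  # Python 2
    except ImportError:
        from queue import Queue  # Python 3
    q = Queue()
    web_profile_text_dialog(({'samp.name': 'DemoApp'}, None,
                             'http://example.org'), q)
    print('registration allowed: {}'.format(q.get()))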
|
jokajak/itweb
|
data/env/lib/python2.6/site-packages/repoze.who_testutil-1.0.1-py2.6.egg/tests/fixture/__init__.py
|
Python
|
gpl-3.0
| 729
| 0.001372
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2009, Gustavo Narea <me@gustavonarea.net>.
# All Rights Reserved.
#
# This software is subject to the provisions of the BSD-like license at
# http://www.repoze.org/LICENSE.txt. A copy of the license should accompany
# this distribution. THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL
# EXPRESS OR IMPLIED WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND
# FITNESS FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Fixture collection for the test suite."""
|
claudiob/pypeton
|
pypeton/files/django/deploy/django_wsgi_production.py
|
Python
|
mit
| 344
| 0.026163
|
import os, sys
PATH = os.path.join(os.path.dirname(__file__), '..')
sys.path += [
os.path.join(PATH, 'project/apps'),
os.path.join(PATH, 'project'),
os.path.join(PATH, '..'),
PATH]
os.environ['DJANGO_SETTINGS_MODULE'] = 'project.settings.production'
import django.core.handlers.wsgi
application = django.core.handlers.wsgi.WSGIHandler()
|
jtraver/dev
|
python3/globals/test1.py
|
Python
|
mit
| 308
| 0.003247
|
#!/usr/bin/env python3
from framework import do_exit, get_globals, main
def do_work():
    global g_test_import
global globals1
print("do_work")
globals1 = get_globals()
g_test_import = globals1["g_test_import"]
print("do_work: g_test_import
|
= %s" % str(g_test_import))
main(do_work)
|
jithinbp/SEELablet
|
SEEL/SENSORS/SHT21.py
|
Python
|
gpl-3.0
| 2,349
| 0.063857
|
from __future__ import print_function
from numpy import int16
import time
def connect(route,**args):
'''
route can either be I.I2C , or a radioLink instance
'''
return SHT21(route,**args)
class SHT21():
RESET = 0xFE
TEMP_ADDRESS = 0xF3
HUMIDITY_ADDRESS = 0xF5
selected=0xF3
NUMPLOTS=1
PLOTNAMES = ['Data']
ADDRESS = 0x40
	name = 'Humidity/Temperature'
def __init__(self,I2C,**args):
self.I2C=I2C
self.ADDRESS = args.get('address',self.ADDRESS)
self.name = 'Humidity/Temperature'
'''
try:
print ('switching baud to 400k')
self.I2C.configI2C(400e3)
except:
print ('FAILED TO CHANGE BAUD RATE')
'''
		self.params={'selectParameter':['temperature','humidity']}
self.init('')
def init(self,x):
self.I2C.writeBulk(self.ADDRESS,[self.RESET]) #soft reset
time.sleep(0.1)
def rawToTemp(self,vals):
if vals:
if len(vals):
v = (vals[0]<<8)|(vals[1]&0xFC) #make integer & remove status bits
v*=175.72; v/= (1<<16); v-=46.85
return [v]
return False
def rawToRH(self,vals):
if vals:
if len(vals):
v = (vals[0]<<8)|(vals[1]&0xFC) #make integer & remove status bits
v*=125.; v/= (1<<16); v-=6
return [v]
return False
@staticmethod
def _calculate_checksum(data, number_of_bytes):
"""5.7 CRC Checksum using the polynomial given in the datasheet
Credits: https://github.com/jaques/sht21_python/blob/master/sht21.py
"""
# CRC
POLYNOMIAL = 0x131 # //P(x)=x^8+x^5+x^4+1 = 100110001
crc = 0
# calculates 8-Bit checksum with given polynomial
for byteCtr in range(number_of_bytes):
crc ^= (data[byteCtr])
for bit in range(8, 0, -1):
if crc & 0x80:
crc = (crc << 1) ^ POLYNOMIAL
else:
crc = (crc << 1)
return crc
def selectParameter(self,param):
if param=='temperature':self.selected=self.TEMP_ADDRESS
elif param=='humidity':self.selected=self.HUMIDITY_ADDRESS
def getRaw(self):
self.I2C.writeBulk(self.ADDRESS,[self.selected])
if self.selected==self.TEMP_ADDRESS:time.sleep(0.1)
elif self.selected==self.HUMIDITY_ADDRESS:time.sleep(0.05)
vals = self.I2C.simpleRead(self.ADDRESS,3)
if vals:
if self._calculate_checksum(vals,2)!=vals[2]:
return False
print (vals)
if self.selected==self.TEMP_ADDRESS:return self.rawToTemp(vals)
elif self.selected==self.HUMIDITY_ADDRESS:return self.rawToRH(vals)
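# Editor's sketch (not part of the original driver): exercising the CRC-8
# helper above on a made-up two-byte reading; no hardware is needed.
if __name__ == '__main__':
	raw = [0x63, 0x4C]
	print ('CRC-8 of %s = 0x%02X' % (raw, SHT21._calculate_checksum(raw, 2)))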
|
ruibarreira/linuxtrail
|
usr/lib/python3/dist-packages/softwareproperties/gtk/DialogAdd.py
|
Python
|
gpl-3.0
| 3,128
| 0.005754
|
# dialog_add.py.in - dialog to add a new repository
#
# Copyright (c) 2004-2005 Canonical
# 2005 Michiel Sikkes
#
# Authors:
# Michael Vogt <mvo@debian.org>
# Michiel Sikkes <michiels@gnome.org>
# Sebastian Heinlein <glatzor@ubuntu.com>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
import os
from gi.repository import Gtk
from gettext import gettext as _
from aptsources.sourceslist import SourceEntry
from softwareproperties.gtk.utils import *
class DialogAdd:
def __init__(self, parent, sourceslist, datadir, distro):
"""
        Initialize the dialog that allows adding a new source by entering
        the raw apt line.
"""
self.sourceslist = sourceslist
self.parent = parent
self.datadir = datadir
# gtk stuff
        setup_ui(self, os.path.join(datadir, "gtkbuilder", "dialog-add.ui"),
                 domain="software-properties")
self.dialog = self.dialog_add_custom
self.dialog.set_transient_for(self.parent)
self.entry = self.entry_source_line
self.button_add = self.button_add_source
self.entry.connect("changed", self.check_line)
# Create an example deb line from the currently used distro
if distro:
example = "%s %s %s %s" % (distro.binary_type,
distro.source_template.base_uri,
distro.codename,
distro.source_template.components[0].name)
else:
example = "deb http://ftp.debian.org sarge main"
# L10N: the example is of the format: deb http://ftp.debian.org sarge main
msg = _("The APT line includes the type, location and components of a "
"repository, for example '%s'.") % ("<i>%s</i>" % example)
self.label_example_line.set_label(msg)
def run(self):
res = self.dialog.run()
self.dialog.hide()
if res == Gtk.ResponseType.OK:
line = self.entry.get_text() + "\n"
else:
line = None
return line
def check_line(self, *args):
"""
Check for a valid apt line and set the sensitiveness of the
button 'add' accordingly
"""
line = self.entry.get_text() + "\n"
if line.startswith("ppa:"):
self.button_add.set_sensitive(True)
return
source_entry = SourceEntry(line)
        if source_entry.invalid or source_entry.disabled:
self.button_add.set_sensitive(False)
else:
self.button_add.set_sensitive(True)
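# Editor's sketch (not from the original dialog): the same validity check,
# run standalone on a sample apt line (SourceEntry is imported above).
if __name__ == '__main__':
    entry = SourceEntry("deb http://ftp.debian.org sarge main\n")
    print(entry.invalid, entry.disabled)  # False False for a well-formed line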
|
enthought/etsproxy
|
enthought/util/equivalence.py
|
Python
|
bsd-3-clause
| 56
| 0
|
# proxy module
from codetools.util.equivalence import *
|
kvas-it/cli-mock
|
tests/test_creplay.py
|
Python
|
mit
| 1,051
| 0
|
def test_default_log(creplay, testlog):
ret = creplay('echo', 'foo', creplay_args=[], cwd=testlog.dirname)
assert ret.success
assert ret.stdout == 'foo\n'
assert ret.stderr == ''
def test_echo_n(creplay, logfile):
ret = creplay('echo', '-n', 'foo')
assert ret.success
assert ret.stdout == 'foo'
assert ret.stderr == ''
def test_err(creplay, logfile):
ret = creplay('foo', 'bar')
assert not ret.success
assert ret.stdout == ''
assert ret.stderr == 'Error\n'
def test_order(creplay, tmpdir, logfile):
ret = creplay('./script.py')
assert ret.success
assert ret.stdout == 'foo\n123baz\n'
assert ret.stderr == 'bar\n'
def test_record_replay(crecord, tmpdir, logfile, testlog):
ret = crecord('creplay', '-l', testlog.strpath, 'foo')
assert ret.success
assert ret.stdout == 'foo\nbaz\n'
assert ret.stderr == 'bar\n'
lines = set(logfile.read().split('\n')[1:-1])
# Unfortunately the order can get messed up.
    assert lines == {'> foo', '! bar', '> baz', '= 0'}
|
kasmith/cbmm-project-christmas
|
ContainmentAnalysis/parseData.py
|
Python
|
mit
| 2,805
| 0.003565
|
import os, json
iflnm = os.path.join('..','psiturk-rg-cont','trialdata.csv')
oflnm = "rawdata.csv"
with open(iflnm, 'rU') as ifl, open(oflnm, 'w') as ofl:
ofl.write('WID,Condition,Trial,TrialBase,Class,ContainmentType,ContainmentLevel,TrialNum,MotionDirection,Response,RT,Goal,Switched,RawResponse,WasBad\n')
for rln in ifl:
rln = rln.strip('\n')
wid, _, _, rdat = rln.split(',',3)
        dat = json.loads(rdat.strip("\"'").replace("\"\"", "\""))
if isinstance(dat[5], bool):
trnm, order, rt, rawresp, mottype, wassw, score, realgoal, wasbad, cond = dat
trspl = trnm.split('_')
dowrite = True
trbase = trspl[0] + '_' + trspl[1]
tnum = trspl[1]
if trspl[0] == 'regular':
trclass = "regular"
conttype = "NA"
contlevel = "NA"
else:
trclass = "contained"
conttype = trspl[0]
contlevel = trspl[2]
if not wassw:
wassw = "False"
if rawresp == 201:
actresp = "R"
normresp = "R"
elif rawresp == 202:
actresp = "G"
normresp = "G"
elif rawresp == 299:
actresp = "NA"
normresp = "NA"
else:
dowrite = False
if realgoal == 201:
rg = "R"
elif realgoal == 202:
rg = "G"
else:
dowrite = False
else:
wassw = "True"
if rawresp == 201:
actresp = "R"
normresp = "G"
elif rawresp == 202:
actresp = "G"
normresp = "R"
elif rawresp == 299:
actresp = "NA"
normresp = "NA"
else:
dowrite = False
if realgoal == 201:
rg = "G"
elif realgoal == 202:
rg = "R"
else:
dowrite = False
if mottype == 1:
mot = 'Fwd'
elif mottype == 0:
mot = 'None'
else:
mot = 'Rev'
if wasbad:
wb = 'True'
else:
wb = 'False'
if dowrite:
ofl.write(wid + ',' + str(cond) + ',' + trnm + ',' + trbase + ',' + trclass + ',' + conttype + ',' + contlevel + ',' + tnum + ',')
ofl.write(mot + ',' + normresp + ',' + str(rt) + ',' + rg + ',' + wassw + ',' + actresp + ',' + wb + '\n')
|
rusenask/mirage
|
stubo/__init__.py
|
Python
|
gpl-3.0
| 585
| 0.005128
|
"""
stubo
~~~~~
    Stub-O-Matic - Enable automated testing by mastering system dependencies.
Use when reality is simply not good enough.
:copyright: (c) 2015 by OpenCredo.
:license: GPLv3, see LICENSE for more details.
"""
import os
import sys
version = "0.8.3"
version_info = tuple(version.split('.'))
def stubo_path():
# Find folder that this module is contained in
module = sys.modules[__name__]
    return os.path.dirname(os.path.abspath(module.__file__))
def static_path(*args):
return os.path.join(stubo_path(), 'static', *args)
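# Editor's sketch (not part of the original module): resolving a bundled
# static asset; 'app.js' is a made-up file name.
if __name__ == '__main__':
    print(static_path('scripts', 'app.js'))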
|
aljungberg/pyle
|
pyle.py
|
Python
|
bsd-3-clause
| 5,025
| 0.004776
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Pyle makes it easy to use Python as a replacement for command line tools such as `sed` or `perl`.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future import standard_library
standard_library.install_aliases()
from future.utils import string_types
import argparse
import io
import re
import sh
import sys
import traceback
__version__ = "0.4.1"
STANDARD_MODULES = {
're': re,
'sh': sh
}
def truncate_ellipsis(line, length=30):
"""Truncate a line to the specified length followed by ``...`` unless its shorter than length already."""
return line if len(line) < length else line[:length - 3] + "..."
def pyle_evaluate(expressions=None, modules=(), inplace=False, files=None, print_traceback=False):
"""The main method of pyle."""
eval_globals = {}
eval_globals.update(STANDARD_MODULES)
for module_arg in modules or ():
for module in module_arg.strip().split(","):
module = module.strip()
if module:
eval_globals[module] = __import__(module)
if not expressions:
# Default 'do nothing' program
expressions = ['line']
encoding = sys.getdefaultencoding()
files = files or ['-']
eval_locals = {}
for file in files:
if file == '-':
file = sys.stdin
out_buf = sys.stdout if not inplace else io.StringIO()
out_line = None
        with (io.open(file, 'r', encoding=encoding) if not hasattr(file, 'read') else file) as in_file:
for num, line in enumerate(in_file.readlines()):
was_whole_line = False
if line[-1] == '\n':
was_whole_line = True
line = line[:-1]
expr = ""
try:
for expr in expressions:
words = [word.strip()
for word in re.split(r'\s+', line)
if word]
eval_locals.update({
'line': line,
'words': words,
'filename': in_file.name,
'num': num
})
out_line = eval(expr, eval_globals, eval_locals)
if out_line is None:
continue
# If the result is something list-like or iterable,
# output each item space separated.
if not isinstance(out_line, string_types):
try:
out_line = u' '.join(str(part) for part in out_line)
except:
# Guess it wasn't a list after all.
out_line = str(out_line)
line = out_line
except Exception as e:
sys.stdout.flush()
sys.stderr.write("At %s:%d ('%s'): `%s`: %s\n" % (
in_file.name, num, truncate_ellipsis(line), expr, e))
if print_traceback:
traceback.print_exc(None, sys.stderr)
else:
if out_line is None:
continue
out_line = out_line or u''
out_buf.write(out_line)
if was_whole_line:
out_buf.write('\n')
if inplace:
with io.open(file, 'w', encoding=encoding) as out_file:
out_file.write(out_buf.getvalue())
out_buf.close()
def pyle(argv=None):
"""Execute pyle with the specified arguments, or sys.argv if no arguments specified."""
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument("-m", "--modules", dest="modules", action='append',
help="import MODULE before evaluation. May be specified more than once.")
parser.add_argument("-i", "--inplace", dest="inplace", action='store_true', default=False,
help="edit files in place. When used with file name arguments, the files will be replaced by the output of the evaluation")
parser.add_argument("-e", "--expression", action="append",
dest="expressions", help="an expression to evaluate for each line")
parser.add_argument('files', nargs='*',
help="files to read as input. If used with --inplace, the files will be replaced with the output")
parser.add_argument("--traceback", action="store_true", default=False,
help="print a traceback on stderr when an expression fails for a line")
args = parser.parse_args() if not argv else parser.parse_args(argv)
pyle_evaluate(args.expressions, args.modules, args.inplace, args.files, args.traceback)
if __name__ == '__main__':
pyle()
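    # Editor's sketch (not from the original script): pyle also accepts an
    # explicit argv list, e.g. (with a hypothetical input file):
    #   pyle(['-e', 'line.upper()', 'notes.txt'])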
|
jonian/kickoff-player
|
apis/streams.py
|
Python
|
gpl-3.0
| 6,137
| 0.017598
|
from operator import itemgetter
from lxml import html
from fuzzywuzzy import fuzz
from helpers.utils import cached_request, thread_pool, replace_all
class StreamsApi:
def __init__(self, data, cache):
self.data = data
self.cache = cache
def get(self, url='', ttl=3600):
base_url = 'livefootballol.me'
response = cached_request(url=url, cache=self.cache, base_url=base_url, ttl=ttl)
try:
response = html.fromstring(response)
except TypeError:
response = None
return response
def get_channels_pages(self):
data = self.get('channels')
items = ['channels']
if data is not None:
for page in data.xpath('//div[@id="system"]//div[@class="pagination"]//a[@class=""]'):
items.append(page.get('href'))
return items
def get_channels_page_links(self, url):
data = self.get(url)
items = []
if data is not None:
for channel in data.xpath('//div[@id="system"]//table//a[contains(@href, "acestream")]'):
items.append(channel.get('href'))
return items
def get_channels_links(self):
pages = self.get_channels_pages()
items = thread_pool(self.get_channels_page_links, pages)
return items
def get_channel_details(self, url):
data = self.get(url)
items = []
if data is None:
return items
try:
root = data.xpath('//div[@id="system"]//table')[0]
name = root.xpath('.//td[text()="Name"]//following-sibling::td[1]')[0]
lang = root.xpath('.//td[text()="Language"]//following-sibling::td[1]')[0]
rate = root.xpath('.//td[text()="Bitrate"]//following-sibling::td[1]')[0]
strm = root.xpath('.//a[starts-with(@href, "acestream:")]')
name = name.text_content().strip()
lang = lang.text_content().strip()
rate = rate.text_content().strip()
name = self.parse_name(name)
lang = 'Unknown' if lang == '' or lang.isdigit() else lang
lang = 'Bulgarian' if lang == 'Bulgaria' else lang
rate = 0 if rate == '' else int(rate.replace('Kbps', ''))
channel = { 'name': name, 'language': lang.title() }
stream = { 'rate': rate, 'language': lang[:3].upper(), 'url': None, 'hd_url': None, 'host': 'Acestream' }
for link in strm:
href = link.get('href')
text = link.getparent().text_content()
if 'HD' in text:
stream['hd_url'] = href
else:
stream['url'] = href
if stream['url'] is not None and lang != 'Unknown':
items.append({ 'channel': channel, 'stream': stream })
except (IndexError, ValueError):
pass
return items
def get_channels(self):
links = self.get_channels_links()
items = thread_pool(self.get_channel_details, links)
return items
def save_channels(self):
data = self.get_channels()
items = []
for item in data:
stream = item['stream']
channel = self.data.set_single('channel', item['channel'], 'name')
ch_id = "%s_%s" % (channel.id, stream['host'].lower())
stream.update({ 'channel': channel.id, 'ch_id': ch_id })
items.append(stream)
self.data.set_multiple('stream', items, 'ch_id')
def get_events_page(self):
data = self.get()
page = None
if data is not None:
link = data.xpath('//div[@id="system"]//a[starts-with(@href, "/live-football")]')
page = link[0].get('href') if len(link) else None
return page
def get_events_page_links(self):
link = self.get_events_page()
data = self.get(url=link, ttl=120)
items = []
if data is not None:
for link in data.xpath('//div[@id="system"]//list[1]//a[contains(@href, "/streaming/")]'):
items.append(link.get('href'))
return items
def get_event_channels(self, url):
data = self.get(url=url, ttl=60)
items = []
if data is None:
return items
try:
root = data.xpath('//div[@id="system"]//table')[0]
comp = root.xpath('.//td[text()="Competition"]//following-sibling::td[1]')[0]
team = root.xpath('.//td[text()="Match"]//following-sibling::td[1]')[0]
comp = comp.text_content().strip()
team = team.text_content().strip().split('-')
home = team[0].strip()
away = team[1].strip()
event = { 'competition': comp, 'home': home, 'away': away }
chann = []
for link in data.xpath('//div[@id="system"]//a[contains(@hre
|
f, "/channels/")]'):
name = link.text_content()
name = self.parse_name(name)
chann.append(name)
if chann:
items.append({ 'event': event, 'channels': chann })
except (IndexError, ValueError):
pass
return items
def get_events(self):
links = self.get_events_page_links()
items = thread_pool(self.get_event_channels, links)
    return items
def save_events(self):
fixtures = self.data.load_fixtures(today_only=True)
events = self.get_events()
items = []
for fixture in fixtures:
channels = self.get_fixture_channels(events, fixture)
streams = self.data.get_multiple('stream', 'channel', channels)
for stream in streams:
items.append({
'fs_id': "%s_%s" % (fixture.id, stream.id),
'fixture': fixture.id,
'stream': stream
})
self.data.set_multiple('event', items, 'fs_id')
def get_fixture_channels(self, events, fixture):
chann = []
items = []
for item in events:
evnt = item['event']
comp = fuzz.ratio(fixture.competition.name, evnt['competition'])
home = fuzz.ratio(fixture.home_team.name, evnt['home'])
away = fuzz.ratio(fixture.away_team.name, evnt['away'])
comb = (comp + home + away) / 3
items.append({ 'ratio': comb, 'channels': item['channels'] })
if items:
sort = sorted(items, key=itemgetter('ratio'), reverse=True)[0]
if sort['ratio'] > 70:
chann = self.data.get_multiple('channel', 'name', sort['channels'])
chann = [c.id for c in chann]
return chann
def parse_name(self, name):
find = ['Acestream', 'AceStream']
name = replace_all(name, find, '').strip()
return name
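# Editor's sketch (not part of the original class): the fixture/event matching
# above averages three fuzzywuzzy ratios and accepts combined scores over 70.
# Standalone illustration with made-up competition and team names:
if __name__ == '__main__':
  comp = fuzz.ratio('Premier League', 'Premier League')
  home = fuzz.ratio('Manchester United', 'Man United')
  away = fuzz.ratio('Chelsea', 'Chelsea FC')
  print('combined ratio: {}'.format((comp + home + away) / 3))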
|
quangnguyen-asnet/python-django
|
mymenu/mymenu/wsgi.py
|
Python
|
mit
| 389
| 0
|
"""
WSGI config for mymenu project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mymenu.settings")
application = get_wsgi_application()
|
jazkarta/edx-platform-for-isc
|
cms/djangoapps/contentstore/views/item.py
|
Python
|
agpl-3.0
| 49,511
| 0.003716
|
"""Views for items (modules)."""
from __future__ import absolute_import
import hashlib
import logging
from uuid import uuid4
from datetime import datetime
from pytz import UTC
import json
from collections import OrderedDict
from functools import partial
from static_replace import replace_static_urls
from xmodule_modifiers import wrap_xblock, request_token
from django.conf import settings
from django.core.exceptions import PermissionDenied
from django.contrib.auth.decorators import login_required
from django.http import HttpResponseBadRequest, HttpResponse, Http404
from django.utils.translation import ugettext as _
from django.views.decorators.http import require_http_methods
from xblock.fields import Scope
from xblock.fragment import Fragment
import xmodule
from xmodule.tabs import StaticTab, CourseTabList
from xmodule.modulestore import ModuleStoreEnum, EdxJSONEncoder
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.exceptions import ItemNotFoundError, InvalidLocationError
from xmodule.modulestore.inheritance import own_metadata
from xmodule.modulestore.draft_and_published import DIRECT_ONLY_CATEGORIES
from xmodule.x_module import PREVIEW_VIEWS, STUDIO_VIEW, STUDENT_VIEW
from xmodule.course_module import DEFAULT_START_DATE
from django.contrib.auth.models import User
from util.date_utils import get_default_time_display
from util.json_request import expect_json, JsonResponse
from student.auth import has_studio_write_access, has_studio_read_access
from contentstore.utils import find_release_date_source, find_staff_lock_source, is_currently_visible_to_students, \
ancestor_has_staff_lock, has_children_visible_to_specific_content_groups
from contentstore.views.helpers import is_unit, xblock_studio_url, xblock_primary_child_category, \
xblock_type_display_name, get_parent_xblock
from contentstore.views.preview import get_preview_fragment
from edxmako.shortcuts import render_to_string
from models.settings.course_grading import CourseGradingModel
from cms.lib.xblock.runtime import handler_url, local_resource_url
from opaque_keys.edx.keys import UsageKey, CourseKey
from opaque_keys.edx.locator import LibraryUsageLocator
from cms.lib.xblock.authoring_mixin import VISIBILITY_VIEW
__all__ = [
'orphan_handler', 'xblock_handler', 'xblock_view_handler', 'xblock_outline_handler', 'xblock_container_handler'
]
log = logging.getLogger(__name__)
CREATE_IF_NOT_FOUND = ['course_info']
# Useful constants for defining predicates
NEVER = lambda x: False
ALWAYS = lambda x: True
# In order to allow descriptors to use a handler url, we need to
# monkey-patch the x_module library.
# TODO: Remove this code when Runtimes are no longer created by modulestores
xmodule.x_module.descriptor_global_handler_url = handler_url
xmodule.x_module.descriptor_global_local_resource_url = local_resource_url
def hash_resource(resource):
"""
Hash a :class:`xblock.fragment.FragmentResource`.
"""
md5 = hashlib.md5()
md5.update(repr(resource))
return md5.hexdigest()
def usage_key_with_run(usage_key_string):
"""
Converts usage_key_string to a UsageKey, adding a course run if necessary
"""
usage_key = UsageKey.from_string(usage_key_string)
usage_key = usage_key.replace(course_key=modulestore().fill_in_run(usage_key.course_key))
return usage_key
def _filter_entrance_exam_grader(graders):
"""
If the entrance exams feature is enabled we need to hide away the grader from
views/controls like the 'Grade as' dropdown that allows a course author to select
the grader type for a given section of a course
"""
if settings.FEATURES.get('ENTRANCE_EXAMS', False):
graders = [grader for grader in graders if grader.get('type') != u'Entrance Exam']
return graders
# pylint: disable=unused-argument
@require_http_methods(("DELETE", "GET", "PUT", "POST", "PATCH"))
@login_required
@expect_json
def xblock_handler(request, usage_key_string):
"""
The restful handler for xblock requests.
DELETE
json: delete this xblock instance from the course.
GET
json: returns representation of the xblock (locator id, data, and metadata).
if ?fields=graderType, it returns the graderType for the unit instead of the above.
html: returns HTML for rendering the xblock (which includes both the "preview" view and the "editor" view)
PUT or POST or PATCH
json: if xblock locator is specified, update the xblock instance. The json payload can contain
these fields, all optional:
:data: the new value for the data.
:children: the unicode representation of the UsageKeys of children for this xblock.
:metadata: new values for the metadata fields. Any whose values are None will be deleted not set
to None! Absent ones will be left alone.
:nullout: which metadata fields to set to None
:graderType: change how this unit is graded
:publish: can be:
'make_public': publish the content
'republish': publish this item *only* if it was previously published
'discard_changes' - reverts to the last published version
Note: If 'discard_changes', the other fields will not be used; that is, it is not possible
to update and discard changes in a single operation.
The JSON representation on the updated xblock (minus children) is returned.
if usage_key_string is not specified, create a new xblock instance, either by duplicating
an existing xblock, or creating an entirely new one. The json playload can contain
these fields:
:parent_locator: parent for new xblock, required for both duplicate and create new instance
:duplicate_source_locator: if present, use this as the source for creating a duplicate copy
:category: type of xblock, required if duplicate_source_locator is not present.
:display_name: name for new xblock, optional
:boilerplate: template name for populating fields, optional and only used
if duplicate_source_locator is not present
The locator (unicode representation of a UsageKey) for the created xblock (minus children) is returned.
"""
if usage_key_string:
usage_key = usage_key_with_run(usage_key_string)
access_check = has_studio_read_access if request.method == 'GET' else has_studio_write_access
if not access_check(request.user, usage_key.course_key):
raise PermissionDenied()
if request.method == 'GET':
accept_header = request.META.get('HTTP_ACCEPT', 'application/json')
if 'application/json' in accept_header:
fields = request.REQUEST.get('fields', '').split(',')
if 'graderType' in fields:
# right now can't combine output of this w/ output of _get_module_info, but worthy goal
return JsonResponse(CourseGradingModel.get_section_grader_type(usage_key))
# TODO: pass fields to _get_module_info and only return those
with modulestore().bulk_operations(usage_key.course_key):
response = _get_module_info(_get_xblock(usage_key, request.user))
return JsonResponse(response)
else:
return HttpResponse(status=406)
elif request.method == 'DELETE':
_delete_item(usage_key, request.user)
return JsonResponse()
else: # Since we have a usage_key, we are updating an existing xblock.
return _save_xblock(
request.user,
_get_xblock(usage_key, request.user),
data=request.json.get('data'),
children_strings=request.json.get('children'),
metadata=request.json.get('metadata'),
nullout=request.json.get('nullout'),
grader_type=request.json.get('graderType'),
                publish=request.json.get('publish'),
            )
|
aragos/tichu-tournament
|
python/openpyxl/drawing/colors.py
|
Python
|
mit
| 11,201
| 0.002053
|
from __future__ import absolute_import
# Copyright (c) 2010-2016 openpyxl
from openpyxl.compat import basestring, unicode
from openpyxl.descriptors.serialisable import Serialisable
from openpyxl.descriptors import (
Alias,
Typed,
Integer,
Set,
MinMax,
)
from openpyxl.descriptors.excel import Percentage
from openpyxl.descriptors.nested import (
NestedNoneSet,
NestedValue,
NestedInteger,
)
from openpyxl.styles.colors import RGB
from openpyxl.xml.constants import DRAWING_NS
from openpyxl.descriptors.excel import ExtensionList as OfficeArtExtensionList
PRESET_COLORS = [
'aliceBlue', 'antiqueWhite', 'aqua', 'aquamarine',
'azure', 'beige', 'bisque', 'black', 'blanchedAlmond', 'blue',
'blueViolet', 'brown', 'burlyWood', 'cadetBlue', 'chartreuse',
'chocolate', 'coral', 'cornflowerBlue', 'cornsilk', 'crimson', 'cyan',
'darkBlue', 'darkCyan', 'darkGoldenrod', 'darkGray', 'darkGrey',
'darkGreen', 'darkKhaki', 'darkMagenta', 'darkOliveGreen', 'darkOrange',
'darkOrchid', 'darkRed', 'darkSalmon', 'darkSeaGreen', 'darkSlateBlue',
'darkSlateGray', 'darkSlateGrey', 'darkTurquoise', 'darkViolet',
'dkBlue', 'dkCyan', 'dkGoldenrod', 'dkGray', 'dkGrey', 'dkGreen',
'dkKhaki', 'dkMagenta', 'dkOliveGreen', 'dkOrange', 'dkOrchid', 'dkRed',
'dkSalmon', 'dkSeaGreen', 'dkSlateBlue', 'dkSlateGray', 'dkSlateGrey',
'dkTurquoise', 'dkViolet', 'deepPink', 'deepSkyBlue', 'dimGray',
'dimGrey', 'dodgerBlue', 'firebrick', 'floralWhite', 'forestGreen',
'fuchsia', 'gainsboro', 'ghostWhite', 'gold', 'goldenrod', 'gray',
'grey', 'green', 'greenYellow', 'honeydew', 'hotPink', 'indianRed',
'indigo', 'ivory', 'khaki', 'lavender', 'lavenderBlush', 'lawnGreen',
'lemonChiffon', 'lightBlue', 'lightCoral', 'lightCyan',
'lightGoldenrodYellow', 'lightGray', 'lightGrey', 'lightGreen',
'lightPink', 'lightSalmon', 'lightSeaGreen', 'lightSkyBlue',
'lightSlateGray', 'lightSlateGrey', 'lightSteelBlue', 'lightYellow',
'ltBlue', 'ltCoral', 'ltCyan', 'ltGoldenrodYellow', 'ltGray', 'ltGrey',
'ltGreen', 'ltPink', 'ltSalmon', 'ltSeaGreen', 'ltSkyBlue',
'ltSlateGray', 'ltSlateGrey', 'ltSteelBlue', 'ltYellow', 'lime',
'limeGreen', 'linen', 'magenta', 'maroon', 'medAquamarine', 'medBlue',
'medOrchid', 'medPurple', 'medSeaGreen', 'medSlateBlue',
'medSpringGreen', 'medTurquoise', 'medVioletRed', 'mediumAquamarine',
'mediumBlue', 'mediumOrchid', 'mediumPurple', 'mediumSeaGreen',
'mediumSlateBlue', 'mediumSpringGreen', 'mediumTurquoise',
'mediumVioletRed', 'midnightBlue', 'mintCream', 'mistyRose', 'moccasin',
'navajoWhite', 'navy', 'oldLace', 'olive', 'oliveDrab', 'orange',
'orangeRed', 'orchid', 'paleGoldenrod', 'paleGreen', 'paleTurquoise',
'paleVioletRed', 'papayaWhip', 'peachPuff', 'peru', 'pink', 'plum',
'powderBlue', 'purple', 'red', 'rosyBrown', 'royalBlue', 'saddleBrown',
'salmon', 'sandyBrown', 'seaGreen', 'seaShell', 'sienna', 'silver',
'skyBlue', 'slateBlue', 'slateGray', 'slateGrey', 'snow', 'springGreen',
'steelBlue', 'tan', 'teal', 'thistle', 'tomato', 'turquoise', 'violet',
'wheat', 'white', 'whiteSmoke', 'yellow', 'yellowGreen'
]
SCHEME_COLORS= ['bg1', 'tx1', 'bg2', 'tx2', 'accent1', 'accent2', 'accent3',
'accent4', 'accent5', 'accent6', 'hlink', 'folHlink', 'phClr', 'dk1', 'lt1',
'dk2', 'lt2'
]
class Transform(Serialisable):
pass
class SystemColor(Serialisable):
tagname = "sysClr"
# color transform options
tint = NestedInteger(allow_none=True)
shade = NestedInteger(allow_none=True)
comp = Typed(expected_type=Transform, allow_none=True)
inv = Typed(expected_type=Transform, allow_none=True)
gray = Typed(expected_type=Transform, allow_none=True)
alpha = NestedInteger(allow_none=True)
alphaOff = NestedInteger(allow_none=True)
alphaMod = NestedInteger(allow_none=True)
hue = NestedInteger(allow_none=True)
hueOff = NestedInteger(allow_none=True)
hueMod = NestedInteger(allow_none=True)
sat = NestedInteger(allow_none=True)
satOff = NestedInteger(allow_none=True)
satMod = NestedInteger(allow_none=True)
lum = NestedInteger(allow_none=True)
lumOff = NestedInteger(allow_none=True)
lumMod = NestedInteger(allow_none=True)
red = NestedInteger(allow_none=True)
redOff = NestedInteger(allow_none=True)
redMod = NestedInteger(allow_none=True)
green = NestedInteger(allow_none=True)
    greenOff = NestedInteger(allow_none=True)
greenMod = NestedInteger(allow_none=True)
blue = NestedInteger(allow_none=True)
blueOff = NestedInteger(allow_none=True)
blueMod = NestedInteger(allow_none=True)
gamma = Typed(expected_type=Transform, allow_none=True)
invGamma = Typed(expected_type=Transform, allow_none=True)
val = Set(values=(["bg1", "tx1", "bg2", "tx2", "accent1", "accent2",
"accent3", "accent4", "accent5", "accent6", "hlink", "folHlink", "phClr",
"dk1", "lt1", "dk2", "lt2", ]))
lastClr = Typed(expected_type=RGB, allow_none=True)
__elements__ = ('tint', 'shade', 'comp', 'inv', 'gray', "alpha",
"alphaOff", "alphaMod", "hue", "hueOff", "hueMod", "hueOff", "sat",
"satOff", "satMod", "lum", "lumOff", "lumMod", "red", "redOff", "redMod",
"green", "greenOff", "greenMod", "blue", "blueOff", "blueMod", "gamma",
"invGamma")
def __init__(self,
val="bg1",
lastClr=None,
tint=None,
shade=None,
comp=None,
inv=None,
gray=None,
alpha=None,
alphaOff=None,
alphaMod=None,
hue=None,
hueOff=None,
hueMod=None,
sat=None,
satOff=None,
satMod=None,
lum=None,
lumOff=None,
lumMod=None,
red=None,
redOff=None,
redMod=None,
green=None,
greenOff=None,
greenMod=None,
blue=None,
blueOff=None,
blueMod=None,
gamma=None,
invGamma=None
):
self.val = val
self.lastClr = lastClr
self.tint = tint
self.shade = shade
self.comp = comp
self.inv = inv
self.gray = gray
self.alpha = alpha
self.alphaOff = alphaOff
self.alphaMod = alphaMod
self.hue = hue
self.hueOff = hueOff
self.hueMod = hueMod
self.sat = sat
self.satOff = satOff
self.satMod = satMod
self.lum = lum
self.lumOff = lumOff
self.lumMod = lumMod
self.red = red
self.redOff = redOff
self.redMod = redMod
self.green = green
self.greenOff = greenOff
self.greenMod = greenMod
self.blue = blue
self.blueOff = blueOff
self.blueMod = blueMod
self.gamma = gamma
self.invGamma = invGamma
class HSLColor(Serialisable):
tagname = "hslClr"
hue = Integer()
sat = MinMax(min=0, max=100)
lum = MinMax(min=0, max=100)
#TODO add color transform options
def __init__(self,
hue=None,
sat=None,
lum=None,
):
self.hue = hue
self.sat = sat
self.lum = lum
class RGBPercent(Serialisable):
tagname = "rgbClr"
r = MinMax(min=0, max=100)
g = MinMax(min=0, max=100)
b = MinMax(min=0, max=100)
#TODO add color transform options
def __init__(self,
r=None,
g=None,
b=None,
):
self.r = r
self.g = g
        self.b = b
|
mylokin/servy
|
servy/utils/dsntool.py
|
Python
|
mit
| 4,496
| 0.001779
|
import collections
import re
import urlparse
class DSN(collections.MutableMapping):
''' Hold the results of a parsed dsn.
This is very similar to urlparse.ParseResult tuple.
http://docs.python.org/2/library/urlparse.html#results-of-urlparse-and-urlsplit
It exposes the following attributes:
scheme
schemes -- if your scheme has +'s in it, then this will contain a list of schemes split by +
path
paths -- the path segment split by /, so "/foo/bar" would be ["foo", "bar"]
host -- same as hostname (I just like host better)
hostname
hostloc -- host:port
username
password
netloc
query -- a dict of the query string
query_str -- the raw query string
port
fragment
'''
DSN_REGEXP = re.compile(r'^\S+://\S+')
FIELDS = ('scheme', 'netloc', 'path', 'params', 'query', 'fragment')
def __init__(self, dsn, **defaults):
''' Parse a dsn to parts similar to urlparse.
This is a nuts function that can serve as a good basis to parsing a custom dsn
:param dsn: the dsn to parse
:type dsn: str
:param defaults: any values you want to have defaults for if they aren't in the dsn
:type defaults: dict
'''
assert self.DSN_REGEXP.match(dsn), \
"{} is invalid, only full dsn urls (scheme://host...) allowed".format(dsn)
first_colon = dsn.find(':')
scheme = dsn[0:first_colon]
dsn_url = dsn[first_colon+1:]
url = urlparse.urlparse(dsn_url)
options = {}
if url.query:
            for k, kv in urlparse.parse_qs(url.query, True, True).iteritems():
if len(kv) > 1:
options[k] = kv
else:
options[k] = kv[0]
self.scheme = scheme
self.hostname = url.hostname
self.path = url.path
self.params = url.params
self.query = options
self.fragment = url.fragment
self.username = url.username
self.password = url.password
self.port = url.port
self.query_str = url.query
for k, v in defaults.iteritems():
self.set_default(k, v)
def __iter__(self):
for f in self.FIELDS:
yield getattr(self, f, '')
    def __len__(self):
        # len() of a generator would raise TypeError; count the fields directly
        return len(self.FIELDS)
def __getitem__(self, field):
return getattr(self, field, None)
def __setitem__(self, field, value):
setattr(self, field, value)
def __delitem__(self, field):
delattr(self, field)
@property
def schemes(self):
'''the scheme, split by plus signs'''
return self.scheme.split('+')
@property
def netloc(self):
'''return username:password@hostname:port'''
s = ''
prefix = ''
if self.username:
s += self.username
prefix = '@'
if self.password:
s += ":{}".format(self.password)
prefix = '@'
s += "{}{}".format(prefix, self.hostloc)
return s
@property
def paths(self):
'''the path attribute split by /'''
return filter(None, self.path.split('/'))
@property
def host(self):
'''the hostname, but I like host better'''
return self.hostname
@property
def hostloc(self):
'''return host:port'''
hostloc = self.hostname
if self.port:
hostloc = '{}:{}'.format(hostloc, self.port)
return hostloc
def set_default(self, key, value):
''' Set a default value for key.
This is different than dict's setdefault because it will set default either
if the key doesn't exist, or if the value at the key evaluates to False, so
an empty string or a None will value will be updated.
:param key: the item to update
:type key: str
:param value: the items new value if key has a current value that evaluates to False
'''
if not getattr(self, key, None):
setattr(self, key, value)
def get_url(self):
'''return the dsn back into url form'''
return urlparse.urlunparse((
self.scheme,
self.netloc,
self.path,
self.params,
self.query_str,
self.fragment,
))
def copy(self):
return DSN(self.get_url())
def __str__(self):
return self.get_url()
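# Editor's sketch (not part of the original module): exercising DSN under
# Python 2 with a made-up connection string.
if __name__ == '__main__':
    dsn = DSN('mysql+pooled://user:secret@db.example.com:3306/app/main?timeout=5')
    print(dsn.schemes)   # ['mysql', 'pooled']
    print(dsn.hostloc)   # 'db.example.com:3306'
    print(dsn.paths)     # ['app', 'main']
    print(dsn.query)     # {'timeout': '5'}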
|
Yellowen/Sharamaan
|
bin/bootstrap_creator.py
|
Python
|
gpl-2.0
| 327
| 0
|
import virtualenv
import textwrap
output = virtualenv.create_bootstrap_script(textwrap.dedent("""
import os, subprocess
def after_install(options, home_dir):
subprocess.call([join(home_dir, 'bin', 'pip'),
'install', 'ipython', 'django', 'psycopg2'])
"""))
with open('bootstrap.py', 'w') as f:
    f.write(output)
|
pschoenfelder/named-dates
|
tests/test_day_of_nth_weekday.py
|
Python
|
mit
| 2,862
| 0
|
import pytest
from named_dates.named_dates import\
day_of_nth_weekday, NoNthWeekdayError
# For reference throughout these tests, October 1, 2015 is
# a Thursday (weekday = 3).
def test_weekday_equals_first_of_month():
# Tests that day_of_nth_weekday works when the requested weekday is the
# first weekday is the month.
assert day_of_nth_weekday(2015, 10, 3, nth=1) == 1
assert day_of_nth_weekday(2015, 10, 3, nth=2) == 8
assert day_of_nth_weekday(2015, 10, 3, nth=3) == 15
assert day_of_nth_weekday(2015, 10, 3, nth=4) == 22
assert day_of_nth_weekday(2015, 10, 3, nth=5) == 29
with pytest.raises(NoNthWeekdayError):
day_of_nth_weekday(2015, 10, 3, nth=0)
with pytest.raises(NoNthWeekdayError):
day_of_nth_weekday(2015, 10, 3, nth=6)
def test_weekday_greater_than_first_of_month():
# Tests that day_of_nth_weekday works when the requested weekday is
# greater than the first weekday of the month.
assert day_of_nth_weekday(2015, 10, 5, nth=1) == 3
assert day_of_nth_weekday(2015, 10, 5, nth=2) == 10
assert day_of_nth_weekday(2015, 10, 5, nth=5) == 31
with pytest.raises(NoNthWeekdayError):
day_of_nth_weekday(2015, 10, 5, nth=6)
def test_weekday_less_than_first_of_month():
# Tests that day_of_nth_weekday works when the requested weekday is
# less than the first weekday of the month.
assert day_of_nth_weekday(2015, 10, 1, nth=1) == 6
assert day_of_nth_weekday(2015, 10, 1, nth=2) == 13
assert day_of_nth_weekday(2015, 10, 1, nth=3) == 20
    assert day_of_nth_weekday(2015, 10, 1, nth=4) == 27
with pytest.raises(NoNthWeekdayError):
day_of_nth_weekday(2015, 10, 1, nth=5)
def test_from_end():
# October 31 is a Saturday (day 5)
assert day_of_nth_weekday(2015, 10, 5, nth=1, from_end=True) == 31
assert day_of_nth_weekday(2015, 10, 5, nth=2, from_end=True) == 24
assert day_of_nth_weekday(2015, 10, 5, nth=5, from_end=True) == 3
with pytest.raises(NoNthWeekdayError):
assert day_of_nth_weekday(2015, 10, 5, nth=6, from_end=True)
assert day_of_nth_weekday(2015, 10, 3, nth=1, from_end=True) == 29
assert day_of_nth_weekday(2015, 10, 3, nth=2, from_end=True) == 22
assert day_of_nth_weekday(2015, 10, 3, nth=5, from_end=True) == 1
with pytest.raises(NoNthWeekdayError):
assert day_of_nth_weekday(2015, 10, 3, nth=6, from_end=True)
assert day_of_nth_weekday(2015, 10, 6, nth=1, from_end=True) == 25
assert day_of_nth_weekday(2015, 10, 6, nth=2, from_end=True) == 18
assert day_of_nth_weekday(2015, 10, 6, nth=4, from_end=True) == 4
with pytest.raises(NoNthWeekdayError):
assert day_of_nth_weekday(2015, 10, 6, nth=5, from_end=True)
def test_bad_kwargs_disallowed():
with pytest.raises(TypeError):
day_of_nth_weekday(2015, 1, 1, bad_kwarg=1)
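# Editor's sketch (not part of the original tests): the function under test,
# used directly -- US Thanksgiving 2015, the 4th Thursday (weekday 3) of
# November.
if __name__ == '__main__':
    print(day_of_nth_weekday(2015, 11, 3, nth=4))  # -> 26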
|
eduardoedson/scp
|
usuarios/forms.py
|
Python
|
mit
| 8,823
| 0
|
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Fieldset, Layout
from django import forms
from django.contrib.auth.forms import AuthenticationForm
from django.contrib.auth.models import User
from django.contrib.auth.password_validation import validate_password
from django.core.exceptions import ValidationError
from django.db import transaction
from django.forms import ModelForm
from django.utils.translation import ugettext_lazy as _
from django_filters import FilterSet
from easy_select2 import Select2
from crispy_layout_mixin import form_actions, to_row
from utils import (TIPO_TELEFONE, YES_NO_CHOICES, get_medicos,
get_or_create_grupo)
from .models import Especialidade, EspecialidadeMedico, Usuario
class EspecialidadeMedicoFilterSet(FilterSet):
class Meta:
model = EspecialidadeMedico
fields = ['especialidade']
def __init__(self, *args, **kwargs):
super(EspecialidadeMedicoFilterSet, self).__init__(*args, **kwargs)
row1 = to_row([('especialidade', 12)])
self.form.helper = FormHelper()
self.form.helper.form_method = 'GET'
self.form.helper.layout = Layout(
Fieldset(_('Pesquisar Médico'),
row1, form_actions(save_label='Filtrar'))
)
class MudarSenhaForm(forms.Form):
nova_senha = forms.CharField(
label="Nova Senha", max_length=30,
widget=forms.PasswordInput(
attrs={'class': 'form-control form-control-lg',
'name': 'senha',
'placeholder': 'Nova Senha'}))
confirmar_senha = forms.CharField(
label="Confirmar Senha", max_length=30,
widget=forms.PasswordInput(
attrs={'class': 'form-control form-control-lg',
'name': 'confirmar_senha',
'placeholder': 'Confirmar Senha'}))
class LoginForm(AuthenticationForm):
username = forms.CharField(
label="Username", max_length=30,
widget=forms.TextInput(
attrs={'class': 'form-control form-control-lg',
'name': 'username',
'placeholder': 'Usuário'}))
password = forms.CharField(
label="Password", max_length=30,
widget=forms.PasswordInput(
attrs={'class': 'form-control',
'name': 'password',
'placeholder': 'Senha'}))
class UsuarioForm(ModelForm):
    # User
password = forms.CharField(
max_length=20,
label=_('Senha'),
widget=forms.PasswordInput())
password_confirm = forms.CharField(
max_length=20,
label=_('Confirmar Senha'),
widget=forms.PasswordInput())
class Meta:
model = Usuario
fields = ['username', 'email', 'nome', 'password', 'password_confirm',
'data_nascimento', 'sexo', 'plano', 'tipo', 'cep', 'end',
'numero', 'complemento', 'bairro', 'referencia',
'primeiro_telefone', 'segundo_telefone']
widgets = {'email': forms.TextInput(
attrs={'style': 'text-transform:lowercase;'})}
def __init__(self, *args, **kwargs):
super(UsuarioForm, self).__init__(*args, **kwargs)
self.fields['primeiro_telefone'].widget.attrs['class'] = 'telefone'
self.fields['segundo_telefone'].widget.attrs['class'] = 'telefone'
def valida_igualdade(self, texto1, texto2, msg):
if texto1 != texto2:
raise ValidationError(msg)
return True
def clean(self):
if ('password' not in self.cleaned_data or
'password_confirm' not in self.cleaned_data):
raise ValidationError(_('Favor informar senhas atuais ou novas'))
msg = _('As senhas não conferem.')
self.valida_igualdade(
self.cleaned_data['password'],
self.cleaned_data['password_confirm'],
msg)
try:
validate_password(self.cleaned_data['password'])
except ValidationError as error:
raise ValidationError(error)
return self.cleaned_data
@transaction.atomic
def save(self, commit=False):
usuario = super(UsuarioForm, self).save(commit)
        # Create the auth User
        u = User.objects.create(username=usuario.username, email=usuario.email)
u.set_password(self.cleaned_data['password'])
u.is_active = True
        u.groups.add(get_or_create_grupo(self.cleaned_data['tipo'].descricao))
u.save()
usuario.user = u
usuario.save()
return usuario
class UsuarioEditForm(ModelForm):
    # First telephone
primeiro_tipo = forms.ChoiceField(
widget=forms.Select(),
choices=TIPO_TELEFONE,
label=_('Tipo Telefone'))
primeiro_ddd = forms.CharField(max_length=2, label=_('DDD'))
primeiro_numero = forms.CharField(max_length=10, label=_('Número'))
primeiro_principal = forms.TypedChoiceField(
widget=forms.Select(),
label=_('Telefone Principal?'),
choices=YES_NO_CHOICES)
    # Second telephone
segundo_tipo = forms.ChoiceField(
required=False,
widget=forms.Select(),
choices=TIPO_TELEFONE,
label=_('Tipo Telefone'))
segundo_ddd = forms.CharField(required=False, max_length=2, label=_('DDD'))
segundo_numero = forms.CharField(
required=False, max_length=10, label=_('Número'))
segundo_principal = forms.ChoiceField(
required=False,
widget=forms.Select(),
label=_('Telefone Principal?'),
choices=YES_NO_CHOICES)
class Meta:
model = Usuario
fields = ['username', 'email', 'nome', 'data_nascimento', 'sexo',
'plano', 'tipo', 'cep', 'end', 'numero', 'complemento',
'bairro', 'referencia', 'primeiro_telefone',
'segundo_telefone']
widgets = {'username': forms.TextInput(attrs={'readonly': 'readonly'}),
'email': forms.TextInput(
attrs={'style': 'text-transform:lowercase;'}),
}
def __init__(self, *args, **kwargs):
super(UsuarioEditForm, self).__init__(*args, **kwargs)
self.fields['primeiro_telefone'].widget.attrs['class'] = 'telefone'
self.fields['segundo_telefone'].widget.attrs['class'] = 'telefone'
def valida_igualdade(self, texto1, texto2, msg):
if texto1 != texto2:
raise ValidationError(msg)
return True
def clean_primeiro_numero(self):
cleaned_data = self.cleaned_data
telefone = Telefone()
telefone.tipo = self.data['primeiro_tipo']
telefone.ddd = self.data['primeiro_ddd']
telefone.numero = self.data['primeiro_numero']
telefone.principal = self.data['primeiro_principal']
cleaned_data['primeiro_telefone'] = telefone
return cleaned_data
def clean_segundo_numero(self):
cleaned_data = self.cleaned_data
telefone = Telefone()
telefone.tipo = self.data['segundo_tipo']
telefone.ddd = self.data['segundo_ddd']
telefone.numero = self.data['segundo_numero']
telefone.principal = self.data['segundo_principal']
cleaned_data['segundo_telefone'] = telefone
return cleaned_data
@transaction.atomic
def save(self, commit=False):
usuario = super(UsuarioEditForm, self).save(commit)
        # First telephone
tel = usuario.primeiro_telefone
tel.tipo = self.data['primeiro_tipo']
tel.ddd = self.data['primeiro_ddd']
tel.numero = self.data['primeiro_numero']
tel.principal = self.data['primeiro_principal']
tel.save()
usuario.primeiro_telefone = tel
        # Second telephone
tel = usuario.segundo_telefone
if tel:
tel.tipo = self.data['segundo_tipo']
tel.ddd = self.data['segundo_ddd']
tel.numero = self.data['segundo_numero']
tel.principal = self.data['segundo_principal']
tel.save()
usuario.segundo_telefone = tel
# User
        u = usuario.user
|
asascience-open/chisp1_wps
|
wps/models.py
|
Python
|
gpl-3.0
| 3,153
| 0.012369
|
from django.db import models
# Create your models here.
class Server(models.Model):
# Server
title = models.CharField(max_length=1000, help_text="Server Title", blank=False)
abstract = models.CharField(max_length=2000, help_text="Server Abstract", blank=True)
keywords = models.CharField(max_length=2000, help_text="Comma Separated List of Keywords", blank=True)
# Contact
contact_person = models.CharField(max_length=1000, help_text="Person to Contact", blank=True)
contact_organization = models.CharField(max_length=1000, help_text="Contact Organization", blank=True)
contact_position = models.CharField(max_length=1000, help_text="Contact Position (Optional)", blank=True)
contact_street_address = models.CharField(max_length=1000, help_text="Street Address (Optional)", blank=True)
contact_city_address = models.CharField(max_length=1000, help_text="Address: City (Optional)", blank=True)
contact_state_address = models.CharField(max_length=1000, help_text="Address: State or Providence (Optional)", blank=True)
contact_code_address = models.CharField(max_length=1000, help_text="Address: Postal Code (Optional)", blank=True)
contact_country_address = models.CharField(max_length=1000, help_text="Address: Country (Optional)", blank=True)
contact_telephone = models.CharField(max_length=1000, help_text="Contact Telephone Number (Optional)", blank=True)
contact_email = models.CharField(max_length=1000, help_text="Contact Email Address", blank=True)
contact_site = models.CharField(max_length=1000, help_text="Contact Web Site", blank=True)
# This implementation
implementation_site = models.CharField(max_length=1000, help_text="Web Address for This Implementation", blank=False)
def __unicode__(self):
return self.implementation_site
# Add other implementation specific classes here
class StreamGauge(models.Model):
river_segment_id = models.CharField(max_length=1000, help_text="NHN River Segment ID for both US and Canadian River Reaches", blank=False)
sos_endpoint = models.CharField(max_length=1000, help_text="SOS Endpoint for this Stream Gauge and ID", blank=True)
stream_gauge_id = models.CharField(max_length=1000, help_text="Stream gauge ID that corresponds to the station in the SOS endpoint", blank=False, unique=True)
stream_gauge_name = models.CharField(max_length=1000, help_text="Stream gauge name", blank=True)
    stream_gauge_offerings = models.CharField(max_length=10000, help_text="Comma separated list of offerings for this station through SOS endpoint", blank=True)
stream_gauge_parameters = models.CharField(max_length=50000, help_text="Comma separated list of observedProperty parameters for this station through SOS endpoint", blank=True)
stream_gauge_x = models.DecimalField(help_text="Longitude or X coodinate", blank=True, max_digits=20, decimal_places=8)
    stream_gauge_y = models.DecimalField(help_text="Latitude or Y coordinate", blank=True, max_digits=20, decimal_places=8)
def __unicode__(self):
return self.stream_gauge_id
|
ResearchSoftwareInstitute/MyHPOM
|
hs_modflow_modelinstance/forms.py
|
Python
|
bsd-3-clause
| 27,274
| 0.003886
|
from django.forms import ModelForm
from django import forms
from crispy_forms import layout
from crispy_forms.layout import Layout, HTML
from hs_core.forms import BaseFormHelper, Helper
from hs_core.hydroshare import users
from hs_modelinstance.models import ModelOutput, ExecutedBy
from hs_modflow_modelinstance.models import StudyArea, GridDimensions, StressPeriod, \
GroundWaterFlow, BoundaryCondition, ModelCalibration, ModelInput, GeneralElements
class MetadataField(layout.Field):
def __init__(self, *args, **kwargs):
kwargs['css_class'] = 'form-control input-sm'
super(MetadataField, self).__init__(*args, **kwargs)
# ModelOutput element forms
class ModelOutputFormHelper(BaseFormHelper):
def __init__(self, allow_edit=True, res_short_id=None, element_id=None, element_name=None,
*args, **kwargs):
# the order in which the model fields are listed for the FieldSet is the order these fields
# will be displayed
layout = Layout(
MetadataField('includes_output'),
)
kwargs['element_name_label'] = 'Includes output files?'
super(ModelOutputFormHelper, self).__init__(allow_edit, res_short_id, element_id,
element_name, layout, *args, **kwargs)
class ModelOutputForm(ModelForm):
includes_output = forms.TypedChoiceField(choices=((True, 'Yes'), (False, 'No')),
widget=forms.RadioSelect(
attrs={'style': 'width:auto;margin-top:-5px'}))
def __init__(self, allow_edit=True, res_short_id=None, element_id=None, *args, **kwargs):
super(ModelOutputForm, self).__init__(*args, **kwargs)
self.helper = ModelOutputFormHelper(allow_edit, res_short_id, element_id,
element_name='ModelOutput')
class Meta:
model = ModelOutput
fields = ('includes_output',)
class ModelOutputValidationForm(forms.Form):
includes_output = forms.TypedChoiceField(choices=((True, 'Yes'), (False, 'No')), required=False)
def clean_includes_output(self):
data = self.cleaned_data['includes_output']
if data == u'False':
return False
else:
return True
# ExecutedBy element forms
class ExecutedByFormHelper(BaseFormHelper):
def __init__(self, allow_edit=True, res_short_id=None, element_id=None, element_name=None,
*args, **kwargs):
# pop the model program shortid out of the kwargs dictionary
mp_id = kwargs.pop('mpshortid')
# get all model program resources and build option HTML elements for each one.
# ModelProgram shortid is concatenated to the selectbox id so that it is accessible in the
# template.
mp_resource = users.get_resource_list(type=['ModelProgramResource'])
options = '\n'.join(['<option value=%s>%s</option>' % (r.short_id, r.title) for r in
mp_resource])
options = '<option value=Unspecified>Unspecified</option>' + options
selectbox = HTML('<div class="div-selectbox">'
' <select class="selectbox" id="selectbox_'+mp_id+'">' + options +
'</select>'
'</div><br>')
# the order in which the model fields are listed for the FieldSet is the order these fields
# will be displayed
layout = Layout(
MetadataField('model_name', style="display:none"),
selectbox,
HTML("""
<div id=program_details_div style="display:none">
<table id="program_details_table" class="modelprogram">
<tr><td>Description: </td><td></td></tr>
<tr><td>Release Date: </td><td></td></tr>
<tr><td>Version: </td><td></td></tr>
<tr><td>Language: </td><td></td></tr>
                        <tr><td>Operating System: </td><td></td></tr>
<tr><td>Url: </td><td></td></tr>
</table>
</div>
"""),
)
kwargs['element_name_label'] = 'Model Program used for execution'
        super(ExecutedByFormHelper, self).__init__(allow_edit, res_short_id, element_id,
                                                   element_name, layout, *args, **kwargs)
class ExecutedByForm(ModelForm):
def __init__(self, allow_edit=True, res_short_id=None, element_id=None, *args, **kwargs):
super(ExecutedByForm, self).__init__(*args, **kwargs)
# set mpshort id to 'Unspecified' if a foreign key has not been established yet,
# otherwise use mp short id
mpshortid = 'Unspecified'
if self.instance.model_program_fk is not None:
mpshortid = self.instance.model_program_fk.short_id
kwargs = dict(mpshortid=mpshortid)
self.helper = ExecutedByFormHelper(allow_edit, res_short_id, element_id,
element_name='ExecutedBy', **kwargs)
class Meta:
model = ExecutedBy
exclude = ('content_object', 'model_program_fk',)
class ExecutedByValidationForm(forms.Form):
model_name = forms.CharField(max_length=200)
# StudyArea element forms
class StudyAreaFormHelper(BaseFormHelper):
def __init__(self, allow_edit=True, res_short_id=None, element_id=None, element_name=None,
*args, **kwargs):
# the order in which the model fields are listed for the FieldSet is the order these fields
# will be displayed
layout = Layout(
MetadataField('totalLength'),
MetadataField('totalWidth'),
MetadataField('maximumElevation'),
MetadataField('minimumElevation'),
)
kwargs['element_name_label'] = 'Study Area'
super(StudyAreaFormHelper, self).__init__(allow_edit, res_short_id, element_id,
element_name, layout, *args, **kwargs)
class StudyAreaForm(ModelForm):
def __init__(self, allow_edit=True, res_short_id=None, element_id=None, *args, **kwargs):
super(StudyAreaForm, self).__init__(*args, **kwargs)
self.helper = StudyAreaFormHelper(allow_edit, res_short_id, element_id,
element_name='StudyArea')
class Meta:
model = StudyArea
fields = ('totalLength',
'totalWidth',
'maximumElevation',
'minimumElevation',
)
class StudyAreaValidationForm(forms.Form):
totalLength = forms.CharField(max_length=100, required=False)
totalWidth = forms.CharField(max_length=100, required=False)
maximumElevation = forms.CharField(max_length=100, required=False)
minimumElevation = forms.CharField(max_length=100, required=False)
# GridDimensions element forms
class GridDimensionsFormHelper(BaseFormHelper):
def __init__(self, allow_edit=True, res_short_id=None, element_id=None, element_name=None,
*args, **kwargs):
# the order in which the model fields are listed for the FieldSet is the order these fields
# will be displayed
layout = Layout(
MetadataField('numberOfLayers'),
MetadataField('typeOfRows'),
MetadataField('numberOfRows'),
MetadataField('typeOfColumns'),
MetadataField('numberOfColumns'),
)
kwargs['element_name_label'] = 'Grid Dimensions'
super(GridDimensionsFormHelper, self).__init__(allow_edit, res_short_id, element_id,
element_name, layout, *args, **kwargs)
class GridDimensionsForm(ModelForm):
grid_type_choices = (('Choose a type', 'Choose a type'),) + GridDimensions.gr
|
JonasWallin/BayesFlow
|
examples/article1/article_simulated_estimate_mpi.py
|
Python
|
gpl-2.0
| 8,046
| 0.048223
|
'''
run with ex: mpiexec -n 10 python article_simulated_estimate_mpi.py
Created on Jul 11, 2014
@author: jonaswallin
'''
from __future__ import division
import time
import scipy.spatial as ss
import article_simulatedata
from mpi4py import MPI
import numpy as np
import BayesFlow as bm
import matplotlib
import matplotlib.pyplot as plt
import numpy.random as npr
import BayesFlow.plot as bm_plot
import matplotlib.ticker as ticker
from article_plotfunctions import plotQ_joint, plotQ, plot_theta
folderFigs = "/Users/jonaswallin/Dropbox/articles/FlowCap/figs/"
sim = 10**2
nCells = 1500
thin = 2
nPers = 80
save_fig = 0
Y = []
####
# COLLECTING THE DATA
####
if MPI.COMM_WORLD.Get_rank() == 0: # @UndefinedVariable
Y,act_komp, mus, Thetas, Sigmas, P = np.array(article_simulatedata.simulate_data_v1(nCells = nCells, nPersons = nPers))
else:
Y = None
act_komp = None
#npr.seed(123546)
####
# Setting up model
####
hGMM = bm.hierarical_mixture_mpi(K = 4)
hGMM.set_data(Y)
hGMM.set_prior_param0()
hGMM.update_GMM()
hGMM.update_prior()
hGMM.set_p_labelswitch(1.)
hGMM.set_prior_actiavation(10)
hGMM.set_nu_MH_param(10,200)
for i,GMM in enumerate(hGMM.GMMs):
GMM._label =i
for i in range(min(sim,2000)):
hGMM.sample()
np.set_printoptions(precision=3)
#hGMM.reset_prior()
bm.distance_sort_MPI(hGMM)
hGMM.set_p_activation([0.7,0.7])
if MPI.COMM_WORLD.Get_rank() == 0: # @UndefinedVariable
theta_sim = []
Q_sim = []
nu_sim = []
Y_sim = []
Y0_sim = []
##############
# MCMC PART
##############
##############
# BURN IN
##############
for i in range(min(np.int(np.ceil(0.1*sim)),8000)):#burn in
hGMM.sample()
if MPI.COMM_WORLD.Get_rank() == 0: # @UndefinedVariable
mus_vec = np.zeros((len(Y), hGMM.K, hGMM.d))
actkomp_vec = np.zeros((len(Y), hGMM.K))
count = 0
hGMM.set_p_labelswitch(.4)
for i in range(sim):#
# sampling the thining
for k in range(thin):
# simulating
hGMM.sample()
##
# since label switching affects the posterior of mu, and active_komp
# it needs to be estimated each time
##
labels = hGMM.get_labelswitches()
if MPI.COMM_WORLD.Get_rank() == 0: # @UndefinedVariable
for j in range(labels.shape[0]):
if labels[j,0] != -1:
                    # .copy() is needed here: basic slicing yields numpy views, so
                    # swapping without copies would write row labels[j,1] into both slots.
                    mus_vec[j,labels[j,0],:], mus_vec[j,labels[j,1],:] = \
                        mus_vec[j,labels[j,1],:].copy(), mus_vec[j,labels[j,0],:].copy()
actkomp_vec[j,labels[j,0]], actkomp_vec[j,labels[j,1]] = actkomp_vec[j,labels[j,1]], actkomp_vec[j,labels[j,0]]
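                    # Illustrative aside (not in the original script): without the
                    # .copy() calls above, the row swap silently fails, e.g.
                    #   a = np.arange(4.).reshape(2, 2)
                    #   a[0, :], a[1, :] = a[1, :], a[0, :]
                    #   # -> both rows now equal the old row 1
                    #   a[0, :], a[1, :] = a[1, :].copy(), a[0, :].copy()
                    #   # -> correct swap
                    # (The scalar swap on actkomp_vec is safe: single-element
                    # indexing returns copies, not views.)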
###################
# storing data
# for post analysis
###################
mus_ = hGMM.get_mus()
thetas = hGMM.get_thetas()
Qs = hGMM.get_Qs()
nus = hGMM.get_nus()
if sim - i < nCells * nPers:
Y_sample = hGMM.sampleY()
active_komp = hGMM.get_activekompontent()
if MPI.COMM_WORLD.Get_rank() == 0: # @UndefinedVariable
print "iter =%d"%i
count += 1
mus_vec += mus_
actkomp_vec += active_komp
theta_sim.append(thetas)
Q_sim.append(Qs/(nus.reshape(nus.shape[0],1,1)- Qs.shape[1]-1) )
nu_sim.append(nus)
        # store a number of samples equal to the first individual's cell count
if sim - i < nCells:
Y0_sim.append(hGMM.GMMs[0].simulate_one_obs().reshape(3))
Y_sim.append(Y_sample)
if MPI.COMM_WORLD.Get_rank() == 0: # @UndefinedVariable
actkomp_vec /= count
mus_vec /= count
mus_ = mus_vec
hGMM.save_to_file("/Users/jonaswallin/Dropbox/temp/")
##
# fixing ploting options
##
matplotlib.rcParams['ps.useafm'] = True
matplotlib.rcParams['pdf.use14corefonts'] = True
matplotlib.rcParams['text.usetex'] = True
matplotlib.rcParams['text.latex.preamble']=[r"\usepackage{amsmath}"]
#hGMM.plot_GMM_scatter_all([0, 1])
mus_colors = ['r','b','k','m']
f, ax = hGMM.plot_mus([0,1,2], colors =mus_colors, size_point = 5 )
if MPI.COMM_WORLD.Get_rank() == 0: # @UndefinedVariable
######################
#ordering mus
mus_true_mean = []
mus_mean = []
for k in range(hGMM.K):
mus_true_mean.append(np.array(np.ma.masked_invalid(mus[:,k,:]).mean(0)))
mus_mean.append(np.array(np.ma.masked_invalid(mus_[:,k,:].T).mean(0)))
mus_true_mean = np.array(mus_true_mean)
mus_mean = np.array(mus_mean)
ss_mat = ss.distance.cdist( mus_true_mean, mus_mean, "euclidean")
#print ss_mat
col_index = []
for k in range(hGMM.K):
col_index.append( np.argmin(ss_mat[k,:]))
#print col_index
#####################
######################
theta_sim = np.array(theta_sim)
Q_sim = np.array(Q_sim)
nu_sim = np.array(nu_sim)
np.set_printoptions(precision=2)
perc_theta = []
perc_Q_vec = []
for k in range(hGMM.K):
perc_ = np.percentile(theta_sim[:,col_index[k],:] - Thetas[k],[2.5,50,97.5],axis=0)
perc_theta.append(np.array(perc_).T)
#print "%d & %s & %s & %s & \\hline" %(k, np.mean(theta_sim[:,col_index[k],:],0) - Thetas[k],perc_[0],perc_[1])
perc_Q = np.percentile(Q_sim[:,col_index[k],:] - Sigmas[k],[2.5,50,97.5],axis=0)
#print "Q = %s"%(np.mean(Q_sim[:,col_index[k],:],0))
perc_Q_vec.append(perc_Q)
theta_string = ""
Q_string = ""
theta_diff = np.mean(theta_sim[:,col_index[k],:],0) - Thetas[k]
Q_diff = np.mean(Q_sim[:,col_index[k],:] - Sigmas[k] ,0)
for d in range(hGMM.d):
theta_string += " %.2f (%.2f, %.2f) &"%(perc_[1][d], perc_[0][d], perc_[2][d])
for dd in range(hGMM.d):
Q_string += " %.3f (%.3f, %.3f) &"%(perc_Q[1][d,dd],perc_Q[0][d,dd],perc_Q[2][d,dd] )
Q_string = Q_string[:-1]
Q_string +="\\\ \n"
theta_string = theta_string[:-1]
print "theta[%d]= \n%s\n"%(k,theta_string)
print "Q[%d]= \n%s "%(k,Q_string)
perc_nu = np.percentile(nu_sim[:,col_index[k]] - 100,[2.5,50,97.5],axis=0)
print "nu = %.2f (%d, %d)"%(perc_nu[1],perc_nu[0],perc_nu[2])
Y_sim = np.array(Y_sim)
Y0_sim = np.array(Y0_sim)
for k in range(hGMM.K):
k_ = np.where(np.array(col_index)==k)[0][0]
print("k_ == %s"%k_)
mu_k = mus[:,k_,:].T
#print actkomp_vec[:,col_index[k]]
index = np.isnan(mu_k[:,0])==False
ax.scatter(mu_k[index,0],mu_k[index,1],mu_k[index,2], s=50, edgecolor=mus_colors[k],facecolors='none')
ax.view_init(48,22)
fig_nu = plt.figure(figsize=(6,0.5))
ax_nu = fig_nu.add_subplot(111)
for k in range(hGMM.K):
ax_nu.plot(nu_sim[:,col_index[k]])
f_histY = bm_plot.histnd(Y_sim, 50, [0, 100], [0,100])
f_histY0 = bm_plot.histnd(Y0_sim, 50, [0, 100], [0,100])
f_theta = plot_theta(np.array(perc_theta))
figs_Q = plotQ(perc_Q_vec)
fig_Q_joint = plotQ_joint(perc_Q_vec)
np.set_printoptions(precision=4, suppress=True)
for i, GMM in enumerate(hGMM.GMMs):
    pass  # loop kept for the disabled diagnostic below; a comment alone is not a valid body
    #print("p[%d,%d] = %s"%(hGMM.comm.Get_rank(),i,GMM.p))
hGMM.comm.Barrier()
if MPI.COMM_WORLD.Get_rank() == 0 and save_fig: # @UndefinedVariable
print col_index
fig_nu.savefig(folderFigs + "nus_simulated.eps", type="eps",transparent=True,bbox_inches='tight')
fig_nu.savefig(folderFigs + "nus_simulated.pdf", type="pdf",transparent=True,bbox_inches='tight')
f.savefig(folderFigs + "dcluster_centers_simulated.eps", type="eps",transparent=True,bbox_inches='tight')
f.savefig(folderFigs + "dcluster_centers_simulated.pdf", type="pdf",transparent=True,bbox_inches='tight')
f_histY.savefig(folderFigs + "hist2d_simulated.eps", type="eps",bbox_inches='tight')
f_histY.savefig(folderFigs + "hist2d_simulated.pdf", type="pdf",bbox_inches='tight')
f_histY0.savefig(folderFigs + "hist2d_indv_simulated.eps", type="eps",bbox_inches='tight')
f_histY0.savefig(folderFigs + "hist2d_indv_simulated.pdf", type="pdf",bbox_inches='tight')
f_theta.savefig(folderFigs + "theta_simulated.pdf", type="pdf",transparent=True,bbox_inches='tight')
f_theta.savefig(folderFigs + "theta_simulated.eps", type="eps",transparent=True,bbox_inches='tight')
fig_Q_joint.savefig(folderFigs + "Qjoint_simulated.pdf", type="pdf",transparent=True,bbox_inches='tight')
fig_Q_joint.savefig(folderFigs + "Qjoint_simulated.eps", type="eps",transparent=True,bbox_inches='tight')
for i,f_Q in enumerate(figs_Q):
f_Q.savefig(folderFigs + "Q%d_simulated.pdf"%(i+1), type="pdf",transparent=True,bbox_inches='tight')
f_Q.savefig(folderFigs + "Q%d_simulated.eps"%(i+1), type="eps",transparent=True,bbox_inches='tight')
else:
plt.show()
|
tst-ahernandez/earthenterprise
|
earth_enterprise/src/fusion/portableglobe/cutter/cgi-bin/common/utils.py
|
Python
|
apache-2.0
| 12,945
| 0.01151
|
#!/usr/bin/env python
#
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for support of portable globes."""
import json
import os
import shlex
import subprocess
import sys
import time
import urlparse
import xml.sax.saxutils as saxutils
import distutils.dir_util
import distutils.errors
import errors
BYTES_PER_MEGABYTE = 1024.0 * 1024.0
NAME_TEMPLATE = "%s_%s"
class OsCommandError(Exception):
"""Thrown if os command fails."""
pass
# TODO: consider using a lib like bleach that is specifically
# aimed at foiling XSS attacks.
# Additional characters that need to be escaped for HTML are defined in a
# dictionary mapping each character to its escape string.
# xml.sax.saxutils.escape() takes care of &, < and >.
_HTML_ESCAPE_TABLE = {
    '"': "&quot;",
    "'": "&apos;",
    "`": "&#96;",
    "|": "&#124;"
}
def HtmlEscape(text):
"""Escapes a string for HTML.
Args:
text: source string that needs to be escaped for HTML.
Returns:
HTML escaped string.
"""
if not text:
return text
return saxutils.escape(text, _HTML_ESCAPE_TABLE)
def FileSize(file_path):
"""Returns size of file in megabytes."""
return os.path.getsize(file_path) / BYTES_PER_MEGABYTE
def SizeAsString(size):
"""Converts megabyte float to a string."""
if size < 1000.0:
return "%0.2fMB" % size
size /= 1024.0
if size < 1000.0:
return "%0.2fGB" % size
else:
return "%0.2fTB" % (size / 1024.0)
def FileSizeAsString(file_path):
"""Returns size of file as a string."""
return SizeAsString(FileSize(file_path))
def DirectorySize(directory):
"""Returns size of directory in megabytes."""
directory_size = 0
if os.path.isdir(directory):
for (path, unused_dirs, files) in os.walk(directory):
for file_name in files:
file_path = os.path.join(path, file_name)
directory_size += os.path.getsize(file_path)
return directory_size / BYTES_PER_MEGABYTE
def DirectorySizeAsString(directory):
"""Returns size of directory as a string."""
return SizeAsString(DirectorySize(directory))
def CreateDirectory(directory):
"""Create entire directory path."""
if os.path.exists(directory):
return
try:
os.makedirs(directory)
except OSError:
PrintAndLog("Raising error: Cannot create directory \'%s\'" % directory)
raise
def CopyDirectory(source, destination, logger):
"""Copy from source to destination, which will be created if it does not exist."""
cmd = "Copying %s to %s" % (source, destination)
PrintAndLog(cmd, logger)
try:
distutils.dir_util.copy_tree(source, destination)
except distutils.errors.DistutilsFileError:
PrintAndLog("Raising error: Cannot copy to directory %s" % destination)
raise
def DiskSpace(path):
"""Returns remaining disk space in Megabytes."""
mount_info = os.statvfs(path)
  return mount_info.f_bsize * mount_info.f_bavail / BYTES_PER_MEGABYTE
def Uid():
"""Returns a uid for identifying a globe building sequence."""
return "%d_%f" % (os.getpid(), time.time())
def GlobesToText(globes, template, sort_item, reverse=False, is_text=False):
"""Fills in globe template for each globe and returns as array of strings."""
result = []
  # If it is text, sort the lower case version of the text.
if is_text:
items = sorted(globes.iteritems(),
key=lambda globe_pair: globe_pair[1][sort_item].lower(),
reverse=reverse)
# If it is NOT text, use default less than comparison.
else:
items = sorted(globes.iteritems(),
key=lambda globe_pair: globe_pair[1][sort_item],
reverse=reverse)
for [unused_key, globe] in iter(items):
next_entry = template
for [globe_term, globe_value] in globe.iteritems():
replace_item = "[$%s]" % globe_term.upper()
if globe_term == "globe" or globe_term == "info_loaded":
pass
elif globe_term == "size":
next_entry = next_entry.replace(replace_item, SizeAsString(globe_value))
else:
next_entry = next_entry.replace(replace_item, globe_value)
result.append(next_entry)
return result
def GlobeNameReplaceParams(globe_name):
"""Returns a single replacement parameter for the globe name."""
return {"[$GLOBE_NAME]": globe_name}
def ReplaceParams(text, replace_params):
"""Replace keys with values in the given text."""
for (key, value) in replace_params.iteritems():
text = text.replace(key, value)
return text
def OutputFile(file_name, replace_params):
"""Outputs a file to standard out with the globe name replaced."""
fp = open(file_name)
text = fp.read()
fp.close()
print ReplaceParams(text, replace_params)
def CreateInfoFile(path, description):
"""Create globe info file."""
content = "Portable Globe\n"
content += GmTimeStamp()
content += "\n%s" % TimeStamp()
content += "Globe description: %s\n" % description
CreateFile(path, content)
def CreateFile(path, content):
"""Create globe info file."""
try:
fp = open(path, "w")
fp.write(content)
fp.close()
except IOError as error:
print error
sys.exit(1)
def TimeStamp():
"""Create timestamp based on local time."""
return time.strftime("%Y-%m-%d %H:%M:%S\n", time.localtime())
def GmTimeStamp():
"""Create timestamp based on Greenwich Mean Time."""
return time.strftime("%Y-%m-%d %H:%M:%S GMT\n", time.gmtime())
def ConvertToQtNode(level, col, row):
"""Converts col, row, and level to corresponding qtnode string."""
qtnode = "0"
half_ndim = 1 << (level - 1)
for unused_ in xrange(level):
if row >= half_ndim and col < half_ndim:
qtnode += "0"
row -= half_ndim
elif row >= half_ndim and col >= half_ndim:
qtnode += "1"
row -= half_ndim
col -= half_ndim
elif row < half_ndim and col >= half_ndim:
qtnode += "2"
col -= half_ndim
else:
qtnode += "3"
half_ndim >>= 1
return qtnode
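# Illustrative aside (not in the original module): ConvertToQtNode walks from
# the coarsest level down, appending one quadrant digit per level, e.g.
#   ConvertToQtNode(1, col=0, row=0)  # -> "03"
#   ConvertToQtNode(2, col=1, row=1)  # -> "031"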
def JsBoolString(bool_value):
"""Write boolean value as javascript boolean."""
if bool_value:
return "true"
else:
return "false"
def WriteHeader(content_type="text/html"):
"""Output header for web page."""
  # print adds its own newline, so together with the explicit \n this emits
  # the blank line that terminates the HTTP headers.
print "Content-Type: %s\n" % content_type
def ExecuteCmd(os_cmd, logger, dry_run=False):
"""Execute os command and log results.
Runs command, waits until it finishes, then analyses the return code, and
reports either "SUCCESS" or "FAILED".
Use if output of command is not desired, otherwise it should be redirected
to a file or use RunCmd below.
Args:
os_cmd: Linux shell command to execute.
logger: Logger responsible for outputting log messages.
dry_run: Whether command should only be printed but not run.
Throws:
OsCommandError
"""
PrintAndLog("Executing: %s" % os_cmd, logger)
if dry_run:
PrintAndLog("-- dry run --", logger)
return
try:
if isinstance(os_cmd, str):
os_cmd = shlex.split(os_cmd)
p = subprocess.Popen(os_cmd, shell=False,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
err_data = p.communicate()[1]
return_code = p.returncode
if return_code != 0:
PrintAndLog("Raising error: %s (return code %d)\n"
% (err_data, return_code), logger)
raise OsCommandError()
else:
PrintAndLog("SUCCESS", logger, None)
except Exception, e:
PrintAndLog("FAILED: %s" % e.__str__(), logger)
raise OsCommandError()
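# Illustrative usage sketch (not in the original module; `logger` is whatever
# object PrintAndLog accepts):
#   ExecuteCmd("ls -l /tmp", logger)                    # runs, logs SUCCESS/FAILED
#   ExecuteCmd("rm -rf staging", logger, dry_run=True)  # only logs the command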
def ExecuteCmdInBackground(os_cmd, logger):
"""Execute os command in the background and log results.
Runs command in the background and returns immediately w
|
EmreAtes/spack
|
var/spack/repos/builtin/packages/editres/package.py
|
Python
|
lgpl-2.1
| 1,739
| 0.000575
|
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Editres(AutotoolsPackage):
"""Dynamic resource editor for X Toolkit applications."""
homepage = "http://cgit.freedesktop.org/xorg/app/editres"
    url = "https://www.x.org/archive/individual/app/editres-1.0.6.tar.gz"
version('1.0.6', '310c504347ca499874593ac96e935353')
depends_on('libxaw')
depends_on('libx11')
depends_on('libxt')
depends_on('libxmu')
depends_on('pkgconfig', type='build')
depends_on('util-macros', type='build')
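# Illustrative aside (not part of the recipe): once this file is on a Spack
# repo path, the package would typically be built with
#   spack install editres
# letting the concretizer resolve the X11 dependencies declared above.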
|
shu-mutou/pecan-swagger
|
tests/test_utils.py
|
Python
|
bsd-3-clause
| 10,382
| 0.000096
|
import unittest
from pecan_swagger import utils
class TestUtils(unittest.TestCase):
def test_swagger_build(self):
from .resources import example_app
expected = {
"swagger": "2.0",
"info": {
"version": "1.0",
"title": "example_app"
},
"produces": [],
"consumes": [],
"paths": {
"/api": {
"get": {}
},
"/messages": {
"get": {},
"post": {}
},
"/profile": {
"get": {},
"post": {}
},
"/profile/image": {
"get": {},
"post": {}
},
"/profile/stats": {
"get": {}
}
}
}
actual = utils.swagger_build('example_app', '1.0')
self.assertDictEqual(expected, actual)
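    # Illustrative aside (not in the original tests): the generated spec is a
    # plain dict, so it can be dumped for manual inspection, e.g.
    #   import json
    #   print(json.dumps(actual, indent=2))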
def test_swagger_build_wsme(self):
from .resources import example_wsme_app
expected = \
{
"consumes": [],
"info": {
"title": "example_wsme_app",
"version": "1.0"
},
"paths": {
"/api": {
"get": {}
},
"/messages": {
"get": {},
"post": {}
},
"/profile": {
"get": {},
"post": {}
},
"/profile/image": {
"get": {},
"post": {}
},
"/profile/stats": {
"get": {}
},
"/wsmemessages": {
"get": {
"description": "",
"parameters": [],
"responses": {
200: {
"description": "",
"schema": {
"items": {
"items": {
"properties": {
"id": {
"maxLength": 255,
"minLength": 1,
"type": "string"
},
"message": {
"maxLength": 255,
"minLength": 1,
"type": "string"
},
"message_from": {
"enum": [
'1.OSOMATSU',
                                            '2.KARAMATSU',
'3.CHOROMATSU',
                                            '4.ICHIMATSU',
'5.JUSHIMATSU',
'6.TODOMATSU'
],
"type": "string"
},
"message_size": {
"minimum": 1,
"type": "integer"
}
}
},
"type": "object"
},
"type": "array"
}
}
}
},
"post": {
"description": "",
"parameters": [
{
"in": "query",
"name": "message",
"required": True,
"type": "string"
}
],
"responses": {
201: {
"description": "",
"schema": {
"items": {
"properties": {
"id": {
"maxLength": 255,
"minLength": 1,
"type": "string"
},
"message": {
"maxLength": 255,
"minLength": 1,
"type": "string"
},
"message_from": {
"enum": [
'1.OSOMATSU',
'2.KARAMATSU',
'3.CHOROMATSU',
'4.ICHIMATSU',
'5.JUSHIMATSU',
'6.TODOMATSU'
],
"type": "string"
},
"message_size": {
"minimum": 1,
"type": "integer"
}
}
},
"type": "object"
}
}
}
}
},
"/wsmemessages/<specifier>": {
"delete": {
"description": "",
"parameters": [
{
"in": "query",
"name": "id",
"required": True,
"type": "string"
}
],
"responses": {
204: {
"description": ""
}
}
},
"get": {
"description": "",
"parameters": [
{
"in": "query",
"name": "id",
"required": True,
"type": "string"
}
],
"responses": {
200: {
"description": "",
"schema": {
"items": {
"properties": {
"id": {
"maxLength": 255,
"minLength": 1,
"type": "string"
},
"message": {
"maxLength": 255,
"minLength": 1,
"type": "string"
},
"message_from": {
"enum": [
'1.OSOMATSU',
'2.KARAMATSU',
'3.CHOROMATSU',
'4.ICHIMATSU',
'5.JUSHIMATSU',
'6.TODOMATSU'
],
"type": "string"
},
"message_size": {
"minimum": 1,
"type": "integer"
}
}
},
"type": "object"
}
}
}
}
},
"/wsmemessages/detail": {
"get": {
"description": "",
|
bitmotive/flask-boilerplate
|
tests/test_basics.py
|
Python
|
mit
| 705
| 0
|
import unittest
from flask import current_app
from app import create_app, db
class BasicsTestCase(unittest.TestCase):
# Runs before each test
def setUp(self):
self.app = create_app('testing')
self.app_context = self.app.app_context()
self.app_context.push()
db.create_all()
# Runs after each test
def tearDown(self):
db.session.remove()
db.drop_all()
self.app_context.pop()
# Make sure the app exists
def test_app_exists(self):
self.assertFalse(current_app is None)
    # Make sure the app is running with TESTING config
def test_app_is_testing(self):
self.assertTrue(current_app.config['TESTING'])
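# Illustrative aside (not in the original file): assuming tests/ is importable
# as a package, the suite runs with the stdlib runner, e.g.
#   python -m unittest tests.test_basics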
|
ingadhoc/odoo-law-tracking
|
law_tracking_x/commission_treatment.py
|
Python
|
agpl-3.0
| 4,111
| 0.006568
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Law Follow Up
# Copyright (C) 2013 Sistemas ADHOC
# No email
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import re
from openerp import netsvc
from openerp.osv import osv, fields
from openerp.tools.translate import _
class commission_treatment(osv.osv):
"""Commission Treatment"""
_inherit = 'law_tracking.commission_treatment'
def _get_name(self, cr, uid, ids, field_names, arg, context=None):
if context is None:
context = {}
if isinstance(ids, (int, long)):
ids = [ids]
res = {}
for data in self.browse(cr, uid, ids, context=context):
chamber = ''
if data.partner_id.chamber == 'deputies':
chamber = _('Deputies')
else:
chamber = _('Senators')
if data.law_project_id:
res[data.id] = data.law_project_id.name + ' - ' + chamber + ' - ' + data.partner_id.name
# elif data.sen_law_project_id:
# res[data.id] = data.sen_law_project_id.name + ' - ' + data.partner_id.chamber + ' - ' + data.partner_id.name
else:
res[data.id] = ''
return res
def name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100):
if not args:
args = []
ids = set()
if name:
ids.update(self.search(cr, user, args + [('partner_id.name',operator,name)], limit=(limit and (limit-len(ids)) or False) , context=context))
if not limit or len(ids) < limit:
ids.update(self.search(cr, user, args + [('law_project_id.name',operator,name)], limit=limit, context=context))
ids = list(ids)
else:
ids = self.search(cr, user, args, limit=limit, context=context)
result = self.name_get(cr, user, ids, context=context)
return result
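    # Illustrative aside (not in the original module): name_search first matches
    # on the commission partner's name, then tops up the result with law-project
    # name matches until `limit` is reached, so a lookup like
    #   self.name_search(cr, uid, name='energy', limit=10)
    # can mix rows found through either field.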
def _get_has_treatments(self, cr, uid, ids, field_names, arg, context=None):
if context is None:
context = {}
if isinstance(ids, (int, long)):
ids = [ids]
res = {}
for data in self.browse(cr, uid, ids, context=context):
res[data.id] = False
if data.treatment_detail_ids:
res[data.id] = True
return res
_columns = {
'name': fields.function(_get_name, type='char', string='Name'),
'has_treatments': fields.function(_get_has_treatments, type='boolean', string='Has Treatments?'),
}
_sql_constraints = [
('unique', 'unique(law_project_id, partner_id)', 'Commission must be unique'),
]
def _check_commission(self, cr, uid, ids, context=None):
record = self.browse(cr, uid, ids, context=context)
for data in record:
for treatment_detail in data.treatment_detail_ids:
if treatment_detail.order_paper_id.commission_id != data.partner_id:
return False
return True
_constraints = [
(_check_commission, 'Error: All commission treatments should be from the same commission', ['En Comisiones']),
]
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
c0710204/edx-platform
|
lms/envs/test.py
|
Python
|
agpl-3.0
| 12,700
| 0.002756
|
"""
This config file runs the simplest dev environment using sqlite, and db-based
sessions. Assumes structure:
/envroot/
/db # This is where it'll write the database file
/edx-platform # The location of this repo
/log # Where we're going to write log files
"""
# We intentionally define lots of variables that aren't used, and
# want to import all variables from base settings files
# pylint: disable=W0401, W0614
from .common import *
import os
from path import path
from warnings import filterwarnings, simplefilter
from uuid import uuid4
# mongo connection settings
MONGO_PORT_NUM = int(os.environ.get('EDXAPP_TEST_MONGO_PORT', '27017'))
MONGO_HOST = os.environ.get('EDXAPP_TEST_MONGO_HOST', 'localhost')
os.environ['DJANGO_LIVE_TEST_SERVER_ADDRESS'] = 'localhost:8000-9000'
THIS_UUID = uuid4().hex[:5]
# can't test start dates with this True, but on the other hand,
# can test everything else :)
FEATURES['DISABLE_START_DATES'] = True
# Most tests don't use the discussion service, so we turn it off to speed them up.
# Tests that do can enable this flag, but must use the UrlResetMixin class to force urls.py
# to reload. For consistency in user-experience, keep the value of this setting in sync with
# the one in cms/envs/test.py
FEATURES['ENABLE_DISCUSSION_SERVICE'] = False
FEATURES['ENABLE_SERVICE_STATUS'] = True
FEATURES['ENABLE_HINTER_INSTRUCTOR_VIEW'] = True
FEATURES['ENABLE_INSTRUCTOR_LEGACY_DASHBOARD'] = True
FEATURES['ENABLE_SHOPPING_CART'] = True
FEATURES['ENABLE_VERIFIED_CERTIFICATES'] = True
# Enable this feature for course staff grade downloads, to enable acceptance tests
FEATURES['ENABLE_S3_GRADE_DOWNLOADS'] = True
FEATURES['ALLOW_COURSE_STAFF_GRADE_DOWNLOADS'] = True
# Toggles embargo on for testing
FEATURES['EMBARGO'] = True
# Need wiki for courseware views to work. TODO (vshnayder): shouldn't need it.
WIKI_ENABLED = True
# Makes the tests run much faster...
SOUTH_TESTS_MIGRATE = False # To disable migrations and use syncdb instead
# Nose Test Runner
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
_system = 'lms'
_report_dir = REPO_ROOT / 'reports' / _system
_report_dir.makedirs_p()
NOSE_ARGS = [
'--id-file', REPO_ROOT / '.testids' / _system / 'noseids',
'--xunit-file', _report_dir / 'nosetests.xml',
]
# Local Directories
TEST_ROOT = path("test_root")
# Want static files in the same dir for running on jenkins.
STATIC_ROOT = TEST_ROOT / "staticfiles"
STATUS_MESSAGE_PATH = TEST_ROOT / "status_message.json"
COURSES_ROOT = TEST_ROOT / "data"
DATA_DIR = COURSES_ROOT
COMMON_TEST_DATA_ROOT = COMMON_ROOT / "test" / "data"
# Where the content data is checked out. This may not exist on jenkins.
GITHUB_REPO_ROOT = ENV_ROOT / "data"
USE_I18N = True
LANGUAGE_CODE = 'en' # tests assume they will get English.
XQUEUE_INTERFACE = {
"url": "http://sandbox-xqueue.edx.org",
"django_auth": {
"username": "lms",
"password": "***REMOVED***"
},
"basic_auth": ('anant', 'agarwal'),
}
XQUEUE_WAITTIME_BETWEEN_REQUESTS = 5 # seconds
# Don't rely on a real staff grading backend
MOCK_STAFF_GRADING = True
MOCK_PEER_GRADING = True
# TODO (cpennington): We need to figure out how envs/test.py can inject things
# into common.py so that we don't have to repeat this sort of thing
STATICFILES_DIRS = [
COMMON_ROOT / "static",
PROJECT_ROOT / "static",
]
STATICFILES_DIRS += [
(course_dir, COMMON_TEST_DATA_ROOT / course_dir)
for course_dir in os.listdir(COMMON_TEST_DATA_ROOT)
if os.path.isdir(COMMON_TEST_DATA_ROOT / course_dir)
]
# Avoid having to run collectstatic before the unit test suite
# If we don't add these settings, then Django templates that can't
# find pipelined assets will raise a ValueError.
# http://stackoverflow.com/questions/12816941/unit-testing-with-django-pipeline
STATICFILES_STORAGE='pipeline.storage.NonPackagingPipelineStorage'
PIPELINE_ENABLED=False
update_module_store_settings(
MODULESTORE,
module_store_options={
'fs_root': TEST_ROOT / "data",
},
xml_store_options={
'data_dir': COMMON_TEST_DATA_ROOT,
},
doc_store_settings={
'host': MONGO_HOST,
'port': MONGO_PORT_NUM,
'db': 'test_xmodule',
'collection': 'test_modulestore{0}'.format(THIS_UUID),
},
)
CONTENTSTORE = {
'ENGINE': 'xmodule.contentstore.mongo.MongoContentStore',
'DOC_STORE_CONFIG': {
'host': MONGO_HOST,
'db': 'xcontent',
'port': MONGO_PORT_NUM,
}
}
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': TEST_ROOT / 'db' / 'edx.db'
},
}
CACHES = {
# This is the cache used for most things.
# In staging/prod envs, the sessions also live here.
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'edx_loc_mem_cache',
'KEY_FUNCTION': 'util.memcache.safe_key',
},
# The general cache is what you get if you use our util.cache. It's used for
# things like caching the course.xml file for different A/B test groups.
# We set it to be a DummyCache to force reloading of course.xml in dev.
# In staging environments, we would grab VERSION from data uploaded by the
# push process.
'general': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
'KEY_PREFIX': 'general',
'VERSION': 4,
'KEY_FUNCTION': 'util.memcache.safe_key',
},
'mongo_metadata_inheritance': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': os.path.join(tempfile.gettempdir(), 'mongo_metadata_inheritance'),
'TIMEOUT': 300,
'KEY_FUNCTION': 'util.memcache.safe_key',
},
'loc_cache': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'edx_location_mem_cache',
},
}
# Dummy secret key for dev
SECRET_KEY = '85920908f28904ed733fe576320db18cabd7b6cd'
# hide ratelimit warnings while running tests
filterwarnings('ignore', message='No request passed to the backend, unable to rate-limit')
# Ignore deprecation warnings (so we don't clutter Jenkins builds/production)
# https://docs.python.org/2/library/warnings.html#the-warnings-filter
simplefilter('ignore') # Change to "default" to see the first instance of each hit
# or "error" to convert all into errors
######### Third-party auth ##########
FEATURES['ENABLE_THIRD_PARTY_AUTH'] = True
################################## OPENID #####################################
FEATURES['AUTH_USE_OPENID'] = True
FEATURES['AUTH_USE_OPENID_PROVIDER'] = True
################################## SHIB #######################################
FEATURES['AUTH_USE_SHIB'] = True
FEATURES['SHIB_DISABLE_TOS'] = True
FEATURES['RESTRICT_ENROLL_BY_REG_METHOD'] = True
OPENID_CREATE_USERS = False
OPENID_UPDATE_DETAILS_FROM_SREG = True
OPENID_USE_AS_ADMIN_LOGIN = False
OPENID_PROVIDER_TRUSTED_ROOTS = ['*']
############################## OAUTH2 Provider ################################
FEATURES['ENABLE_OAUTH2_PROVIDER'] = True
########################### External REST APIs #################################
FEATURES['ENABLE_MOBILE_REST_API'] = True
FEATURES['ENABLE_VIDEO_ABSTRACTION_LAYER_API'] = True
###################### Payment ##############################3
# Enable fake payment processing page
FEATURES['ENABLE_PAYMENT_FAKE'] = True
# Configure the payment processor to use the fake processing page
# Since both the fake payment page and the shoppingcart app are using
# the same settings, we can generate this randomly and guarantee
# that they are using the same secret.
from random import choice
import string
RANDOM_SHARED_SECRET = ''.join(
choice(string.letters + string.digits + string.punctuation)
for x in range(250)
)
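# Note: string.letters exists only on Python 2 (Python 3 renamed it
# string.ascii_letters), consistent with the Python 2 era of this settings file.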
CC_PROCESSOR_NAME = 'CyberSource2'
CC_PROCESSOR['CyberSource2']['SECRET_KEY'] = RANDOM_SHARED_SECRET
CC_PROCESSOR['CyberSource2']['ACCESS_KEY'] = "0123456789012345678901"
CC_PROCESSOR['CyberSource2']['PROFILE_ID'] = "edx"
CC_PROCESSOR['CyberSource2']['PURCHASE_ENDPOINT'] = "/shoppingcart/payment_fake"
FEATURES['STORE_BILLING_INFO'] = True
#########
|
Mercy-Nekesa/sokoapp
|
sokoapp/utils/admin.py
|
Python
|
mit
| 1,954
| 0.006141
|
from django.contrib import admin
from django.contrib.contenttypes import generic
from models import Attribute, BaseModel
from django.utils.translation import ugettext_lazy as _
class MetaInline(generic.GenericTabularInline):
model = Attribute
extra = 0
class BaseAdmin(admin.ModelAdmin):
"""
def get_readonly_fields(self, request, obj=None):
fs = super(BaseAdmin, self).get_readonly_fields(request, obj)
fs += ('created_by', 'last_updated_by',)
return fs
def get_fieldsets(self, request, obj=None):
        fs = super(BaseAdmin, self).get_fieldsets(request, obj)
fs[0][1]['fields'].remove('created_by')
fs[0][1]['fields'].remove('last_updated_by')
        fs.extend([(_('Other information'), {'fields':['created_by','last_updated_by'], 'classes':['collapse']})])
return fs
def changelist_view(self, request, extra_context=None):
if request.user.has_perm('%s.can_view_deleted' % self.model._meta.app_label):
if not "deleted_flag" in self.list_filter:
self.list_filter += ("deleted_flag",)
return super(BaseAdmin, self).changelist_view(request, extra_context)
def queryset(self, request):
return super(BaseAdmin, self).queryset(request).exclude(deleted_flag=True)
"""
def save_model(self, request, obj, form, change):
if not change:
obj.created_by = request.user
obj.last_updated_by = request.user
obj.save()
def save_formset(self, request, form, formset, change):
instances = formset.save(commit=False)
for instance in instances:
if isinstance(instance, BaseModel): #Check if it is the correct type of inline
if not instance.created_by_id:
instance.created_by = request.user
instance.last_updated_by = request.user
instance.save()
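# Illustrative usage sketch (not in the original module; `Article` is a
# hypothetical BaseModel subclass):
#   class ArticleAdmin(BaseAdmin):
#       inlines = [MetaInline]
#   admin.site.register(Article, ArticleAdmin)
# save_model/save_formset above then stamp created_by/last_updated_by
# automatically on every admin save.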
|
waseem18/oh-mainline
|
vendor/packages/Django/tests/regressiontests/settings_tests/tests.py
|
Python
|
agpl-3.0
| 12,386
| 0.00113
|
import os
import warnings
from django.conf import settings, global_settings
from django.core.exceptions import ImproperlyConfigured
from django.http import HttpRequest
from django.test import SimpleTestCase, TransactionTestCase, TestCase, signals
from django.test.utils import override_settings
from django.utils import unittest, six
@override_settings(TEST='override')
class FullyDecoratedTranTestCase(TransactionTestCase):
def test_override(self):
self.assertEqual(settings.TEST, 'override')
@override_settings(TEST='override2')
def test_method_override(self):
self.assertEqual(settings.TEST, 'override2')
def test_decorated_testcase_name(self):
self.assertEqual(FullyDecoratedTranTestCase.__name__, 'FullyDecoratedTranTestCase')
def test_decorated_testcase_module(self):
self.assertEqual(FullyDecoratedTranTestCase.__module__, __name__)
@override_settings(TEST='override')
class FullyDecoratedTestCase(TestCase):
def test_override(self):
self.assertEqual(settings.TEST, 'override')
@override_settings(TEST='override2')
def test_method_override(self):
self.assertEqual(settings.TEST, 'override2')
class ClassDecoratedTestCaseSuper(TestCase):
"""
Dummy class for testing max recursion error in child class call to
super(). Refs #17011.
"""
def test_max_recursion_error(self):
pass
@override_settings(TEST='override')
class ClassDecoratedTestCase(ClassDecoratedTestCaseSuper):
def test_override(self):
self.assertEqual(settings.TEST, 'override')
@override_settings(TEST='override2')
def test_method_override(self):
self.assertEqual(settings.TEST, 'override2')
def test_max_recursion_error(self):
"""
Overriding a method on a super class and then calling that method on
the super class should not trigger infinite recursion. See #17011.
"""
try:
super(ClassDecoratedTestCase, self).test_max_recursion_error()
except RuntimeError:
self.fail()
class SettingsTests(TestCase):
def setUp(self):
self.testvalue = None
signals.setting_changed.connect(self.signal_callback)
def tearDown(self):
signals.setting_changed.disconnect(self.signal_callback)
def signal_callback(self, sender, setting, value, **kwargs):
if setting == 'TEST':
self.testvalue = value
def test_override(self):
settings.TEST = 'test'
self.assertEqual('test', settings.TEST)
with self.settings(TEST='override'):
self.assertEqual('override', settings.TEST)
self.assertEqual('test', settings.TEST)
del settings.TEST
def test_override_change(self):
settings.TEST = 'test'
self.assertEqual('test', settings.TEST)
with self.settings(TEST='override'):
self.assertEqual('override', settings.TEST)
settings.TEST = 'test2'
self.assertEqual('test', settings.TEST)
del settings.TEST
def test_override_doesnt_leak(self):
self.assertRaises(AttributeError, getattr, settings, 'TEST')
with self.settings(TEST='override'):
self.assertEqual('override', settings.TEST)
settings.TEST = 'test'
self.assertRaises(AttributeError, getattr, settings, 'TEST')
@override_settings(TEST='override')
def test_decorator(self):
self.assertEqual('override', settings.TEST)
def test_context_manager(self):
self.assertRaises(AttributeError, getattr, settings, 'TEST')
override = override_settings(TEST='override')
self.assertRaises(AttributeError, getattr, settings, 'TEST')
override.enable()
self.assertEqual('override', settings.TEST)
override.disable()
self.assertRaises(AttributeError, getattr, settings, 'TEST')
def test_class_decorator(self):
# SimpleTestCase can be decorated by override_settings, but not ut.TestCase
class SimpleTestCaseSubclass(SimpleTestCase):
pass
class UnittestTestCaseSubclass(unittest.TestCase):
pass
decorated = override_settings(TEST='override')(SimpleTestCaseSubclass)
self.assertIsInstance(decorated, type)
self.assertTrue(issubclass(decorated, SimpleTestCase))
with six.assertRaisesRegex(self, Exception,
"Only subclasses of Django SimpleTestCase*"):
decorated = override_settings(TEST='override')(UnittestTestCaseSubclass)
def test_signal_callback_context_manager(self):
self.assertRaises(AttributeError, getattr, settings, 'TEST')
with self.settings(TEST='override'):
self.assertEqual(self.testvalue, 'override')
self.assertEqual(self.testvalue, None)
@override_settings(TEST='override')
def test_signal_callback_decorator(self):
self.assertEqual(self.testvalue, 'override')
#
# Regression tests for #10130: deleting settings.
#
def test_settings_delete(self):
settings.TEST = 'test'
self.assertEqual('test', settings.TEST)
del settings.TEST
self.assertRaises(AttributeError, getattr, settings, 'TEST')
def test_settings_delete_wrapped(self):
self.assertRaises(TypeError, delattr, settings, '_wrapped')
def test_override_settings_delete(self):
"""
        Allow deletion of a setting in an overridden settings set (#18824)
"""
previous_i18n = settings.USE_I18N
with self.settings(USE_I18N=False):
del settings.USE_I18N
self.assertRaises(AttributeError, getattr, settings, 'USE_I18N')
self.assertEqual(settings.USE_I18N, previous_i18n)
def test_allowed_include_roots_string(self):
"""
ALLOWED_INCLUDE_ROOTS is not allowed to be incorrectly set to a string
rather than a tuple.
"""
self.assertRaises(ValueError, setattr, settings,
'ALLOWED_INCLUDE_ROOTS', '/var/www/ssi/')
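# Illustrative aside (not in the original tests): override_settings also works
# as a plain context manager outside test classes, e.g.
#   with override_settings(TEST='tmp'):
#       assert settings.TEST == 'tmp'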
class TrailingSlashURLTests(TestCase):
"""
Tests for the MEDIA_URL and STATIC_URL settings.
They must end with a slash to ensure there's a deterministic way to build
paths in templates.
"""
settings_module = settings
def setUp(self):
self._original_media_url = self.settings_module.MEDIA_URL
self._original_static_url = self.settings_module.STATIC_URL
def tearDown(self):
self.settings_module.MEDIA_URL = self._original_media_url
self.settings_module.STATIC_URL = self._original_static_url
def test_blank(self):
"""
The empty string is accepted, even though it doesn't end in a slash.
"""
self.settings_module.MEDIA_URL = ''
self.assertEqual('', self.settings_module.MEDIA_URL)
self.settings_module.STATIC_URL = ''
self.assertEqual('', self.settings_module.STATIC_URL)
def test_end_slash(self):
"""
It works if the value ends in a slash.
"""
self.settings_module.MEDIA_URL = '/foo/'
self.assertEqual('/foo/', self.settings_module.MEDIA_URL)
self.settings_module.MEDIA_URL = 'http://media.foo.com/'
self.assertEqual('http://media.foo.com/',
self.settings_module.MEDIA_URL)
self.settings_module.STATIC_URL = '/foo/'
self.assertEqual('/foo/', self.settings_module.STATIC_URL)
self.settings_module.STATIC_URL = 'http://static.foo.com/'
self.assertEqual('http://static.foo.com/',
self.settings_module.STATIC_URL)
def test_no_end_slash(self):
"""
An ImproperlyConfigured exception is raised if the value doesn't end
in a slash.
"""
with self.assertRaises(ImproperlyConfigured):
self.settings_module.MEDIA_URL = '/foo'
with self.assertRaises(ImproperlyConfigured):
self.settings_module.MEDIA_URL = 'http://media.foo.com'
with self.assertRaises(ImproperlyConfigured):
self.settings_module.STATIC_URL = '/foo'
with self.assertRaises(ImproperlyConfigured):
|
ICShapy/shapy
|
shapy/scene.py
|
Python
|
mit
| 5,133
| 0.011884
|
# This file is part of the Shapy Project.
# Licensing information can be found in the LICENSE file.
# (C) 2015 The Shapy Team. All rights reserved.
import StringIO
from pyrr.objects import Quaternion, Matrix44, Vector3, Vector4
class Scene(object):
"""Class representing a whole scene."""
class Object(object):
"""Class representing an object in a scene."""
def __init__(self, data={}):
"""Initializes an empty object."""
# Name of the object.
self.id = data.get('id', 'unnamed')
# Translation vector.
self.tx = data.get('tx', 0.0)
self.ty = data.get('ty', 0.0)
self.tz = data.get('tz', 0.0)
# Scaling vector.
self.sx = data.get('sx', 1.0)
self.sy = data.get('sy', 1.0)
self.sz = data.get('sz', 1.0)
# Rotation quaternion.
self.rx = data.get('rx', 0.0)
self.ry = data.get('ry', 0.0)
self.rz = data.get('rz', 0.0)
self.rw = data.get('rw', 0.0)
# Map of vertices.
self.verts = dict(
(int(k), (v[0], v[1], v[2]))
for k, v in (data['verts'] or {}).iteritems()
)
# Map of edges.
self.edges = dict(
(int(k), (v[0], v[1]))
for k, v in (data['edges'] or {}).iteritems()
)
# Map of UV points.
self.uvPoints = dict(
(int(k), (v[0], v[1]))
for k, v in (data['uvPoints'] or {}).iteritems()
)
# Map of UV edges.
self.uvEdges = dict(
(int(k), (v[0], v[1]))
for k, v in (data['uvEdges'] or {}).iteritems()
)
# Map of faces.
self.faces = dict(
(int(k), (v[0], v[1], v[2], v[3], v[4], v[5]))
for k, v in (data['faces'] or {}).iteritems()
)
# Model matrix.
q = Quaternion()
q.x = self.rx
q.y = self.ry
q.z = self.rz
q.w = self.rw
trans = Matrix44.from_translation([self.tx, self.ty, self.tz])
scale = Matrix44([
[self.sx, 0, 0, 0],
[0, self.sy, 0, 0],
[0, 0, self.sz, 0],
[0, 0, 0, 1]
])
self.model = trans * q * scale
@property
def __dict__(self):
"""Converts the object to a serializable dictionary."""
return {
'tx': self.tx, 'ty': self.ty, 'tz': self.tz,
'sx': self.sx, 'sy': self.sy, 'sz': self.sz,
'rx': self.rx, 'ry': self.ry, 'rz': self.rz, 'rw': self.rw
}
def __init__(self, name, data={}):
"""Initializes an empty scene."""
self.objects = dict(
(k, Scene.Object(v)) for k, v in (data['objects'] or {}).iteritems())
@property
def __dict__(self):
"""Converts the scene to a serializable dictionary."""
return {
'objects': dict((k, v.__dict__) for k, v in self.objects.iteritems())
}
def to_stl(self):
"""Converts the scene to STL format."""
s = StringIO.StringIO()
for id, obj in self.objects.iteritems():
print >>s, 'solid %s' % obj.id
for _, v in obj.faces.iteritems():
        e0 = obj.edges[abs(v[0])]
e1 = obj.edges[abs(v[1])]
e2 = obj.edges[abs(v[2])]
v0 = obj.verts[e0[0] if v[0] >= 0 else e0[1]]
v1 = obj.verts[e1[0] if v[1] >= 0 else e1[1]]
v2 = obj.verts[e2[0] if v[2] >= 0 else e2[1]]
v0 = obj.model * Vector4([v0[0], v0[1], v0[2], 1.0])
v1 = obj.model * Vector4([v1[0], v1[1], v1[2], 1.0])
v2 = obj.model * Vector4([v2[0], v2[1], v2[2], 1.0])
a = v1 - v0
b = v2 - v0
n = Vector3([a.x, a.y, a.z]).cross(Vector3([b.x, b.y, b.z]))
n.normalise()
print >>s, 'facet normal %f %f %f' % (n.x, n.y, n.z)
print >>s, 'outer loop'
print >>s, 'vertex %f %f %f' % (v0.x, v0.y, v0.z)
print >>s, 'vertex %f %f %f' % (v1.x, v1.y, v1.z)
print >>s, 'vertex %f %f %f' % (v2.x, v2.y, v2.z)
        print >>s, 'endloop'   # ASCII STL closes each facet with endloop/endfacet
        print >>s, 'endfacet'
print >>s, 'endsolid %s' % obj.id
return s.getvalue()
def to_obj(self):
"""Converts the scene to wavefront obj format."""
s = StringIO.StringIO()
for id, obj in self.objects.iteritems():
print >>s, 'o "%s"' % id
vmap = {}
i = 1
for k, v in obj.verts.iteritems():
v = obj.model * Vector4([float(v[0]), float(v[1]), float(v[2]), 1.])
vmap[k] = i
i += 1
print >>s, 'v %f %f %f' % (v.x, v.y, v.z)
uvmap = {}
i = 1
for k, v in obj.uvPoints.iteritems():
uvmap[k] = i
i += 1
print >>s, 'vt %f %f' % v
for _, v in obj.faces.iteritems():
e0 = obj.edges[abs(v[0])]
e1 = obj.edges[abs(v[1])]
e2 = obj.edges[abs(v[2])]
v0 = vmap[e0[0] if v[0] >= 0 else e0[1]]
v1 = vmap[e1[0] if v[1] >= 0 else e1[1]]
v2 = vmap[e2[0] if v[2] >= 0 else e2[1]]
ue0 = obj.uvEdges[abs(v[3])]
ue1 = obj.uvEdges[abs(v[4])]
ue2 = obj.uvEdges[abs(v[5])]
uv0 = uvmap[ue0[0] if v[3] >= 0 else ue0[1]]
uv1 = uvmap[ue1[0] if v[4] >= 0 else ue1[1]]
uv2 = uvmap[ue2[0] if v[5] >= 0 else ue2[1]]
print >>s, 'f %d/%d %d/%d %d/%d' % (v0, uv0, v1, uv1, v2, uv2)
return s.getvalue()
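# Illustrative usage sketch (not in the original module; the data dict shape is
# inferred from the parsing in __init__):
#   scene = Scene('demo', data={'objects': {...}})
#   with open('out.stl', 'w') as fp:
#       fp.write(scene.to_stl())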
|
vim-scripts/Vim-SQL-Workbench
|
resources/py/lib/__init__.py
|
Python
|
gpl-3.0
| 30
| 0
|
__author__ = 'Cosmin Popescu'
|
|
christianurich/VIBe2UrbanSim
|
3rdparty/opus/src/inprocess/bhylee/hlcm_parcel_estimation.py
|
Python
|
gpl-2.0
| 6,635
| 0.008742
|
# Opus/UrbanSim urban simulation software.
# Copyright (C) 2005-2009 University of Washington
# See opus_core/LICENSE
from urbansim.configs.hlcm_estimation_config import HLCMEstimationConfig
from psrc_parcel.configs.baseline_estimation import BaselineEstimation
from opus_core.session_configuration import SessionConfiguration
from opus_core.store.attribute_cache import AttributeCache
from my_estimation_config import my_configuration
class HlcmParcelEstimation(BaselineEstimation): # comment out for urbansim.configs.hlcm_estimation_config
#class HlcmParcelEstimation(HLCMEstimationConfig): # comment out for psrc_parcel.configs.baseline_estimation
def update_config(self):
# HLCMEstimationConfig.update_config(self) # comment out for psrc_parcel.configs.baseline_estimation
#
self.replace(my_configuration)
estimate_config = {}
# estimate_config["export_estimation_data"]=True
# estimate_config["estimation_data_file_name"]="/tmp/HLCM_building_estimate_data"
# estimate_config["use_biogeme_data_format"]=True
# estimate_config["weights_for_estimation_string"]= "has_eg_1_units=building.residential_units>=1" #"psrc.parcel.residential_units_when_has_eg_1_surveyed_households_and_is_in_county_033"
#"sampling_filter=(building.disaggregate(building_type.building_type_name)=='single_family_residential') + (building.disaggregate(building_type.building_type_name)=='multi_family_residential') + (building.disaggregate(building_type.building_type_name)=='condo_residential')"
#"has_eg_1_units=urbansim.building.residential_units>=1"
# estimate_config["stratum"] = "psrc.parcel.is_in_city_seattle" #"psrc.parcel.stratify_by_is_in_city_seattle_and_is_single_family_unit"
# estimate_config["sample_size_from_each_stratum"] = 5
# estimate_config["sample_size_from_chosen_stratum"] = 4
# estimate_config["include_chosen_choice"] = True
estimate_config['wesml_sampling_correction_variable'] = 'psrc_parcel.building.wesml_sampling_correction_variable'
#estimate_config['submodel_string'] = "None"
# self["models_configuration"]["household_location_choice_model"]["controller"]["init"]["arguments"]['sample_size_locations'] = 30
self["models_configuration"]["household_location_choice_model"]["controller"]["init"]["arguments"]['sampler']="'opus_core.samplers.weighted_sampler'"#"'opus_core.samplers.stratified_sampler'" #
self["models_configuration"]["household_location_choice_model"]["controller"]["init"]["arguments"]["estimate_config"] = estimate_config
self["models_configuration"]["household_location_choice_model"]["controller"]["init"]["arguments"]["estimation_weight_string"] = "'has_eg_1_units=building.residential_units>=1'"
self["models_configuration"]["household_location_choice_model"]["controller"]["init"]["arguments"]["capacity_string"] = "'has_eg_1_units=building.residential_units>=1'"
self["models_configuration"]["household_location_choice_model"]["controller"]["init"]["arguments"]["number_of_agents_string"] = "'(building.building_id < 0).astype(int32)'"
# self["models_configuration"]["household_location_choice_model"]["controller"]["init"]["arguments"]["estimation_weight_string"] = "'urbansim_parcel.building.vacant_residential_units'"
# self["models_configuration"]["household_location_choice_model"]["controller"]["init"]["arguments"]["estimation_weight_string"] = "'psrc_parcel.building.residential_units'"
#{"weights_for_estimation_string":"psrc.parcel.residential_units_when_has_eg_1_surveyed_households_and_is_in_county_033"}
self["models_configuration"]["household_location_choice_model"]["controller"]["init"]["arguments"]["location_set"] = "building"
#self["models_configuration"]["household_location_choice_model"]["controller"]["init"]["arguments"]["location_id_string"] = "'household.parcel_id'"
# self["models_configuration"]["household_location_choice_model"]["controller"]["init"]["arguments"]['submodel_string'] = "'psrc.household.number_of_nonhome_based_workers'"
self["models_configuration"]["household_location_choice_model"]["controller"]["init"]["arguments"]["variable_package"]="'urbansim_parcel'"
        self["models_configuration"]["household_location_choice_model"]["controller"]["prepare_for_estimate"]["arguments"]["join_datasets"] = 'True'
self["models_configuration"]["household_location_choice_model"]["controller"]["prepare_for_estimate"]["arguments"]["index_to_unplace"] = 'None'
self["models_configuration"]["household_location_choice_model"]["con
|
troller"]["prepare_for_estimate"]["arguments"]["filter"] = "'household.move == 1'"#None #"'psrc.household.customized_filter'"
self["models_configuration"]["household_location_choice_model"]["controller"]["init"]["arguments"]['filter'] = "'urbansim_parcel.building.is_residential'"
# self["datasets_to_preload"].merge({"tour":{}, "person":{}})
# self["datasets_to_cache_after_each_model"] += ["person"]
self["models"] = [
# {"household_relocation_model": ["run"]},
# {"tour_schedule_model": ["run"]},
{"household_location_choice_model": ["estimate"]}
]
if __name__ == '__main__':
from my_estimation_config import my_configuration
from urbansim.estimation.estimator import Estimator
from urbansim.estimation.estimator import update_controller_by_specification_from_module
from opus_core.simulation_state import SimulationState
from opus_core.store.attribute_cache import AttributeCache
run_configuration = HlcmParcelEstimation()
run_configuration.update_config()
run_configuration = update_controller_by_specification_from_module(
run_configuration, "household_location_choice_model",
"inprocess.bhylee.hlcm_parcel_specification")
er = Estimator(run_configuration, save_estimation_results=False)
er.estimate()
# er.create_prediction_success_table()
# er.create_prediction_success_table(choice_geography_id="area_type_id=building.disaggregate(zone.area_type_id, intermediates=[parcel])" )
# er.create_prediction_success_table(choice_geography_id="building_type_id=building.building_type_id" )
# er.create_prediction_success_table(choice_geography_id="large_area_id=building.disaggregate(faz.large_area_id, intermediates=[zone, parcel])" )
# er.reestimate("hlcm_parcel_specification")
|
brenns10/social
|
social/accounts/github.py
|
Python
|
bsd-3-clause
| 2,234
| 0.000895
|
"""
**GitHubAccount**
Represents an account at GitHub.
- Matches a link that looks like it goes to a GitHub profile page.
- Returns the "your site" URL from the user's GitHub profile.
- Use on the command line: ``github:username``.
"""
from __future__ import print_function, division
import re
import requests
from lxml import html
from . import Account
_URL_RE = re.compile(r'https?://(www.)?github.com/(?P<username>\w+)/?\Z')
class GitHubAccount(Account):
def __init__(self, username=None, url=None, **_):
if username is not None:
self._username = username
elif url is not None:
match = _URL_RE.match(url)
if match:
self._username = match.group('username')
else:
raise ValueError('No username match.')
else:
raise ValueError('No usable parameters!')
def expand(self, info):
# Load their profile page.
url = 'https://github.com/%s' % self._username
page = requests.get(url)
tree = html.fromstring(page.text)
# Save info
info['usernames'] = self._username
for span in tree.xpath(r'//*[@itemprop="name"]'):
info['name'] = span.text_content().strip()
for span in tree.xpath(r'//*[@itemprop="worksFor"]'):
info['employer'] = span.text_content().strip()
for span in tree.xpath(r'//*[@itemprop="homeLocation"]'):
info['location'] = span.text_content().strip()
# Search for a website!
for anchor in tree.xpath(r'//a[contains(@class,"url")]'):
yield {'url': anchor.attrib['href']}
for anchor in tree.xpath(r'//a[contains(@class, "email")]'):
yield {'url': anchor.attrib['href']}
@staticmethod
def match(**options):
return (
'url' in options
and _URL_RE.match(options['url'])
)
@staticmethod
def shortname():
return 'github'
def __str__(self):
return 'GitHubAccount(username=%r)' % self._username
def __hash__(self):
return hash(self._username)
def __eq__(self, other):
return type(other) is GitHubAccount and self._username == other._username
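# Illustrative usage sketch (not in the original module):
#   acct = GitHubAccount(url='https://github.com/torvalds')
#   info = {}
#   for hit in acct.expand(info):   # expand() yields {'url': ...} leads
#       print(hit['url'])
#   # info now holds 'usernames' plus any scraped name/employer/location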
|
luqasz/mcm
|
tests/integration/test_compare.py
|
Python
|
gpl-2.0
| 4,109
| 0.010465
|
# -*- coding: UTF-8 -*-
import pytest
from mcm.comparators import UniqueKeyComparator, SingleElementComparator, OrderedComparator
from mcm.datastructures import CmdPathRow
@pytest.fixture
def compare_data(request):
single = {
'wanted':CmdPathRow({"primary-ntp":"1.1.1.1"}),
'present':CmdPathRow({"primary-ntp":"213.222.193.35"}),
'difference':CmdPathRow({"primary-ntp":"1.1.1.1"}),
}
default = {
'wanted':CmdPathRow({'name':'admin', 'group':'read'}),
'present':CmdPathRow({'name':'admin', 'group':'full', '.id':'*2'}),
'extra':CmdPathRow({'name':'operator', 'group':'read', '.id':'*3'}),
'difference':CmdPathRow({'group':'read', '.id':'*2'}),
}
if 'single' in request.cls.__name__.lower():
return single
else:
return default
class Test_SingleComparator:
def setup(self):
self.comparator = SingleElementComparator()
def test_difference_in_SET(self, compare_data):
ADD, SET, DEL = self.comparator.compare(wanted=(compare_data['wanted'],), present=(compare_data['present'],))
assert SET == (compare_data['difference'],)
def test_empty_SET_when_same_data(self, compare_data):
ADD, SET, DEL = self.comparator.compare(wanted=(compare_data['wanted'],), present=(compare_data['wanted'],))
assert SET == tuple()
def test_empty_SET_when_empty_wanted(self, compare_data):
ADD, SET, DEL = self.comparator.compare(wanted=tuple(), present=(compare_data['present'],))
assert SET == tuple()
def test_empty_ADD(self, compare_data):
ADD, SET, DEL = self.comparator.compare(wanted=(compare_data['wanted'],), present=(compare_data['present'],))
assert ADD == tuple()
def test_empty_DEL(self, compare_data):
ADD, SET, DEL = self.comparator.compare(wanted=(compare_data['wanted'],), present=(compare_data['present'],))
assert DEL == tuple()
class Test_OrderedComparator:
def setup(self):
self.comparator = OrderedComparator()
def test_extra_in_DEL(self, compare_data):
ADD, SET, DEL = self.comparator.compare(wanted=(compare_data['wanted'],), present=(compare_data['present'],compare_data['extra']))
assert DEL == (compare_data['extra'],)
def test_present_in_DEL_when_empty_wanted(self, compare_data):
ADD, SET, DEL = self.comparator.compare(wanted=(), present=(compare_data['present'], compare_data['extra']))
assert DEL == (compare_data['present'], compare_data['extra'])
def test_empty_ADD_when_empty_wanted(self, compare_data):
ADD, SET, DEL = self.comparator.compare(wanted=(), present=(compare_data['present'],))
assert ADD == tuple()
def test_difference_in_SET(self, compare_data):
ADD, SET, DEL = self.comparator.compare(wanted=(compare_data['wanted'],), present=(compare_data['present'],))
assert SET == (compare_data['difference'],)
class Test_UniqueKeyComparator:
def setup(self):
self.comparator = UniqueKeyComparator( keys=('name',) )
def test_extra_in_DEL(self, compare_data):
ADD, SET, DEL = self.comparator.compare(wanted=(compare_data['wanted'],), present=(compare_data['present'], compare_data['extra']))
assert DEL == (compare_data['extra'],)
def test_present_in_DEL_when_empty_wanted(self, compare_data):
ADD, SET, DEL = self.comparator.compare(wanted=(), present=(compare_data['present'], compare_data['extra']))
        # compare sets instead of tuples. order in which objects exist in DEL does not matter
assert set(DEL) == set((compare_data['present'], compare_data['extra']))
def test_compare_returns_difference_in_SET(self, compare_data):
ADD, SET, DEL = self.comparator.compare(wanted=(compare_data['wanted'],), present=(compare_data['present'],compare_data['extra']))
assert SET == (compare_data['difference'],)
def test_wanted_in_ADD(self, compare_data):
ADD, SET, DEL = self.comparator.compare(wanted=(compare_data['wanted'],), present=())
assert ADD == (compare_data['wanted'],)
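# For orientation, a hedged sketch of driving a comparator outside the test
# suite; the rows mirror the fixture data above.
def _example_usage():
    wanted = (CmdPathRow({'name': 'admin', 'group': 'read'}),)
    present = (CmdPathRow({'name': 'admin', 'group': 'full', '.id': '*2'}),)
    ADD, SET, DEL = UniqueKeyComparator(keys=('name',)).compare(wanted=wanted, present=present)
    # ADD: rows missing remotely, SET: per-row differences, DEL: extra rows
    return ADD, SET, DEL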
|
simplicitylab/doc2source
|
modules/loader_parsers.py
|
Python
|
gpl-2.0
| 2,064
| 0.001453
|
"""
Module that handles the loading of parsers
written by Glenn De Backer < glenn at simplicity dot be>
License: GPLv2
"""
import glob
import os
class LoaderParsers(object):
""" Parsers loader class """
def __init__(self):
""" Default constructor """
self.available_parsers = {}
self.get_local_parsers()
def validate_parser(self, parser_class):
""" validates parsers """
class_properties_methods = parser_class.__dict__
# check if class has certain methods
if not "define_document_grammar" in class_properties_methods:
return False
if not "parse" in class_properties_methods:
return False
return True
def load_parser(self, py_filename):
""" Load parser """
# create names
parser_name = os.path.splitext(py_filename)[0]
parser_class_name = "%sParser" % parser_name.title()
module_name = "modules.parsers.%s" % parser_name
# load class dynamically
mod = __import__(module_name, fromlist=[parser_class_name])
parser_class = getattr(mod, parser_class_name)
# check if parser is valid class
        is_valid_parser_class = self.validate_parser(parser_class)
if is_valid_parser_class:
# store class object in dictionary available_parsers
self.available_parsers[parser_name] = parser_class()
        else:
            raise Exception("Parser %s is an invalid parser" % parser_name)
def get_local_parsers(self):
""" Get parsers """
for py_file_path in glob.glob("modules/parsers/*.py"):
# get basename
python_file = os.path.basename(py_file_path)
# skip init python file
if python_file != "__init__.py":
self.load_parser(python_file)
def get_parsers(self):
""" Get available parsers """
return self.available_parsers
def get_parsers_names(self):
""" Get parser names """
return self.available_parsers.keys()
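# A hedged usage sketch; 'example' is a hypothetical parser module
# (modules/parsers/example.py defining an ExampleParser with
# define_document_grammar() and parse()); call signatures are assumed.
if __name__ == '__main__':
    loader = LoaderParsers()            # scans modules/parsers/*.py on construction
    print(loader.get_parsers_names())   # names derived from the .py filenames
    parser = loader.get_parsers().get('example')
    if parser is not None:
        parser.define_document_grammar()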
|
openstack-infra/project-config
|
roles/copy-wheels/files/wheel-indexer.py
|
Python
|
apache-2.0
| 4,718
| 0
|
#!/usr/bin/env python3
#
# Copyright 2020 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Find all .whl files in a directory, and make an index.html page
# in PEP503 (https://www.python.org/dev/peps/pep-0503/) format
import argparse
import datetime
import email
import hashlib
import html
import logging
import os
import sys
import zipfile
parser = argparse.ArgumentParser()
parser.add_argument('toplevel', help="directory to index")
parser.add_argument('-d', '--debug', dest="debug", action='store_true')
parser.add_argument('-o', '--output', dest="output",
default='index.html', help="Output filename, - for stdout")
args = parser.parse_args()
level = logging.DEBUG if args.debug else logging.INFO
logging.basicConfig(level=level)
class NotAWheelException(Exception):
pass
class NoMetadataException(Exception):
pass
class NoRequirementsException(Exception):
pass
class BadFormatException(Exception):
pass
def get_requirements(filename):
# This is an implementation of the description on finding
# requirements from a wheel provided by chrahunt at:
# https://github.com/pypa/pip/issues/7586#issuecomment-573534655
with zipfile.ZipFile(filename) as zip:
metadata = None
names = zip.namelist()
for name in names:
if name.endswith('.dist-info/METADATA'):
metadata = zip.open(name)
# finish loop and sanity check we got the right one?
break
if not metadata:
            raise NoMetadataException
parsed = email.message_from_binary_file(metadata)
requirements = parsed.get_all('Requires-Python')
if not requirements:
raise NoRequirementsException
if len(requirements) > 1:
print("Multiple requirements headers found?")
raise BadFormatException
return html.escape(requirements[0])
def get_sha256(filename):
sha256 = hashlib.sha256()
with open(filename, "rb") as f:
for b in iter(lambda: f.read(4096), b''):
sha256.update(b)
return(sha256.hexdigest())
def create_index(path, files):
project = os.path.basename(path)
output = '''<html>
<head>
<title>%s</title>
</head>
<body>
<ul>
''' % (project)
for f in files:
f_full = os.path.join(path, f)
requirements = ''
try:
logging.debug("Checking for requirements of : %s" % f_full)
requirements = get_requirements(f_full)
logging.debug("requirements are: %s" % requirements)
# NOTE(ianw): i'm not really sure if any of these should be
# terminal, as it would mean pip can't read the file anyway. Just
# log for now.
except NoMetadataException:
logging.debug("no metadata")
pass
except NoRequirementsException:
logging.debug("no python requirements")
pass
except BadFormatException:
logging.debug("Could not open")
pass
sha256 = get_sha256(f_full)
logging.debug("sha256 for %s: %s" % (f_full, sha256))
output += ' <li><a href="%s#sha256=%s"' % (f, sha256)
if requirements:
output += ' data-requires-python="%s" ' % (requirements)
output += '>%s</a></li>\n' % (f)
output += ''' </ul>
</body>
</html>
'''
now = datetime.datetime.now()
output += '<!-- last update: %s -->\n' % now.isoformat()
return output
logging.debug("Building indexes from: %s" % args.toplevel)
for root, dirs, files in os.walk(args.toplevel):
# sanity check we are only called from leaf directories by the
# driver script
if dirs:
print("This should only be called from leaf directories")
sys.exit(1)
logging.debug("Processing %s" % root)
output = create_index(root, files)
logging.debug("Final output write")
if args.output == '-':
out_file = sys.stdout
else:
out_path = os.path.join(root, args.output)
logging.debug("Writing index file: %s" % out_path)
out_file = open(out_path, "w")
out_file.write(output)
logging.debug("Done!")
|
youtube/cobalt
|
third_party/web_platform_tests/tools/py/doc/example/genhtmlcss.py
|
Python
|
bsd-3-clause
| 426
| 0.014085
|
import py
html = py.xml.html
class my(html):
"a custom style"
    class body(html.body):
style = html.Style(font_size = "120%")
class h2(html.h2):
style = html.Style(background = "grey")
class p(html.p):
style = html.Style(font_weight="bold")
doc = my.html(
my.head(),
my.body(
my.h2("hello world"),
my.p("bold as bold can")
)
)
print doc.unicode(indent=2)
|
lino-framework/welfare
|
lino_welfare/projects/gerd/tests/dumps/18.8.0/cal_recurrentevent.py
|
Python
|
agpl-3.0
| 3,162
| 0.099937
|
# -*- coding: UTF-8 -*-
logger.info("Loading 15 objects to table cal_recurrentevent...")
# fields: id, start_date, start_time, end_date, end_time, name, user, every_unit, every, monday, tuesday, wednesday, thursday, friday, saturday, sunday, max_events, event_type, description
loader.save(create_cal_recurrentevent(1,date(2013,1,1),None,None,None,['Neujahr', "Jour de l'an", "New Year's Day"],None,u'Y',1,True,True,True,True,True,True,True,None,1,u''))
loader.save(create_cal_recurrentevent(2,date(2013,5,1),None,None,None,['Tag der Arbeit', 'Premier Mai', "International Workers' Day"],None,u'Y',1,True,True,True,True,True,True,True,None,1,u''))
loader.save(create_cal_recurrentevent(3,date(2013,7,21),None,None,None,['Nationalfeiertag', 'F\xeate nationale', 'National Day'],None,u'Y',1,True,True,True,True,True,True,True,None,1,u''))
loader.save(create_cal_recurrentevent(4,date(2013,8,15),None,None,None,['Mari\xe4 Himmelfahrt', 'Assomption de Marie', 'Assumption of Mary'],None,u'Y',1,True,True,True,True,True,True,True,None,1,u''))
loader.save(create_cal_recurrentevent(5,date(2013,10,31),None,None,None,['Allerseelen', 'Comm\xe9moration des fid\xe8les d\xe9funts', "All Souls' Day"],None,u'Y',1,True,True,True,True,True,True,True,None,1,u''))
loader.save(create_cal_recurrentevent(6,date(2013,11,1),None,None,None,['Allerheiligen', 'Toussaint', "All Saints' Day"],None,u'Y',1,True,True,True,True,True,True,True,None,1,u''))
loader.save(create_cal_recurrentevent(7,date(2013,11,11),None,None,None,['Waffenstillstand', 'Armistice', 'Armistice with Germany'],None,u'Y',1,True,True,True,True,True,True,True,None,1,u''))
loader.save(create_cal_recurrentevent(8,date(2013,12,25),None,None,None,['Weihnachten', 'No\xebl', 'Christmas'],None,u'Y',1,True,True,True,True,True,True,True,None,1,u''))
loader.save(create_cal_recurrentevent(9,date(2013,3,31),None,None,None,['Ostersonntag', 'P\xe2ques', 'Easter sunday'],None,u'E',1,False,False,False,False,False,False,False,None,1,u''))
loader.save(create_cal_recurrentevent(10,date(2013,4,1),None,None,None,['Ostermontag', 'Lundi de P\xe2ques', 'Easter monday'],None,u'E',1,False,False,False,False,False,False,False,None,1,u''))
loader.save(create_cal_recurrentevent(11,date(2013,5,9),None,None,None,['Christi Himmelfahrt', 'Ascension', 'Ascension of Jesus'],None,u'E',1,False,False,False,False,False,False,False,None,1,u''))
loader.save(create_cal_recurrentevent(12,date(2013,5,20),None,None,None,['Pfingsten', 'Pentec\xf4te', 'Pentecost'],None,u'E',1,False,False,False,False,False,False,False,None,1,u''))
loader.save(create_cal_recurrentevent(13,date(2013,3,29),None,None,None,['Karfreitag', 'Vendredi Saint', 'Good Friday'],None,u'E',1,False,False,False,False,False,False,False,None,1,u''))
loader.save(create_cal_recurrentevent(14,date(2013,2,13),None,None,None,['Aschermittwoch', 'Mercredi des Cendres', 'Ash Wednesday'],None,u'E',1,False,False,False,False,False,False,False,None,1,u''))
loader.save(create_cal_recurrentevent(15,date(2013,2,11),None,None,None,['Rosenmontag', 'Lundi de carnaval', 'Rosenmontag'],None,u'E',1,False,False,False,False,False,False,False,None,1,u''))
loader.flush_deferred_objects()
|
mercycorps/TolaTables
|
silo/migrations/0014_formulacolumnmapping.py
|
Python
|
gpl-2.0
| 815
| 0.002454
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2017-06-16 19:25
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('silo', '0013_deletedsilos'),
]
operations = [
migrations.CreateModel(
name='FormulaColumnMapping',
fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('mapping', models.TextField()),
('operation', models.TextField()),
('column_name', models.TextField()),
('silo', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='silo.Silo')),
],
),
]
|
belleandindygames/league
|
league/champ_chooser/urls.py
|
Python
|
mit
| 756
| 0.006614
|
from django.conf import settings
from django.conf.urls import url
from .views import get_summoner_v3, live_match, test_something, live_match_detail, FrontendAppView, ApiLiveMatch, ChampionInfoView
urlpatterns = [
url(r'^summoner/', get_summoner_v3, name='summoner_lookup'),
    url(r'^live/$', live_match, name='live_match'),
url(r'^live/([a-zA-Z0-9]+)/(.+)/$', live_match_detail, name='live-match-detail'),
url(r'^api/live/([a-zA-Z0-9]+)/(.+)/$', ApiLiveMatch.as_view(), name='api-live-match'),
url(r'api/champions/$', ChampionInfoView.as_view(), name='api-champion-info'),
url(r'^summonerprofile/', get_summoner_v3, name='summoner_profile'),
url(r'test/', test_something, name='test'),
url(r'^', FrontendAppView.as_view()),
]
|
MadMac/PyTetris
|
src/main/main.py
|
Python
|
mit
| 1,650
| 0.004848
|
import pygame, sys, os, random
from classes import *
from pygame.locals import *
blocksFile = "blocks.txt"
thisBlock = ""
allBlocks = []
boardWidth = 15
boardHeight = 20
gameOver = False
# Make all the blocks which are in file "blocks.txt"
file = open(blocksFile, "r")
while file:
line = file.readline()
if line.find("END") >= 0:
break
if line.find("/") >= 0:
allBlocks.append(blockStyle(thisBlock))
thisBlock = ""
continue
thisBlock = thisBlock + line
# Make board
gameBoard = board(boardWidth, boardHeight)
# All pygame init
pygame.init()
gameWindow = pygame.display.set_mode((640, 480))
pygame.display.set_caption('PyTetris')
clock = pygame.time.Clock()
playerBlock = block(boardWidth, boardHeight, allBlocks[random.randrange(len(allBlocks))].getStyle(), gameBoard)
pygame.time.Clock()
pygame.time.set_timer(pygame.USEREVENT + 1, 150)
pygame.time.set_timer(pygame.USEREVENT + 2, 1000)
#Game loop
while gameOver == False:
clock.tick(60)
for event in pygame.event.get():
if event.type == pygame.QUIT:
gameOver = True
elif event.type == KEYDOWN and event.key == K_ESCAPE:
gameOver = True
elif event.type == pygame.USEREVENT + 1:
playerBlock.handlePlayerInput()
elif event.type == pygame.USEREVENT + 2:
playerBlock.updatePlayer()
if playerBlock.isDown == True:
playerBlock.changeStyle(allBlocks[random.randrange(len(allBlocks))].getStyle())
gameWindow.fill((0,0,0))
gameBoard.drawBoard()
gameBoard.update()
playerBlock.drawBlock()
pygame.display.flip()
pygame.quit()
|
MissionCriticalCloud/marvin
|
marvin/cloudstackAPI/resetPasswordForVirtualMachine.py
|
Python
|
apache-2.0
| 24,253
| 0.00099
|
"""Resets the password for virtual machine. The virtual machine must be in a "Stopped" state and the template must already support this feature for this command to take effect. [async]"""
from baseCmd import *
from baseResponse import *
class resetPasswordForVirtualMachineCmd (baseCmd):
typeInfo = {}
def __init__(self):
self.isAsync = "true"
"""The ID of the virtual machine"""
"""Required"""
self.id = None
self.typeInfo['id'] = 'uuid'
self.required = ["id", ]
class resetPasswordForVirtualMachineResponse (baseResponse):
typeInfo = {}
def __init__(self):
"""the ID of the virtual machine"""
self.id = None
self.typeInfo['id'] = 'string'
"""the account associated with the virtual machine"""
self.account = None
self.typeInfo['account'] = 'string'
"""the number of cpu this virtual machine is running with"""
self.cpunumber = None
self.typeInfo['cpunumber'] = 'integer'
"""the speed of each cpu"""
self.cpuspeed = None
self.typeInfo['cpuspeed'] = 'integer'
"""the amount of the vm's CPU currently used"""
self.cpuused = None
self.typeInfo['cpuused'] = 'string'
"""the date when this virtual machine was created"""
self.created = None
self.typeInfo['created'] = 'date'
"""Vm details in key/value pairs."""
self.details = None
self.typeInfo['details'] = 'map'
"""the read (io) of disk on the vm"""
self.diskioread = None
self.typeInfo['diskioread'] = 'long'
"""the write (io) of disk on the vm"""
self.diskiowrite = None
self.typeInfo['diskiowrite'] = 'long'
"""the read (bytes) of disk on the vm"""
self.diskkbsread = None
self.typeInfo['diskkbsread'] = 'long'
"""the write (bytes) of disk on the vm"""
self.diskkbswrite = None
self.typeInfo['diskkbswrite'] = 'long'
"""the ID of the disk offering of the virtual machine"""
self.diskofferingid = None
self.typeInfo['diskofferingid'] = 'string'
"""the name of the disk offering of the virtual machine"""
self.diskofferingname = None
self.typeInfo['diskofferingname'] = 'string'
"""user generated name. The name of the virtual machine is returned if no displayname exists."""
self.displayname = None
self.typeInfo['displayname'] = 'string'
"""an optional field whether to the display the vm to the end user or not."""
self.displayvm = None
self.typeInfo['displayvm'] = 'boolean'
"""the name of the domain in which the virtual machine exists"""
self.domain = None
self.typeInfo['domain'] = 'string'
"""the ID of the domain in which the virtual machine exists"""
self.domainid = None
self.typeInfo['domainid'] = 'string'
"""the virtual network for the service offering"""
self.forvirtualnetwork = None
self.typeInfo['forvirtualnetwork'] = 'boolean'
"""the group name of the virtual machine"""
self.group = None
self.typeInfo['group'] = 'string'
"""the group ID of the virtual machine"""
self.groupid = None
self.typeInfo['groupid'] = 'string'
"""Os type ID of the virtual machine"""
self.guestosid = None
self.typeInfo['guestosid'] = 'string'
"""true if high-availability is enabled, false otherwise"""
self.haenable = None
self.typeInfo['haenable'] = 'boolean'
"""the ID of the host for the virtual machine"""
self.hostid = None
self.typeInfo['hostid'] = 'string'
"""the name of the host for the virtual machine"""
self.hostname = None
self.typeInfo['hostname'] = 'string'
"""the hypervisor on which the template runs"""
self.hypervisor = None
self.typeInfo['hypervisor'] = 'string'
"""instance name of the user vm; this parameter is returned to the ROOT admin only"""
self.instancename = None
self.typeInfo['instancename'] = 'string'
"""true if vm contains XS tools inorder to support dynamic scaling of VM cpu/memory."""
self.isdynamicallyscalable = None
self.typeInfo['isdynamicallyscalable'] = 'boolean'
"""an alternate display text of the ISO attached to the virtual machine"""
self.isodisplaytext = None
self.typeInfo['isodisplaytext'] = 'string'
"""the ID of the ISO attached to the virtual machine"""
self.isoid = None
self.typeInfo['isoid'] = 'string'
"""the name of the ISO attached to the virtual machine"""
self.isoname = None
self.typeInfo['isoname'] = 'string'
"""ssh key-pair"""
self.keypair = None
self.typeInfo['keypair'] = 'string'
"""the memory allocated for the virtual machine"""
self.memory = None
self.typeInfo['memory'] = 'integer'
"""the name of the virtual machine"""
self.name = None
self.typeInfo['name'] = 'string'
"""the incoming network traffic on the vm"""
self.networkkbsread = None
self.typeInfo['networkkbsread'] = 'long'
"""the outgoing network traffic on the host"""
self.networkkbswrite = None
self.typeInfo['networkkbswrite'] = 'long'
"""OS type id of the vm"""
self.ostypeid = None
self.typeInfo['ostypeid'] = 'long'
"""the password (if exists) of the virtual machine"""
self.password = None
self.typeInfo['password'] = 'string'
"""true if the password rest feature is enabled, false otherwise"""
self.passwordenabled = None
self.typeInfo['passwordenabled'] = 'boolean'
"""the project name of the vm"""
self.project = None
self.typeInfo['project'] = 'string'
"""the project id of the vm"""
self.projectid = None
self.typeInfo['projectid'] = 'string'
"""public IP address id associated with vm via Static nat rule"""
self.publicip = None
self.typeInfo['publicip'] = 'string'
"""public IP address id associated with vm via Static nat rule"""
self.publicipid = None
self.typeInfo['publicipid'] = 'string'
"""device ID of the root volume"""
self.rootdeviceid = None
self.typeInfo['rootdeviceid'] = 'long'
"""device type of the root volume"""
self.rootdevicetype = None
self.typeInfo['rootdevicetype'] = 'string'
"""the ID of the service offering of the virtual machine"""
self.serviceofferingid = None
self.typeInfo['serviceofferingid'] = 'string'
"""the name of the service offering of the virtu
|
al machine"""
self.serviceofferingname = None
self.typeInfo['serviceofferingname'] = 'string'
"""State of the Service from LB rule"""
self.servicestate = None
self.typeInfo['servicestate'] = 'string'
"""the state of the virtual machine"""
self.state = None
self.typeInfo['state'] = 'string'
"""an alternate display text of the template for the virtual machine"""
self.templatedisplaytext = None
self.typeInfo['templatedisplaytext'] = 'string'
"""the ID of the template for the virtual machine. A -1 is returned if the virtual machine was created from an ISO file."""
self.templateid = None
self.typeInfo['templateid'] = 'string'
"""the name of the template for the virtual machine"""
self.templatename = None
self.typeInfo['templatename'] = 'string'
"""the user's ID who deployed the virtual machine"""
self.userid = None
self.typeInfo['userid'] = 'string'
"""the user's name who deployed the virtual machine"""
self.username = None
self.typeInfo['username'] = 'string'
"""the vgpu type used by the virtual machine"""
self.vgpu = None
self.typeInfo['vgpu'] = 'string'
"""the ID of the availablility zone for the virtual machi
|
5monkeys/blues
|
blues/java.py
|
Python
|
mit
| 1,149
| 0.001741
|
"""
Java
====
Installs Java, currently restricted to version 7.
**Fabric environment:**
.. code-block:: yaml
blueprints:
- blues.java
"""
from fabric.decorators import task
from refabric.api import run, info
from refabric.context_managers import sudo
from . import debian
__all__ = ['setup']
@task
def setup():
"""
Install Java
"""
install()
def install():
with sudo():
lbs_release = debian.lbs_release()
if lbs_release == '12.04':
debian.add_apt_ppa('webupd8team/java')
            debian.debconf_set_selections('shared/accepted-oracle-license-v1-1 select true',
'shared/accepted-oracle-license-v1-1 seen true')
package = 'oracle-java7-installer'
elif lbs_release >= '16.04':
package = 'default-jdk'
elif lbs_release >= '14.04':
package = 'openjdk-7-jdk'
else:
package = 'java7-jdk'
if package != 'default-jdk':
info('Install Java 7 JDK')
else:
info('Install default Java JDK')
debian.apt_get('install', package)
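# A hedged usage sketch: blueprints in blues are normally run as fabric tasks,
# e.g. `fab -H deploy@example.com java.setup` (host illustrative). From Python
# the install body can be called directly once the fabric env is configured:
#
#   from blues import java
#   java.install()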
|
apache/allura
|
Allura/allura/tests/test_tasks.py
|
Python
|
apache-2.0
| 28,836
| 0.001149
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import operator
import shutil
import sys
import unittest
import six
from base64 import b64encode
import logging
import pkg_resources
import tg
import mock
from tg import tmpl_context as c, app_globals as g
from datadiff.tools import assert_equal
from nose.tools import assert_in, assert_less, assert_less_equal
from ming.orm import FieldProperty, Mapper
from ming.orm import ThreadLocalORMSession
from testfixtures import LogCapture
from alluratest.controller import setup_basic_test, setup_global_objects, TestController
from allura import model as M
from allura.command.taskd import TaskdCommand
from allura.lib import helpers as h
from allura.lib import search
from allura.lib.exceptions import CompoundError
from allura.lib.mail_util import MAX_MAIL_LINE_OCTETS
from allura.tasks import event_tasks
from allura.tasks import index_tasks
from allura.tasks import mail_tasks
from allura.tasks import notification_tasks
from allura.tasks import repo_tasks
from allura.tasks import export_tasks
from allura.tasks import admin_tasks
from allura.tests import decorators as td
from allura.lib.decorators import event_handler, task
class TestRepoTasks(unittest.TestCase):
@mock.patch('allura.tasks.repo_tasks.c.app')
@mock.patch('allura.tasks.repo_tasks.g.post_event')
def test_clone_posts_event_on_failure(self, post_event, app):
fake_source_url = 'fake_source_url'
fake_traceback = 'fake_traceback'
app.repo.init_as_clone.side_effect = Exception(fake_traceback)
repo_tasks.clone(None, None, fake_source_url)
assert_equal(post_event.call_args[0][0], 'repo_clone_task_failed')
assert_equal(post_event.call_args[0][1], fake_source_url)
assert_equal(post_event.call_args[0][2], None)
# ignore args[3] which is a traceback string
@mock.patch('allura.tasks.repo_tasks.session', autospec=True)
@mock.patch.object(M, 'MergeRequest')
def test_merge(self, MR, session):
mr = mock.Mock(_id='_id',
activity_name='merge req', activity_url='/fake/url', activity_extras={}, node_id=None)
MR.query.get.return_value = mr
repo_tasks.merge(mr._id)
mr.app.repo.merge.assert_called_once_with(mr)
assert_equal(mr.status, 'merged')
session.assert_called_once_with(mr)
session.return_value.flush.assert_called_once_with(mr)
@mock.patch.object(M, 'MergeRequest')
def test_can_merge(self, MR):
mr = M.MergeRequest(_id='_id')
MR.query.get.return_value = mr
repo_tasks.can_merge(mr._id)
mr.app.repo.can_merge.assert_called_once_with(mr)
val = mr.app.repo.can_merge.return_value
mr.set_can_merge_cache.assert_called_once_with(val)
# used in test_post_event_from_within_task below
@task
def _task_that_creates_event(event_name,):
g.post_event(event_name)
# event does not get flushed to db right away (at end of task, ming middleware will flush it)
assert not M.MonQTask.query.get(task_name='allura.tasks.event_tasks.event', args=[event_name])
class TestEventTasks(unittest.TestCase):
def setUp(self):
setup_basic_test()
setup_global_objects()
self.called_with = []
def test_fire_event(self):
event_tasks.event('my_event', self, 1, 2, a=5)
assert self.called_with == [((1, 2), {'a': 5})], self.called_with
def test_post_event_explicit_flush(self):
g.post_event('my_event1', flush_immediately=True)
assert M.MonQTask.query.get(task_name='allura.tasks.event_tasks.event', args=['my_event1'])
g.post_event('my_event2', flush_immediately=False)
assert not M.MonQTask.query.get(task_name='allura.tasks.event_tasks.event', args=['my_event2'])
ThreadLocalORMSession.flush_all()
assert M.MonQTask.query.get(task_name='allura.tasks.event_tasks.event', args=['my_event2'])
def test_post_event_from_script(self):
# simulate post_event being called from a paster script command:
with mock.patch.dict(tg.request.environ, PATH_INFO='--script--'):
g.post_event('my_event3')
# event task is flushed to db right away:
assert M.MonQTask.query.get(task_name='allura.tasks.event_tasks.event', args=['my_event3'])
def test_post_event_from_within_task(self):
# instead of M.MonQTask.run_ready() run real 'taskd' so we get all the setup we need
taskd = TaskdCommand('taskd')
taskd.parse_args([pkg_resources.resource_filename('allura', '../test.ini')])
taskd.keep_running = True
taskd.restart_when_done = False
_task_that_creates_event.post('my_event4')
with mock.patch('allura.command.taskd.setproctitle') as setproctitle:
def stop_taskd_after_this_task(*args):
taskd.keep_running = False
setproctitle.side_effect = stop_taskd_after_this_task # avoid proc title change; useful hook to stop taskd
taskd.worker()
        # after the initial task is done, the event task has been persisted:
assert M.MonQTask.query.get(task_name='allura.tasks.event_tasks.event', args=['my_event4'])
def test_compound_error(self):
t = raise_exc.post()
with LogCapture(level=logging.ERROR) as l, \
mock.patch.dict(tg.config, {'monq.raise_errors': False}): # match normal non-test behavior
t()
# l.check() would be nice, but string is too detailed to check
assert_equal(l.records[0].name, 'allura.model.monq_model')
msg = l.records[0].getMessage()
assert_in("AssertionError('assert 0'", msg)
assert_in("AssertionError('assert 5'", msg)
assert_in(' on job <MonQTask ', msg)
assert_in(' (error) P:10 allura.tests.test_tasks.raise_exc ', msg)
for x in range(10):
assert ('assert %d' % x) in t.result
class TestIndexTasks(unittest.TestCase):
def setUp(self):
setup_basic_test()
setup_global_objects()
def test_add_projects(self):
g.solr.db.clear()
old_solr_size = len(g.solr.db)
projects = M.Project.query.find().all()
index_tasks.add_projects.post([p._id for p in projects])
M.MonQTask.run_ready()
new_solr_size = len(g.solr.db)
assert old_solr_size + len(projects) == new_solr_size
@td.with_wiki
def test_del_projects(self):
projects = M.Project.query.find().all()
index_tasks.add_projects([p._id for p in projects])
with mock.patch('allura.tasks.index_tasks.g.solr') as solr:
index_tasks.del_projects([p.index_id() for p in projects])
assert solr.delete.call_count, 1
for project in projects:
assert project.index_id() in solr.delete.call_args[1]['q']
@td.with_wiki
def test_add_artifacts(self):
from allura.lib.search import find_shortlinks
with mock.patch('allura.lib.search.find_shortlinks') as find_slinks:
find_slinks.side_effect = lambda s: find_shortlinks(s)
old_shortlinks = M.Shortlink.query.find().count()
old_solr_size = len(g.solr.db)
artifacts = [_TestArtifact() for x in range(5)]
for i, a in enumerate(artifacts):
a._shorthand_id = 't%d' % i
|
Conedy/Conedy
|
testing/network/expected/sum_setDirected.py
|
Python
|
gpl-2.0
| 76
| 0
|
00000 0 output/setDirected.py.err
13678 1 output/setDirected.py.out
|
furious-luke/django-ajax
|
django_ajax/decorators.py
|
Python
|
mit
| 2,485
| 0.000805
|
"""
Decorators
"""
from __future__ import unicode_literals
from functools import wraps
from django.http import HttpResponseBadRequest
from django.utils.decorators import available_attrs
from django_ajax.shortcuts import render_to_json
def ajax(function=None, mandatory=True, **ajax_kwargs):
"""
    Decorator that guesses the user response type and translates it to a serialized
JSON response. Usage::
@ajax
def my_view(request):
do_something()
# will send {'status': 200, 'statusText': 'OK', 'content': null}
@ajax
def my_view(request):
return {'key': 'value'}
# will send {'status': 200, 'statusText': 'OK',
'content': {'key': 'value'}}
@ajax
def my_view(request):
return HttpResponse('<h1>Hi!</h1>')
# will send {'status': 200, 'statusText': 'OK',
'content': '<h1>Hi!</h1>'}
@ajax
def my_view(request):
return redirect('home')
# will send {'status': 302, 'statusText': 'FOUND', 'content': '/'}
    # combination with other decorators:
@ajax
@login_required
@require_POST
def my_view(request):
pass
# if request user is not authenticated then the @login_required
    # decorator redirects to the login page.
# will send {'status': 302, 'statusText': 'FOUND',
'content': '/login'}
# if request method is 'GET' then the @require_POST decorator return
# a HttpResponseNotAllowed response.
# will send {'status': 405, 'statusText': 'METHOD NOT ALLOWED',
'content': null}
"""
def decorator(func):
@wraps(func, assigned=available_attrs(func))
def inner(request, *args, **kwargs):
if mandatory and not request.is_ajax():
return HttpResponseBadRequest()
if request.is_ajax():
# return json response
try:
return render_to_json(func(request, *args, **kwargs), **ajax_kwargs)
except Exception as exception:
return render_to_json(exception)
else:
# return standard response
return func(request, *args, **kwargs)
return inner
if function:
return decorator(function)
return decorator
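# A short usage sketch (view name illustrative): dict return values are
# serialized into the JSON envelope documented in the docstring above.
#
#   @ajax(mandatory=False)   # non-AJAX requests fall through to a normal response
#   def profile(request):
#       return {'username': request.user.username}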
|
googleapis/python-dialogflow
|
samples/generated_samples/dialogflow_v2beta1_generated_entity_types_delete_entity_type_sync.py
|
Python
|
apache-2.0
| 1,440
| 0.000694
|
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for DeleteEntityType
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-dialogflow
# [START dialogflow_v2beta1_generated_EntityTypes_DeleteEntityType_sync]
from google.cloud import dialogflow_v2beta1
def sample_delete_entity_type():
# Create a client
client = dialogflow_v2beta1.EntityTypesClient()
# Initialize request argument(s)
request = dialogflow_v2beta1.DeleteEntityTypeRequest(
name="name_value",
)
# Make the request
    client.delete_entity_type(request=request)
# [END dialogflow_v2beta1_generated_EntityTypes_DeleteEntityType_sync]
|
rohitwaghchaure/frappe
|
frappe/email/queue.py
|
Python
|
mit
| 16,756
| 0.028408
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
import HTMLParser
import smtplib, quopri
from frappe import msgprint, throw, _
from frappe.email.smtp import SMTPServer, get_outgoing_email_account
from frappe.email.email_body import get_email, get_formatted_html
from frappe.utils.verified_command import get_signed_params, verify_request
from html2text import html2text
from frappe.utils import get_url, nowdate, encode, now_datetime, add_days, split_emails, cstr, cint
from rq.timeouts import JobTimeoutException
from frappe.utils.scheduler import log
class EmailLimitCrossedError(frappe.ValidationError): pass
def send(recipients=None, sender=None, subject=None, message=None, reference_doctype=None,
reference_name=None, unsubscribe_method=None, unsubscribe_params=None, unsubscribe_message=None,
attachments=None, reply_to=None, cc=[], message_id=None, in_reply_to=None, send_after=None,
expose_recipients=None, send_priority=1, communication=None, now=False, read_receipt=None,
queue_separately=False, is_notification=False, add_unsubscribe_link=1):
"""
|
Add email to sending queue (Email Queue)
:param recipients: List of recipients.
:param sender: Email sender.
:param subject: Email subject.
:param message: Email message.
:param reference_doctype: Reference DocType of caller document.
:param reference_name: Reference name of caller document.
:param send_priority: Priority for Email Queue, default 1.
:param unsubscribe_method: URL method for unsubscribe. Default is `/api/method/frappe.email.queue.unsubscribe`.
	:param unsubscribe_params: additional params for unsubscribed links. default are name, doctype, email
:param attachments: Attachments to be sent.
:param reply_to: Reply to be captured here (default inbox)
:param in_reply_to: Used to send the Message-Id of a received email back as In-Reply-To.
	:param send_after: Send this email after the given datetime. If the value is an integer, `send_after` is automatically set to that many days from the current date.
:param communication: Communication link to be set in Email Queue record
:param now: Send immediately (don't send in the background)
:param queue_separately: Queue each email separately
	:param is_notification: Marks email as notification so it will not trigger notifications from the system
:param add_unsubscribe_link: Send unsubscribe link in the footer of the Email, default 1.
"""
if not unsubscribe_method:
unsubscribe_method = "/api/method/frappe.email.queue.unsubscribe"
if not recipients and not cc:
return
if isinstance(recipients, basestring):
recipients = split_emails(recipients)
if isinstance(cc, basestring):
cc = split_emails(cc)
if isinstance(send_after, int):
send_after = add_days(nowdate(), send_after)
email_account = get_outgoing_email_account(True, append_to=reference_doctype)
if not sender or sender == "Administrator":
sender = email_account.default_sender
check_email_limit(recipients)
formatted = get_formatted_html(subject, message, email_account=email_account)
try:
text_content = html2text(formatted)
except HTMLParser.HTMLParseError:
text_content = "See html attachment"
if reference_doctype and reference_name:
unsubscribed = [d.email for d in frappe.db.get_all("Email Unsubscribe", "email",
{"reference_doctype": reference_doctype, "reference_name": reference_name})]
unsubscribed += [d.email for d in frappe.db.get_all("Email Unsubscribe", "email",
{"global_unsubscribe": 1})]
else:
unsubscribed = []
recipients = [r for r in list(set(recipients)) if r and r not in unsubscribed]
email_content = formatted
email_text_context = text_content
if add_unsubscribe_link and reference_doctype and (unsubscribe_message or reference_doctype=="Newsletter") and add_unsubscribe_link==1:
unsubscribe_link = get_unsubscribe_message(unsubscribe_message, expose_recipients)
email_content = email_content.replace("<!--unsubscribe link here-->", unsubscribe_link.html)
email_text_context += unsubscribe_link.text
# add to queue
add(recipients, sender, subject,
formatted=email_content,
text_content=email_text_context,
reference_doctype=reference_doctype,
reference_name=reference_name,
attachments=attachments,
reply_to=reply_to,
cc=cc,
message_id=message_id,
in_reply_to=in_reply_to,
send_after=send_after,
send_priority=send_priority,
email_account=email_account,
communication=communication,
add_unsubscribe_link=add_unsubscribe_link,
unsubscribe_method=unsubscribe_method,
unsubscribe_params=unsubscribe_params,
expose_recipients=expose_recipients,
read_receipt=read_receipt,
queue_separately=queue_separately,
is_notification = is_notification,
now=now)
def add(recipients, sender, subject, **kwargs):
"""Add to Email Queue"""
if kwargs.get('queue_separately') or len(recipients) > 20:
email_queue = None
for r in recipients:
if not email_queue:
email_queue = get_email_queue([r], sender, subject, **kwargs)
if kwargs.get('now'):
					send_one(email_queue.name, now=True)
else:
duplicate = email_queue.get_duplicate([r])
duplicate.insert(ignore_permissions=True)
if kwargs.get('now'):
send_one(duplicate.name, now=True)
frappe.db.commit()
else:
email_queue = get_email_queue(recipients, sender, subject, **kwargs)
if kwargs.get('now'):
send_one(email_queue.name, now=True)
def get_email_queue(recipients, sender, subject, **kwargs):
'''Make Email Queue object'''
e = frappe.new_doc('Email Queue')
e.priority = kwargs.get('send_priority')
try:
mail = get_email(recipients,
sender=sender,
subject=subject,
formatted=kwargs.get('formatted'),
text_content=kwargs.get('text_content'),
attachments=kwargs.get('attachments'),
reply_to=kwargs.get('reply_to'),
cc=kwargs.get('cc'),
email_account=kwargs.get('email_account'),
expose_recipients=kwargs.get('expose_recipients'))
mail.set_message_id(kwargs.get('message_id'),kwargs.get('is_notification'))
if kwargs.get('read_receipt'):
mail.msg_root["Disposition-Notification-To"] = sender
if kwargs.get('in_reply_to'):
mail.set_in_reply_to(kwargs.get('in_reply_to'))
e.message_id = mail.msg_root["Message-Id"].strip(" <>")
e.message = cstr(mail.as_string())
e.sender = mail.sender
except frappe.InvalidEmailAddressError:
# bad Email Address - don't add to queue
frappe.log_error('Invalid Email ID Sender: {0}, Recipients: {1}'.format(mail.sender,
', '.join(mail.recipients)), 'Email Not Sent')
e.set_recipients(recipients + kwargs.get('cc', []))
e.reference_doctype = kwargs.get('reference_doctype')
e.reference_name = kwargs.get('reference_name')
e.add_unsubscribe_link = kwargs.get("add_unsubscribe_link")
e.unsubscribe_method = kwargs.get('unsubscribe_method')
e.unsubscribe_params = kwargs.get('unsubscribe_params')
e.expose_recipients = kwargs.get('expose_recipients')
e.communication = kwargs.get('communication')
e.send_after = kwargs.get('send_after')
e.show_as_cc = ",".join(kwargs.get('cc', []))
e.insert(ignore_permissions=True)
return e
def check_email_limit(recipients):
# if using settings from site_config.json, check email limit
# No limit for own email settings
smtp_server = SMTPServer()
if (smtp_server.email_account
and getattr(smtp_server.email_account, "from_site_config", False)
or frappe.flags.in_test):
monthly_email_limit = frappe.conf.get('limits', {}).get('emails')
if frappe.flags.in_test:
monthly_email_limit = 500
if not monthly_email_limit:
return
# get count of mails sent this month
this_month = get_emails_sent_this_month()
if (this_month + len(recipients)) > monthly_email_limit:
throw(_("Cannot send this email. You have crossed the sending limit of {0} emails for this month.").format(monthly_email_limit),
EmailLimitCrossedError)
def get_emails_sent_this_month():
return frappe.db.sql("""select count(name) from `tabEmail Queue` where
status='Sent' and MONTH(creation)=MONTH(CURDATE())""")[0][0]
def get_unsubscribe_message(unsubscribe_message, expose_recipients):
if not unsubs
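# (The record above is truncated.) A hedged sketch of queuing a mail with the
# send() API documented earlier; recipients, doctype and name are illustrative.
#
#   send(recipients=['alice@example.com'],
#        sender='noreply@example.com',
#        subject='Invoice INV-0001',
#        message='<p>Your invoice is attached.</p>',
#        reference_doctype='Sales Invoice', reference_name='INV-0001',
#        send_after=2)   # integer: days from today, per the docstring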
|
arenadata/ambari
|
ambari-server/src/main/resources/scripts/configs.py
|
Python
|
apache-2.0
| 13,921
| 0.014726
|
#!/usr/bin/env python
'''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import optparse
from optparse import OptionGroup
import sys
import urllib2
import time
import json
import base64
import xml
import xml.etree.ElementTree as ET
import os
import logging
logger = logging.getLogger('AmbariConfig')
HTTP_PROTOCOL = 'http'
HTTPS_PROTOCOL = 'https'
SET_ACTION = 'set'
GET_ACTION = 'get'
DELETE_ACTION = 'delete'
GET_REQUEST_TYPE = 'GET'
PUT_REQUEST_TYPE = 'PUT'
# JSON Keywords
PROPERTIES = 'properties'
ATTRIBUTES = 'properties_attributes'
CLUSTERS = 'Clusters'
DESIRED_CONFIGS = 'desired_configs'
TYPE = 'type'
TAG = 'tag'
ITEMS = 'items'
TAG_PREFIX = 'version'
CLUSTERS_URL = '/api/v1/clusters/{0}'
DESIRED_CONFIGS_URL = CLUSTERS_URL + '?fields=Clusters/desired_configs'
CONFIGURATION_URL = CLUSTERS_URL + '/configurations?type={1}&tag={2}'
FILE_FORMAT = \
"""
"properties": {
"key1": "value1"
"key2": "value2"
},
"properties_attributes": {
"attribute": {
"key1": "value1"
"key2": "value2"
}
}
"""
class UsageException(Exception):
pass
def api_accessor(host, login, password, protocol, port):
def do_request(api_url, request_type=GET_REQUEST_TYPE, request_body=''):
try:
url = '{0}://{1}:{2}{3}'.format(protocol, host, port, api_url)
admin_auth = base64.encodestring('%s:%s' % (login, password)).replace('\n', '')
request = urllib2.Request(url)
request.add_header('Authorization', 'Basic %s' % admin_auth)
request.add_header('X-Requested-By', 'ambari')
request.add_data(request_body)
request.get_method = lambda: request_type
response = urllib2.urlopen(request)
response_body = response.read()
except Exception as exc:
raise Exception('Problem with accessing api. Reason: {0}'.format(exc))
return response_body
return do_request
def get_config_tag(cluster, config_type, accessor):
response = accessor(DESIRED_CONFIGS_URL.format(cluster))
try:
desired_tags = json.loads(response)
current_config_tag = desired_tags[CLUSTERS][DESIRED_CONFIGS][config_type][TAG]
except Exception as exc:
raise Exception('"{0}" not found in server response. Response:\n{1}'.format(config_type, response))
return current_config_tag
def create_new_desired_config(cluster, config_type, properties, attributes, accessor):
new_tag = TAG_PREFIX + str(int(time.time() * 1000000))
new_config = {
CLUSTERS: {
DESIRED_CONFIGS: {
TYPE: config_type,
TAG: new_tag,
PROPERTIES: properties
}
}
}
if len(attributes.keys()) > 0:
new_config[CLUSTERS][DESIRED_CONFIGS][ATTRIBUTES] = attributes
request_body = json.dumps(new_config)
new_file = 'doSet_{0}.json'.format(new_tag)
logger.info('### PUTting json into: {0}'.format(new_file))
output_to_file(new_file)(new_config)
accessor(CLUSTERS_URL.format(cluster), PUT_REQUEST_TYPE, request_body)
logger.info('### NEW Site:{0}, Tag:{1}'.format(config_type, new_tag))
def get_current_config(cluster, config_type, accessor):
config_tag = get_config_tag(cluster, config_type, accessor)
logger.info("### on (Site:{0}, Tag:{1})".format(config_type, config_tag))
response = accessor(CONFIGURATION_URL.format(cluster, config_type, config_tag))
config_by_tag = json.loads(response)
current_config = config_by_tag[ITEMS][0]
return current_config[PROPERTIES], current_config.get(ATTRIBUTES, {})
def update_config(cluster, config_type, config_updater, accessor):
properties, attributes = config_updater(cluster, config_type, accessor)
create_new_desired_config(cluster, config_type, properties, attributes, accessor)
def update_specific_property(config_name, config_value):
def update(cluster, config_type, accessor):
properties, attributes = get_current_config(cluster, config_type, accessor)
properties[config_name] = config_value
return properties, attributes
return update
def update_from_xml(config_file):
def update(cluster, config_type, accessor):
return read_xml_data_to_map(config_file)
return update
# Used DOM parser to read data into a map
def read_xml_data_to_map(path):
configurations = {}
properties_attributes = {}
tree = ET.parse(path)
root = tree.getroot()
for properties in root.getiterator('property'):
name = properties.find('name')
    value = properties.find('value')
final = properties.find('final')
if name != None:
name_text = name.text if name.text else ""
else:
logger.warn("No name is found for one of the properties in {0}, ignoring it".format(path))
continue
if value != None:
value_text = value.text if value.text else ""
else:
logger.warn("No value is found for \"{0}\" in {1}, using empty string for it".format(name_text, path))
value_text = ""
if final != None:
final_text = final.text if final.text else ""
properties_attributes[name_text] = final_text
configurations[name_text] = value_text
return configurations, {"final" : properties_attributes}
def update_from_file(config_file):
def update(cluster, config_type, accessor):
try:
with open(config_file) as in_file:
file_content = in_file.read()
except Exception as e:
raise Exception('Cannot find file "{0}" to PUT'.format(config_file))
try:
file_properties = json.loads(file_content)
except Exception as e:
raise Exception('File "{0}" should be in the following JSON format ("properties_attributes" is optional):\n{1}'.format(config_file, FILE_FORMAT))
new_properties = file_properties.get(PROPERTIES, {})
new_attributes = file_properties.get(ATTRIBUTES, {})
logger.info('### PUTting file: "{0}"'.format(config_file))
return new_properties, new_attributes
return update
def delete_specific_property(config_name):
def update(cluster, config_type, accessor):
properties, attributes = get_current_config(cluster, config_type, accessor)
properties.pop(config_name, None)
for attribute_values in attributes.values():
attribute_values.pop(config_name, None)
return properties, attributes
return update
def output_to_file(filename):
def output(config):
with open(filename, 'w') as out_file:
json.dump(config, out_file, indent=2)
return output
def output_to_console(config):
print json.dumps(config, indent=2)
def get_config(cluster, config_type, accessor, output):
properties, attributes = get_current_config(cluster, config_type, accessor)
config = {PROPERTIES: properties}
if len(attributes.keys()) > 0:
config[ATTRIBUTES] = attributes
output(config)
def set_properties(cluster, config_type, args, accessor):
logger.info('### Performing "set":')
if len(args) == 1:
config_file = args[0]
root, ext = os.path.splitext(config_file)
if ext == ".xml":
updater = update_from_xml(config_file)
elif ext == ".json":
updater = update_from_file(config_file)
else:
logger.error("File extension {0} doesn't supported".format(ext))
return -1
logger.info('### from file {0}'.format(config_file))
else:
config_name = args[0]
config_value = args[1]
updater = update_specific_property(config_name, config_value)
logger.info('### new property - "{0}":"{1}"'.format(config_name, config_value))
update_config(cluster, config_type, updater, accessor)
return 0
def delete_properties(cluster, config_type, args, accessor):
logger.info('### Performing "delete":')
if
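# (The record above is truncated.) A hedged sketch of driving the helpers
# programmatically, using only the functions defined above; host, credentials
# and cluster name are illustrative.
#
#   accessor = api_accessor('ambari.example.com', 'admin', 'admin', HTTP_PROTOCOL, '8080')
#   get_config('mycluster', 'core-site', accessor, output_to_console)              # read to stdout
#   set_properties('mycluster', 'core-site', ['fs.trash.interval', '360'], accessor)  # set one property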
|
peter1000/SpeedIT
|
SpeedIT/ProfileIT.py
|
Python
|
bsd-3-clause
| 7,430
| 0.012115
|
""" Profile module
"""
from operator import itemgetter
from cProfile import Profile
from SpeedIT.ProjectErr import Err
from SpeedIT.Utils import (
format_time,
get_table_rst_formatted_lines
)
def _profile_it(func, func_positional_arguments, func_keyword_arguments, name, profileit__max_slashes_fileinfo, profileit__repeat):
""" Returns a dictionary with the profile result: the function runs only once.
    .. note:: excludes a couple of not relevant functions/methods
- excludes: profiler.enable()
- exclude: profiler.disable()
- exclude: cProfile.Profile.runcall()
Args:
func (function):
func_positional_arguments (list): positional arguments for the function
func_keyword_arguments (dict): any keyword arguments for the function
name (str): the name used for the output `name` part
profileit__max_slashes_fileinfo (int): to adjust max path levels in the profile info
profileit__repeat (int): how often the function is repeated: the result will be the sum of all: similar to the code below
.. code-block:: python
for repeat in range(profileit__repeat):
profiler.enable()
profiler.runcall(func, *func_positional_arguments, **func_keyword_arguments)
profiler.disable()
Returns:
tuple: format: (summary_dict, table): table = list_of_dictionaries (sorted profile result lines dict)
"""
profiler = Profile()
for repeat in range(profileit__repeat):
profiler.enable()
profiler.runcall(func, *func_positional_arguments, **func_keyword_arguments)
profiler.disable()
profiler.create_stats()
total_calls = 0
primitive_calls = 0
total_time = 0
table = []
for func_tmp, (cc, nc, tt, ct, callers) in profiler.stats.items():
temp_dict = {
'number_of_calls': '{:,}'.format(cc) if cc == nc else '{:,}/{:,}'.format(cc, nc),
'func_time': tt, 'func_cumulative_time': ct
}
if func_tmp[0] == '~':
# exclude the profiler.enable()/disable() functions
if '_lsprof.Profiler' in func_tmp[2]:
continue
else:
temp_dict['func_txt'] = func_tmp[2]
else:
# exclude: cProfile.py runcall()
if func_tmp[2] == 'runcall':
if 'cProfile' in func_tmp[0]:
continue
# adjust path levels
temp_path_file_ect = func_tmp[0]
temp_slashes = temp_path_file_ect.count('/')
if temp_slashes > profileit__max_slashes_fileinfo:
temp_dict['func_txt'] = '{}:{}({})'.format(temp_path_file_ect.split('/', temp_slashes - profileit__max_slashes_fileinfo)[-1], func_tmp[1], func_tmp[2])
else:
temp_dict['func_txt'] = '{}:{}({})'.format(temp_path_file_ect, func_tmp[1], func_tmp[2])
table.append(temp_dict)
total_calls += nc
primitive_calls += cc
total_time += tt
if ("jprofile", 0, "profiler") in callers:
raise Err('ERROR NOT SURE WHAT To DO HERE: SEE pstate.py: get_top_level_stats()', func)
summary_dict = {
'name': name,
'total_calls': total_calls,
'primitive_calls': primitive_calls,
'total_time': total_time
}
return summary_dict, table
def speedit_profile(func_dict, use_func_name=True, output_in_sec=False, profileit__max_slashes_fileinfo=2, profileit__repeat=1):
""" Returns one txt string for: table format is conform with reStructuredText
Args:
func_dict (dict): mapping function names to functions
value format: tuple (function, list_of_positional_arguments, dictionary_of_keyword_arguments)
        use_func_name (bool): if True the function name will be used in the output `name` if False the `func_dict key` will be used in the output `name`
        output_in_sec (int): if true the output is kept in seconds, if false it is transformed to:
second (s)
millisecond (ms) One thousandth of one second
microsecond (µs) One millionth of one second
nanosecond (ns) One billionth of one second
profileit__max_slashes_fileinfo (int): to adjust max path levels in the profile info
profileit__repeat (int): how often the function is repeated: the result will be the sum of all: similar to the code below
.. code-block:: python
for repeat in range(profileit__repeat):
profiler.enable()
profiler.runcall(func, *func_positional_arguments, **func_keyword_arguments)
profiler.disable()
Returns:
str: ready to print or write to file: table format is conform with reStructuredText
- rank: starts with the part which takes the longest
- compare: % of the total execution time
        - func_time: the total time spent in the given function (and excluding time made in calls to sub-functions)
- number_of_calls: the number of calls
- func_txt: provides the respective data of each function
"""
all_final_lines = []
for func_name, (function_, func_positional_arguments, func_keyword_arguments) in sorted(func_dict.items()):
if use_func_name:
name = getattr(function_, "__name__", function_)
else:
name = func_name
        summary_dict, table = _profile_it(function_, func_positional_arguments, func_keyword_arguments, name, profileit__max_slashes_fileinfo, profileit__repeat)
table = sorted(table, key=itemgetter('func_time'), reverse=True)
compare_reference = summary_dict['total_time']
if compare_reference == 0:
            # add ranking etc...
for idx, dict_ in enumerate(table):
dict_['compare'] = 'TOO-FAST-NOT-MEASURED'
dict_['rank'] = '{:,}'.format(idx + 1)
if output_in_sec:
dict_['func_time'] = '{:.11f}'.format(dict_['func_time'])
else:
dict_['func_time'] = format_time(dict_['func_time'])
else:
            # add ranking etc...
for idx, dict_ in enumerate(table):
dict_['compare'] = '{:,.3f}'.format((dict_['func_time'] * 100.0) / compare_reference)
dict_['rank'] = '{:,}'.format(idx + 1)
if output_in_sec:
dict_['func_time'] = '{:.11f}'.format(dict_['func_time'])
else:
dict_['func_time'] = format_time(dict_['func_time'])
header_mapping = [
('rank', 'rank'),
('compare %', 'compare'),
('func_time', 'func_time'),
('number_of_calls', 'number_of_calls'),
('func_txt', 'func_txt')
]
# add Title Summary
if output_in_sec:
title_line = '`ProfileIT` name: <{}> profileit__repeat: <{}> || total_calls: <{}> primitive_calls: <{}> total_time: <{:.11f}>'.format(summary_dict['name'], profileit__repeat, summary_dict['total_calls'], summary_dict['primitive_calls'], summary_dict['total_time'])
else:
title_line = '`ProfileIT` name: <{}> profileit__repeat: <{}> || total_calls: <{}> primitive_calls: <{}> total_time: <{}>'.format(summary_dict['name'], profileit__repeat, summary_dict['total_calls'], summary_dict['primitive_calls'], format_time(summary_dict['total_time']))
all_final_lines.extend(get_table_rst_formatted_lines(table, header_mapping, title_line))
all_final_lines.extend([
'',
'',
])
return '\n'.join(all_final_lines)
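# A minimal usage sketch based on the func_dict format described in the
# docstring above; the function under test is illustrative.
def _example_fib(n):
    return n if n < 2 else _example_fib(n - 1) + _example_fib(n - 2)

if __name__ == '__main__':
    func_dict = {
        # name: (function, positional arguments, keyword arguments)
        'fib 18': (_example_fib, [18], {}),
    }
    print(speedit_profile(func_dict, profileit__repeat=3))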
|
goddardl/gaffer
|
python/GafferSceneUI/SceneSwitchUI.py
|
Python
|
bsd-3-clause
| 2,393
| 0.009611
|
##########################################################################
#
# Copyright (c) 2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import Gaffer
import GafferUI
import GafferScene
Gaffer.Metadata.registerNode(
GafferScene.SceneSwitch,
"description",
"""
	Chooses between multiple input scenes, passing through the
chosen input to the output.
""",
plugs = {
"index" : [
"description",
"""
The index of the input which is passed through. A value
of 0 chooses the first input, 1 the second and so on. Values
larger than the number of available inputs wrap back around to
the beginning.
"""
]
}
)
GafferUI.PlugValueWidget.registerCreator( GafferScene.SceneSwitch, "in[0-9]*", None )
|
cloudify-cosmo/cloudify-plugins-common
|
cloudify/tests/test_context.py
|
Python
|
apache-2.0
| 26,770
| 0
|
########
# Copyright (c) 2014 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
import logging
import mock
import sys
import os
import shutil
import tempfile
import unittest
from os.path import dirname
import testtools
from mock import patch, MagicMock
|
from cloudify_rest_client.exceptions import CloudifyClientError
from cloudify.utils import create_temp_folder
from cloudify.decorators import operation
from cloudify.manager import NodeInstance
from cloudify.workflows import local
from cloudify import constants, state, context, exceptions, conflict_handlers
import cloudify.tests as tests_path
from cloudify.test_utils import workflow_test
class CloudifyContextTest(testtools.TestCase):
file_server_process = None
@classmethod
def setUpClass(cls):
state.current_ctx.set(context.CloudifyContext({}), {})
resources_path = os.path.join(dirname(tests_path.__file__))
from cloudify.tests.file_server import FileServer
from cloudify.tests.file_server import PORT
cls.file_server_process = FileServer(resources_path)
cls.file_server_process.start()
os.environ[constants.MANAGER_FILE_SERVER_URL_KEY] = \
"http://localhost:{0}".format(PORT)
_, os.environ[constants.LOCAL_REST_CERT_FILE_KEY] = tempfile.mkstemp()
cls.context = context.CloudifyContext({
'blueprint_id': '',
'tenant': {'name': 'default_tenant'}
})
# the context logger will try to publish messages to rabbit, which is
# not available here. instead, we redirect the output to stdout.
cls.redirect_log_to_stdout(cls.context.logger)
@classmethod
def tearDownClass(cls):
cls.file_server_process.stop()
state.current_ctx.clear()
def setup_tenant_context(self):
self.context = context.CloudifyContext(
{'blueprint_id': 'test_blueprint',
'tenant': {'name': 'default_tenant'}})
self.redirect_log_to_stdout(self.context.logger)
@staticmethod
def redirect_log_to_stdout(logger):
stdout_log_handler = logging.StreamHandler(sys.stdout)
stdout_log_handler.setLevel(logging.DEBUG)
logger.handlers = [stdout_log_handler]
@mock.patch('cloudify.manager.get_rest_client', return_value=MagicMock())
def test_get_resource(self, _):
resource = self.context.get_resource(
resource_path='for_test_bp_resource.txt')
self.assertEquals(resource, 'Hello from test')
def test_get_deployment_resource_priority_over_blueprint_resource(self):
deployment_context_mock = MagicMock()
deployment_context_mock.id = 'dep1'
self.context.deployment = deployment_context_mock
resource = self.context.get_resource(resource_path='for_test.txt')
self.assertEquals(resource, 'belongs to dep1')
def test_get_deployment_resource_no_blueprint_resource(self):
deployment_context_mock = MagicMock()
deployment_context_mock.id = 'dep1'
self.context.deployment = deployment_context_mock
resource = self.context.get_resource(
resource_path='for_test_only_dep.txt')
self.assertEquals(resource, 'belongs to dep1')
@mock.patch('cloudify.manager.get_rest_client', return_value=MagicMock())
def test_download_resource(self, _):
resource_path = self.context.download_resource(
resource_path='for_test.txt')
self.assertIsNotNone(resource_path)
self.assertTrue(os.path.exists(resource_path))
@mock.patch('cloudify.manager.get_rest_client', return_value=MagicMock())
def test_download_blueprint_from_tenant(self, _):
self.setup_tenant_context()
resource_path = self.context.download_resource(
resource_path='blueprint.yaml')
self.assertIsNotNone(resource_path)
self.assertTrue(os.path.exists(resource_path))
@mock.patch('cloudify.manager.get_rest_client', return_value=MagicMock())
def test_download_resource_to_specific_file(self, _):
target_path = "{0}/for_test_custom.log".format(create_temp_folder())
resource_path = self.context.download_resource(
resource_path='for_test.txt',
target_path=target_path)
self.assertEqual(target_path, resource_path)
self.assertTrue(os.path.exists(resource_path))
@mock.patch('cloudify.manager.get_rest_client', return_value=MagicMock())
def test_download_resource_to_non_writable_location(self, _):
self.assertRaises(IOError, self.context.download_resource,
'for_test.txt',
'/non-existing-folder')
@mock.patch('cloudify.manager.get_rest_client', return_value=MagicMock())
def test_get_non_existing_resource(self, _):
self.assertRaises(exceptions.HttpException, self.context.get_resource,
'non_existing.log')
def test_ctx_instance_in_relationship(self):
ctx = context.CloudifyContext({
'node_id': 'node-instance-id',
'related': {
'node_id': 'related-instance-id',
'is_target': True
},
'relationships': ['related-instance-id']
})
self.assertEqual('node-instance-id', ctx.source.instance.id)
self.assertEqual('related-instance-id', ctx.target.instance.id)
e = self.assertRaises(exceptions.NonRecoverableError,
lambda: ctx.node)
self.assertIn('ctx.node/ctx.instance can only be used in a '
'node-instance context but used in a '
'relationship-instance context.', str(e))
e = self.assertRaises(exceptions.NonRecoverableError,
lambda: ctx.instance)
self.assertIn('ctx.node/ctx.instance can only be used in a '
'node-instance context but used in a '
'relationship-instance context.', str(e))
def test_source_target_not_in_relationship(self):
ctx = context.CloudifyContext({})
e = self.assertRaises(exceptions.NonRecoverableError,
lambda: ctx.source)
self.assertIn('ctx.source/ctx.target can only be used in a '
'relationship-instance context but used in a '
'deployment context.', str(e))
e = self.assertRaises(exceptions.NonRecoverableError,
lambda: ctx.target)
self.assertIn('ctx.source/ctx.target can only be used in a '
'relationship-instance context but used in a '
'deployment context.', str(e))
def test_ctx_type(self):
ctx = context.CloudifyContext({})
self.assertEqual(constants.DEPLOYMENT, ctx.type)
ctx = context.CloudifyContext({'node_id': 'node-instance-id'})
self.assertEqual(constants.NODE_INSTANCE, ctx.type)
ctx = context.CloudifyContext({
'node_id': 'node-instance-id',
'related': {
'node_id': 'related-instance-id',
'is_target': True
},
'relationships': ['related-instance-id']
})
self.assertEqual(constants.RELATIONSHIP_INSTANCE, ctx.type)
class NodeContextTests(testtools.TestCase):
test_blueprint_path = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
"resources/blueprints/test-context-node.yaml")
@workflow_test(blueprint_path=test_blueprint_path,
resources_to_copy=[
'r
|
yeleman/snisi
|
snisi_maint/management/commands/entities_to_cascades.py
|
Python
|
mit
| 2,414
| 0
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4 nu
from __future__ import (unicode_literals, absolute_import,
division, print_function)
import logging
from django.core.management.base import BaseCommand
from optparse import make_option
from py3compat import PY2
from snisi_core.models.Entities import AdministrativeEntity as AEntity
if PY2:
import unicodecsv as csv
else:
import csv
logger = logging.getLogger(__name__)
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('-f',
help='CSV file',
action='store',
dest='filename'),
)
def handle(self, *args, **options):
headers = ['name', 'region', 'cercle_commune', 'commune_quartier']
f = open(options.get('filename'), 'w')
csv_writer = csv.DictWriter(f, fieldnames=headers)
csv_writer.writeheader()
csv_writer.writerow({
'name': "label",
'region': "Région",
'cercle_commune': "Cercle",
'commune_quartier': "Commune",
})
for region in AEntity.objects.filter(type__slug='region'):
            logger.info(region)
            is_bko = region.name == 'BAMAKO'
for cercle in AEntity.objects.filter(parent=region):
logger.info(cercle)
for commune in AEntity.objects.filter(parent=cercle):
logger.info(commune)
if not is_bko:
csv_writer.writerow({
'name': "choice_label",
                        'region': region.name,
                        'cercle_commune': cercle.name,
'commune_quartier': commune.name
})
continue
for vfq in AEntity.objects.filter(parent=commune):
for v in (region, cercle, commune, vfq):
if not len(v.name.strip()):
continue
csv_writer.writerow({
'name': "choice_label",
'region': region.name,
'cercle_commune': commune.name,
'commune_quartier': vfq.name
})
f.close()
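
For clarity, the rows this command writes have the shape sketched below (entity names are invented for the example; real values come from the AdministrativeEntity records):

# Illustrative output shape (values are made up):
#
#   name,region,cercle_commune,commune_quartier
#   label,Région,Cercle,Commune
#   choice_label,KAYES,BAFOULABE,BAMAFELE    # regular regions: cercle + commune
#   choice_label,BAMAKO,COMMUNE I,BANCONI    # Bamako descends one extra level (vfq)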
|
joergullrich/virl-lab
|
library/ntc_reboot.py
|
Python
|
gpl-3.0
| 6,997
| 0.001572
|
#!/usr/bin/env python
# Copyright 2015 Jason Edelman <jason@networktocode.com>
# Network to Code, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
DOCUMENTATION = '''
---
module: ntc_reboot
short_description: Reboot a network device.
description:
- Reboot a network device, optionally on a timer.
- Supported platforms include Cisco Nexus switches with NX-API, Cisco IOS switches or routers, Arista switches with eAPI.
notes:
- The timer is only supported for IOS devices.
author: Jason Edelman (@jedelman8)
version_added: 1.9.2
requirements:
- pyntc
options:
platform:
description:
- Switch platform
required: true
    choices: ['cisco_nxos_nxapi', 'arista_eos_eapi', 'cisco_ios_ssh', 'juniper_junos_netconf']
timer:
description:
- Time in minutes after which the device will be rebooted.
required: false
default: null
confirm:
description:
- Safeguard boolean. Set to true if you're sure you want to reboot.
required: false
default: false
host:
description:
      - Hostname or IP address of switch.
required: true
username:
description:
- Username used to login to the target device
required: true
password:
description:
- Password used to login to the target device
required: true
secret:
description:
- Enable secret for devices connecting over SSH.
required: false
transport:
description:
- Transport protocol for API-based devices.
required: false
default: https
choices: ['http', 'https']
port:
description:
- TCP/UDP port to connect to target device. If omitted standard port numbers will be used.
80 for HTTP; 443 for HTTPS; 22 for SSH.
required: false
default: null
ntc_host:
description:
- The name of a host as specified in an NTC configuration file.
required: false
default: null
ntc_conf_file:
description:
- The path to a local NTC configuration file. If omitted, and ntc_host is specified,
the system will look for a file given by the path in the environment variable PYNTC_CONF,
and then in the users home directory for a file called .ntc.conf.
required: false
default: null
'''
EXAMPLES = '''
- ntc_reboot:
platform: cisco_nxos_nxapi
confirm: true
host: "{{ inventory_hostname }}"
username: "{{ username }}"
password: "{{ password }}"
transport: http
- ntc_reboot:
ntc_host: n9k1
ntc_conf_file: .ntc.conf
confirm: true
- ntc_reboot:
platform: arista_eos_eapi
confirm: true
host: "{{ inventory_hostname }}"
username: "{{ username }}"
password: "{{ password }}"
- ntc_reboot:
    platform: cisco_ios_ssh
confirm: true
timer: 5
host: "{{ inventory_hostname }}"
username: "{{ username }}"
password: "{{ password }}"
secret: "{{ secret }}"
'''
RETURN = '''
rebooted:
description: Whether the device was instructed to reboot.
returned: success
type: boolean
sample: true
'''
try:
HAS_PYNTC = True
from pyntc import ntc_device, ntc_device_by_name
except ImportError:
HAS_PYNTC = False
PLATFORM_NXAPI = 'cisco_nxos_nxapi'
PLATFORM_IOS = 'cisco_ios_ssh'
PLATFORM_EAPI = 'arista_eos_eapi'
PLATFORM_JUNOS = 'juniper_junos_netconf'
def main():
module = AnsibleModule(
argument_spec=dict(
platform=dict(choices=[PLATFORM_NXAPI, PLATFORM_IOS, PLATFORM_EAPI, PLATFORM_JUNOS],
required=False),
host=dict(required=False),
username=dict(required=False, type='str'),
password=dict(required=False, type='str'),
secret=dict(required=False),
transport=dict(required=False, choices=['http', 'https']),
port=dict(required=False, type='int'),
ntc_host=dict(required=False),
ntc_conf_file=dict(required=False),
confirm=dict(required=False, default=False, type='bool', choices=BOOLEANS),
            timer=dict(required=False, type='int'),
),
mutually_exclusive=[['host', 'ntc_host'],
['ntc_host', 'secret'],
['ntc_host', 'transport'],
['ntc_host', 'port'],
['ntc_conf_file', 'secret'],
['ntc_conf_file', 'transport'],
['ntc_conf_file', 'port'],
],
required_one_of=[['host', 'ntc_host']],
        required_together=[['host', 'username', 'password', 'platform']],
supports_check_mode=False
)
if not HAS_PYNTC:
        module.fail_json(msg='pyntc Python library not found.')
platform = module.params['platform']
host = module.params['host']
username = module.params['username']
password = module.params['password']
ntc_host = module.params['ntc_host']
ntc_conf_file = module.params['ntc_conf_file']
transport = module.params['transport']
port = module.params['port']
secret = module.params['secret']
if ntc_host is not None:
device = ntc_device_by_name(ntc_host, ntc_conf_file)
else:
kwargs = {}
if transport is not None:
kwargs['transport'] = transport
if port is not None:
kwargs['port'] = port
if secret is not None:
kwargs['secret'] = secret
device_type = platform
device = ntc_device(device_type, host, username, password, **kwargs)
confirm = module.params['confirm']
timer = module.params['timer']
if not confirm:
module.fail_json(msg='confirm must be set to true for this module to work.')
supported_timer_platforms = [PLATFORM_IOS, PLATFORM_JUNOS]
if timer is not None \
and device.device_type not in supported_timer_platforms:
module.fail_json(msg='Timer parameter not supported on platform %s.' % platform)
device.open()
changed = False
rebooted = False
if timer is not None:
device.reboot(confirm=True, timer=timer)
else:
device.reboot(confirm=True)
changed = True
rebooted = True
device.close()
module.exit_json(changed=changed, rebooted=rebooted)
from ansible.module_utils.basic import *
main()
|
peterbe/headsupper
|
headsupper/base/migrations/0001_initial.py
|
Python
|
mpl-2.0
| 2,093
| 0.002389
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.utils.timezone
import jsonfield.fields
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Payload',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('payload', jsonfield.fields.JSONField()),
('http_error', models.IntegerField()),
('messages', jsonfield.fields.JSONField()),
('date', models.DateTimeField(default=django.utils.timezone.now)),
],
),
migrations.CreateModel(
name='Project',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('github_full_name', models.CharField(max_length=200)),
('trigger_word', models.CharField(default=b'Headsup', max_length=100)),
('case_sensitive_trigger_word', models.BooleanField(default=False)),
('github_webhook_secret', models.CharField(max_length=100)),
('send_to', models.TextField()),
('send_cc', models.TextField(null=True, blank=True)),
('send_bcc', models.TextField(null=True, blank=True)),
('cc_commit_author', models.BooleanField(default=False)),
('on_tag_only', models.BooleanField(default=False)),
('created', models.DateTimeField(auto_now_add=True)),
('modified', models.DateTimeField(auto_now=True)),
('creator', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
],
),
migrations.AddField(
model_name='payload',
name='project',
field=models.ForeignKey(to='base.Project', null=True),
),
]
|
blackmad/snippets
|
models/group.py
|
Python
|
apache-2.0
| 556
| 0.017986
|
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy import Table, Column, Integer, ForeignKey
from sqlalchemy.orm import relationship
from models.base import Base, get_or_create
db = SQLAlchemy()
class Group(Base):
__tablename__ = 'group'
id = db.Column(db.String(50), unique=True, primary_key=True)
subscribers = db.relationship('GroupSubscription', back_populates='group')
    members = db.relationship('GroupMembership', back_populates='group')
def __init__(self, id):
self.id = id
@property
def name(self):
return self.id
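
A brief usage sketch. `get_or_create` is imported above from models.base, but its signature is not shown here, so the `(session, model, **kwargs)` form below is an assumption:

# Assumed usage; get_or_create's real signature may differ.
group = get_or_create(db.session, Group, id='editors')
print(group.name)        # 'editors' -- name simply mirrors id
for membership in group.members:
    print(membership)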
|
ygenc/onlineLDA
|
wikirandom.py
|
Python
|
gpl-3.0
| 5,021
| 0.00478
|
# wikirandom.py: Functions for downloading random articles from Wikipedia
#
# Copyright (C) 2010 Matthew D. Hoffman
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys, urllib2, re, string, time, threading
def get_random_wikipedia_article():
"""
Downloads a randomly selected Wikipedia article (via
http://en.wikipedia.org/wiki/Special:Random) and strips out (most
of) the formatting, links, etc.
This function is a bit simpler and less robust than the code that
was used for the experiments in "Online VB for LDA."
"""
failed = True
while failed:
articletitle = None
failed = False
try:
req = urllib2.Request('http://en.wikipedia.org/wiki/Special:Random',
None, { 'User-Agent' : 'x'})
f = urllib2.urlopen(req)
while not articletitle:
line = f.readline()
result = re.search(r'title="Edit this page" href="/w/index.php\?title=(.*)\&action=edit" /\>', line)
if (result):
articletitle = result.group(1)
break
elif (len(line) < 1):
sys.exit(1)
req = urllib2.Request('http://en.wikipedia.org/w/index.php?title=Special:Export/%s&action=submit' \
% (articletitle),
None, { 'User-Agent' : 'x'})
f = urllib2.urlopen(req)
all = f.read()
except (urllib2.HTTPError, urllib2.URLError):
print 'oops. there was a failure downloading %s. retrying...' \
% articletitle
failed = True
continue
print 'downloaded %s. parsing...' % articletitle
try:
all = re.search(r'<text.*?>(.*)</text', all, flags=re.DOTALL).group(1)
all = re.sub(r'\n', ' ', all)
all = re.sub(r'\{\{.*?\}\}', r'', all)
all = re.sub(r'\[\[Category:.*', '', all)
all = re.sub(r'==\s*[Ss]ource\s*==.*', '', all)
all = re.sub(r'==\s*[Rr]eferences\s*==.*', '', all)
            all = re.sub(r'==\s*[Ee]xternal [Ll]inks\s*==.*', '', all)
all = re.sub(r'==\s*[Ee]xternal [Ll]inks and [Rr]eferences==\s*', '', all)
all = re.sub(r'==\s*[Ss]ee [Aa]lso\s*==.*', '', all)
all = re.sub(r'http://[^\s]*', '', all)
all = re.sub(r'\[\[Image:.*?\]\]', '', all)
all = re.sub(r'Image:.*?\|', '', all)
all = re.sub(r'\[\[.*?\|*([^\|]*?)\]\]', r'\1', all)
all = re.sub(r'\<.*?>', '', all)
except:
# Something went wrong, try again. (This is bad coding practice.)
print 'oops. there was a failure parsing %s. retrying...' \
% articletitle
failed = True
continue
return(all, articletitle)
class WikiThread(threading.Thread):
articles = list()
articlenames = list()
lock = threading.Lock()
def run(self):
(article, articlename) = get_random_wikipedia_article()
WikiThread.lock.acquire()
WikiThread.articles.append(article)
WikiThread.articlenames.append(articlename)
WikiThread.lock.release()
def get_random_wikipedia_articles(n):
"""
Downloads n articles in parallel from Wikipedia and returns lists
of their names and contents. Much faster than calling
get_random_wikipedia_article() serially.
"""
maxthreads = 8
WikiThread.articles = list()
WikiThread.articlenames = list()
wtlist = list()
for i in range(0, n, maxthreads):
'''
YEGIN: commented out for test
'''
# print 'downloaded %d/%d articles...' % (i, n)
for j in range(i, min(i+maxthreads, n)):
wtlist.append(WikiThread())
wtlist[len(wtlist)-1].start()
for j in range(i, min(i+maxthreads, n)):
wtlist[j].join()
# '''
# YEGIN: added for test
# '''
# print WikiThread.articles
# print WikiThread.articlenames
return (WikiThread.articles, WikiThread.articlenames)
if __name__ == '__main__':
t0 = time.time()
(articles, articlenames) = get_random_wikipedia_articles(1)
for i in range(0, len(articles)):
print articlenames[i]
t1 = time.time()
print 'took %f' % (t1 - t0)
|
openstack/compute-hyperv
|
compute_hyperv/nova/conf.py
|
Python
|
apache-2.0
| 3,933
| 0
|
# Copyright 2017 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
import nova.conf
hyperv_opts = [
cfg.IntOpt('evacuate_task_state_timeout',
default=600,
help='Number of seconds to wait for an instance to be '
'evacuated during host maintenance.'),
cfg.IntOpt('cluster_event_check_interval',
deprecated_for_removal=True,
deprecated_since="5.0.1",
default=2),
cfg.BoolOpt('instance_automatic_shutdown',
default=False,
help='Automatically shutdown instances when the host is '
                     'shutdown. By default, instances will be saved, which '
'adds a disk overhead. Changing this option will not '
'affect existing instances.'),
cfg.IntOpt('instance_live_migration_timeout',
default=300,
min=0,
help='Number of seconds to wait for an instance to be '
'live migrated (Only applies to clustered instances '
'for the moment).'),
cfg.IntOpt('max_failover_count',
default=1,
min=1,
help="The maximum number of failovers that can occur in the "
"failover_period timeframe per VM. Once a VM's number "
"failover reaches this number, the VM will simply end up "
"in a Failed state."),
cfg.IntOpt('failover_period',
default=6,
min=1,
help="The number of hours in which the max_failover_count "
"number of failovers can occur."),
cfg.BoolOpt('recreate_ports_on_failover',
default=True,
help="When enabled, the ports will be recreated for failed "
"over instances. This ensures that we're not left with "
"a stale port."),
cfg.BoolOpt('auto_failback',
default=True,
help="Allow the VM the failback to its original host once it "
"is available."),
cfg.BoolOpt('force_destroy_instances',
default=False,
help="If this option is enabled, instance destroy requests "
"are executed immediately, regardless of instance "
"pending tasks. In some situations, the destroy "
"operation will fail (e.g. due to file locks), "
"requiring subsequent retries."),
cfg.BoolOpt('move_disks_on_cold_migration',
default=True,
help="Move the instance files to the instance dir configured "
"on the destination host. You may consider disabling "
"this when using multiple CSVs or shares and you wish "
"the source location to be preserved."),
]
coordination_opts = [
cfg.StrOpt('backend_url',
default='file:///C:/OpenStack/Lock',
help='The backend URL to use for distributed coordination.'),
]
CONF = nova.conf.CONF
CONF.register_opts(coordination_opts, 'coordination')
CONF.register_opts(hyperv_opts, 'hyperv')
def list_opts():
return [('coordination', coordination_opts),
('hyperv', hyperv_opts)]
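
As a sanity check, consumers read these options through the shared CONF object once this module is imported; a minimal sketch (option and group names are taken from the registrations above):

# Sketch: reading the options registered above via oslo.config.
from compute_hyperv.nova import conf  # import triggers register_opts()

CONF = conf.CONF
timeout = CONF.hyperv.evacuate_task_state_timeout  # defaults to 600
lock_url = CONF.coordination.backend_url           # 'file:///C:/OpenStack/Lock'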
|
be-ndee/bubi-lang
|
tests/test_default_mapper.py
|
Python
|
mit
| 1,127
| 0
|
import unittest
import os
from bubi.mapper import DefaultMapper
class DefaultMapperTestCase(unittest.TestCase):
def setUp(self):
self.mapper = DefaultMapper(colorize=False)
        self.color_mapper = DefaultMapper(colorize=True)
    # test the map method without color
def test_mapping(self):
self.assertEquals(self.mapper.map('a'), '+')
self.assertEquals(self.mapper.map('z'), '+')
self.assertEquals(self.mapper.map('0'), '-')
self.assertEquals(self.mapper.map('9'), '-')
self.assertEquals(self.mapper.map('\n'), '\n')
self.assertEquals(self.mapper.map('@'), '.')
# test the map method with color
def test_color_mapping(self):
self.assertEquals(self.color_mapper.map('a'), '\033[32m+\033[0m')
self.assertEquals(self.color_mapper.map('z'), '\033[32m+\033[0m')
self.assertEquals(self.color_mapper.map('0'), '\033[31m-\033[0m')
self.assertEquals(self.color_mapper.map('9'), '\033[31m-\033[0m')
self.assertEquals(self.color_mapper.map('\n'), '\n')
self.assertEquals(self.color_mapper.map('@'), '.')
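
The class under test is not shown in this file; below is a minimal DefaultMapper sketch reconstructed from the assertions above (an assumption, not the actual bubi.mapper source):

# Reconstructed sketch of bubi.mapper.DefaultMapper, inferred from the
# expected outputs above; the real implementation may differ.
class DefaultMapper(object):
    def __init__(self, colorize=False):
        self.colorize = colorize

    def map(self, ch):
        if ch == '\n':
            return '\n'
        if ch.isalpha():
            return '\033[32m+\033[0m' if self.colorize else '+'  # green plus
        if ch.isdigit():
            return '\033[31m-\033[0m' if self.colorize else '-'  # red minus
        return '.'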
|
vasiliykochergin/euca2ools
|
euca2ools/commands/iam/listaccountpolicies.py
|
Python
|
bsd-2-clause
| 3,688
| 0
|
# Copyright 2009-2015 Eucalyptus Systems, Inc.
#
# Redistribution and use of this software in source and binary forms,
# with or without modification, are permitted provided that the following
# conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from requestbuilder import Arg
from requestbuilder.response import PaginatedResponse
from euca2ools.commands.iam import IAMRequest, arg_account_name
from euca2ools.commands.iam.getaccountpolicy import GetAccountPolicy
class ListAccountPolicies(IAMRequest):
    DESCRIPTION = ('[Eucalyptus only] List one or all policies '
                   'attached to an account')
ARGS = [arg_account_name(help='''name or ID of the account owning
the policies to list (required)'''),
Arg('-p', '--policy-name', metavar='POLICY', route_to=None,
help='display a specific policy'),
Arg('-v', '--verbose', action='store_true', route_to=None,
help='''display the contents of the resulting policies (in
addition to their names)'''),
Arg('--pretty-print', action='store_true', route_to=None,
help='''when printing the contents of policies, reformat them
for easier reading''')]
LIST_TAGS = ['PolicyNames']
def main(self):
return PaginatedResponse(self, (None,), ('PolicyNames',))
def prepare_for_page(self, page):
# Pages are defined by markers
self.params['Marker'] = page
def get_next_page(self, response):
if response.get('IsTruncated') == 'true':
return response['Marker']
def print_result(self, result):
if self.args.get('policy_name'):
# Look for the specific policy the user asked for
for policy_name in result.get('PolicyNames', []):
if policy_name == self.args['policy_name']:
if self.args['verbose']:
self.print_policy(policy_name)
else:
print policy_name
break
else:
for policy_name in result.get('PolicyNames', []):
print policy_name
if self.args['verbose']:
self.print_policy(policy_name)
def print_policy(self, policy_name):
req = GetAccountPolicy(
service=self.service, AccountName=self.args['AccountName'],
PolicyName=policy_name, pretty_print=self.args['pretty_print'])
response = req.main()
req.print_result(response)
|
vrde/logstats
|
logstats/base.py
|
Python
|
mit
| 3,761
| 0.000532
|
'''Base module to handle the collection and the output of statistical data.'''
import logging
import time
import multiprocessing as mp
import queue
from collections import Counter
log = logging.getLogger(__name__)
current_milli_time = lambda: int(round(time.time() * 1000))
def is_number(val):
'''Function to check if the value is a number.'''
try:
float(val)
return True
except ValueError:
return False
class Logstats(object):
    '''This class bridges the data in input (accumulated in its internal
    `stats` counter) to a generic output (`log`, by default).
'''
def __init__(self, msg=None, emit_func=None, logger=log, level='INFO',
timeout=1, queue=None):
'''Initialize the instance.
If `emit_func` is defined, `logger` and `level` are ignored.
        Keyword arguments:
        msg -- a string used to format `stats` (by default it outputs a
               list of comma separated values)
        emit_func -- a function to emit the formatted output
                     (default: logging.log)
        logger -- the logger used to log the formatted output (default:
                  the module-level `log` instance)
        level -- the log level (default: INFO)
        '''
self.stats = Counter()
self.msg = msg
self.logger = logger
self.level = level
self.old_stats = {}
self.emit_func = emit_func
self.last = current_milli_time()
self.timeout = timeout
self.queue = queue
self.main_queue = None
if not logger.isEnabledFor(logging.getLevelName(level)):
logger.warning('Logger is not enabled to log at level {}.'.format(level))
def __getitem__(self, key):
return self.stats[key]
def __setitem__(self, key, val):
self.stats[key] = val
def update(self, *args, **kwargs):
self.stats.update(*args, **kwargs)
def _get_speed(self, new, old, delta):
return int(round(float((new - old)) / (delta / 1e3)))
def _consume_queue(self):
if self.main_queue:
while True:
try:
self.stats.update(self.main_queue.get_nowait())
except queue.Empty:
return
def get_stats(self, delta):
self._consume_queue()
stats = self.stats
if hasattr(self.stats, '__call__'):
stats = self.stats(delta)
else:
stats = stats.copy()
speed = dict(('{}.speed'.format(k),
self._get_speed(stats[k],
self.old_stats.get(k, 0),
delta))
for k in stats if is_number(stats[k]))
self.old_stats = stats
stats.update(speed)
return stats
def get_child(self):
if not self.main_queue:
self.main_queue = mp.Queue()
return Logstats(queue=self.main_queue)
def format_msg(self, stats):
if self.msg:
msg = self.msg.format(**stats)
else:
msg = ', '.join('{}: {}'.format(k, stats[k])
for k in sorted(stats))
return msg
def emit(self, msg):
if self.emit_func:
self.emit_func(msg)
else:
self.logger.log(getattr(logging, self.level), msg)
def __call__(self):
if self.queue:
self.queue.put(self.stats)
            self.stats = Counter()
else:
            delta = current_milli_time() - self.last
stats = self.get_stats(delta)
if stats:
self.emit(self.format_msg(stats))
self.last = current_milli_time()
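
A short usage sketch grounded in the API above (`__setitem__`, `update`, and `__call__`); how often `__call__` runs, e.g. from a timer thread, is left to the caller:

# Minimal usage sketch based on the methods defined above.
stats = Logstats(msg='lines: {lines}, errors: {errors}')
stats['errors'] = 0
for line in ['ok', 'ok', 'bad']:
    stats['lines'] += 1
    if line == 'bad':
        stats.update(errors=1)
stats()  # logs e.g. "lines: 3, errors: 1" at INFO level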
|
rob-nn/python
|
first_book/message_analyzer.py
|
Python
|
gpl-2.0
| 365
| 0.005479
|
# Message Analyzer
# Demonstrates the len() function and the in operator
message = input("Enter a message: ")
print("\nThe length of your message is:", len(message))
print("\nThe most common letter in the English language, 'e',")
if "e" in message:
print("is in your message.")
else:
print("is not in your message.")
input("\n\nPress the enter key to exit.")
|