repo_name (string, 5-100 chars) | path (string, 4-231 chars) | language (1 class) | license (15 classes) | size (int64, 6-947k) | score (float64, 0-0.34) | prefix (string, 0-8.16k chars) | middle (string, 3-512 chars) | suffix (string, 0-8.17k chars)

CLVsol/clvsol_odoo_addons | clv_document_export/models/model_export_document_item.py | Python | agpl-3.0 | 2,203 | 0.001362
# -*- coding: utf-8 -*-
# Copyright (C) 2013-Today Carlos Eduardo Vercelino - CLVsol
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import api, fields, models
class ModelExportDocumentItem(models.Model):
_description = 'Model Export Document Item'
_name = "clv.model_export.document_item"
_order = "sequence"
name = fields.Char(string='Alias', index=False, required=False)
model_export_id = fields.Many2one(
comodel_name='clv.model_export',
string='Model Export',
ondelete='restrict'
)
document_item_id = fields.Many2one(
comodel_name='clv.document.item',
string='Document Item',
ondelete='restrict',
domain="[('document_type_id','!=','False')]"
)
document_item_code = fields.Char(
string='Item Code',
related='document_item_id.code',
store=False
)
document_item_document_type_id = fields.Many2one(
string='Item Type',
related='document_item_id.document_type_id',
store=True
)
document_item_name = fields.Char(
string='Item',
related='document_item_id.name',
store=False
)
sequence = fields.Integer(
string='Sequence',
default=10
)
model_export_display = fields.Boolean(string='Display in Export', default=True)
class ModelExport(models.Model):
_inherit = 'clv.model_export'
use_document_items = fields.Boolean(string='Use Document Items', default=False)
model_export_document_item_ids = fields.One2many(
comodel_name='clv.model_export.document_item',
inverse_name='model_export_id',
string='Model Export Document Items'
)
count_model_export_document_items = fields.Integer(
string='Model Export Document Items (count)',
compute='_compute_count_model_export_document_item',
store=True
)
@api.depends('model_export_document_item_ids')
def _compute_count_model_export_document_item(self):
for r in self:
r.count_model_export_document_items = len(r.model_export_document_item_ids)

eubr-bigsea/stand | stand/util/dummy_executor.py | Python | apache-2.0 | 7,975 | 0.000502
# -*- coding: utf-8 -*-
"""
A dummy executor that process Lemonade jobs and only returns fake statuses and
data. Used to test Stand clients in an integration test.
"""
import datetime
import json
import logging
import logging.config
import random
import eventlet
import socketio
from flask_script import Manager
# Logging configuration
from sqlalchemy import and_
from stand.factory import create_app, create_redis_store
from stand.models import Job, StatusExecution, db, JobStep, JobStepLog
app = create_app(log_level=logging.WARNING)
redis_store = create_redis_store(app)
manager = Manager(app)
MESSAGES = [
"The greatest discovery of my generation is that a human being can alter "
"his life by altering his attitudes of mind.",
"Human beings, by changing the inner attitudes of their minds, can change "
"the outer aspects of their lives.",
"Complaining is good for you as long as you're not complaining to the "
"person you're complaining about.",
"Education is what survives when what has been learned has been forgotten.",
"A man's character is his fate.",
"The farther behind I leave the past, the closer I am to forging my own "
"character.",
"All our dreams can come true, if we have the courage to pursue them.",
"Always remember that you are absolutely unique. Just like everyone else. ",
"A woman's mind is cleaner than a man's: She changes it more often. ",
"I can resist everything except temptation. "
]
@manager.command
def simulate():
logging.config.fileConfig('logging_config.ini')
logger = logging.getLogger(__name__)
# ap = argparse.ArgumentParser()
# ap.add_argument('-c', '')
mgr = socketio.RedisManager(app.config.get('REDIS_URL'), 'job_output')
statuses = [StatusExecution.RUNNING,
# StatusExecution.CANCELED, StatusExecution.ERROR,
# StatusExecution.PENDING, StatusExecution.INTERRUPTED,
StatusExecution.WAITING, StatusExecution.COMPLETED]
while True:
try:
_, job_json = redis_store.blpop('queue_start')
job = json.loads(job_json)
logger.debug('Simulating workflow %s with job %s',
job.get('workflow_id'), job.get('job_id'))
eventlet.sleep(3)
for k in ['job_id']:
if k in job:
logger.info('Room for %s', k)
room = str(job[k])
mgr.emit('update job',
data={'message': random.choice(MESSAGES),
'status': StatusExecution.RUNNING,
'id': job['workflow_id']},
room=room, namespace="/stand")
job_entity = Job.query.get(job.get('job_id'))
job_entity.status = StatusExecution.RUNNING
job_entity.finished = datetime.datetime.utcnow()
db.session.add(job_entity)
db.session.commit()
for task in job.get('workflow', {}).get('tasks', []):
if task['operation']['id'] == 25: # comment
continue
job_step_entity = JobStep.query.filter(and_(
JobStep.job_id == job.get('job_id'),
JobStep.task_id == task['id'])).first()
# Updates task in database
try:
job_step_entity.status = StatusExecution.RUNNING
job_step_entity.logs.append(JobStepLog(
level='WARNING', date=datetime.datetime.now(),
message=random.choice(MESSAGES)))
db.session.add(job_step_entity)
db.session.commit()
except Exception as ex:
logger.error(ex)
for k in ['job_id']:
if k in job:
logger.info('Room for %s and task %s', k,
task.get('id'))
room = str(job[k])
mgr.emit('update task',
data={'message': random.choice(MESSAGES),
'status': random.choice(statuses[:-2]),
'id': task.get('id')}, room=room,
namespace="/stand")
eventlet.sleep(random.randint(2, 5))
for k in ['job_id']:
if k in job:
room = str(job[k])
mgr.emit('update task',
data={'message': random.choice(MESSAGES),
'status': StatusExecution.COMPLETED,
'id': task.get('id')}, room=room,
namespace="/stand")
# Updates task in database
try:
# Visualizations
if task['operation']['id'] in [35, 68, 69, 70, 71]:
# import pdb
# pdb.set_trace()
for k in ['job_id']:
room = str(job[k])
mgr.emit('task result',
data={'msg': 'Result generated',
'status': StatusExecution.COMPLETED,
'id': task['id'],
'task': {'id': task['id']},
'title': 'Table with results',
'type': 'VISUALIZATION',
'operation': {
'id': task['operation']['id']},
'operation_id':
task['operation']['id']},
room=room,
namespace="/stand")
#
# result = JobResult(task_id=task['id'],
# title="Table with results",
# operation_id=task['operation']['id'],
# type=ResultType.VISUALIZATION, )
# logger.info('Result created for job %s', job['job_id'])
# job_entity.results.append(result)
job_step_entity.status = StatusExecution.COMPLETED
job_step_entity.logs.append(JobStepLog(
level='WARNING', date=datetime.datetime.now(),
message=random.choice(MESSAGES)))
db.session.add(job_step_entity)
except Exception as ex:
logger.error(ex)
# eventlet.sleep(5)
for k in ['job_id']:
if k in job:
logger.info('Room for %s', k)
room = str(job[k])
mgr.emit('update job',
data={'message': random.choice(MESSAGES),
'status': StatusExecution.COMPLETED,
'finished': job_entity.finished.isoformat(),
'id': job['job_id']},
room=room, namespace="/stand")
if job_entity:
job_entity.status = StatusExecution.COMPLETED
db.session.add(job_entity)
db.session.commit()
except KeyError as ke:
logger.error('Invalid json? KeyError: %s', ke)
raise
except Exception as ex:
logger.error(ex)
raise
logger.info('Simulation finished')
if __name__ == "__main__":
manager.run()

fossfreedom/coverart-browser | coverart_listview.py | Python | gpl-3.0 | 2,367 | 0
# -*- Mode: python; coding: utf-8; tab-width: 4; indent-tabs-mode: nil; -*-
#
# Copyright (C) 2012 - fossfreedom
# Copyright (C) 2012 - Agustin Carrasco
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
from gi.repository import GObject
from gi.repository import GLib
from coverart_widgets import AbstractView
class ListShowingPolicy(GObject.Object):
'''
Policy that mostly takes care of how and when things should be showed on
the view that makes use of the `AlbumsModel`.
'''
def __init__(self, list_view):
super(ListShowingPolicy, self).__init__()
self.counter = 0
self._has_initialised = False
def initialise(self, album_manager):
if self._has_initialised:
return
self._has_initialised = True
class ListView(AbstractView):
__gtype_name__ = "ListView"
name = 'listview'
use_plugin_window = False
def __init__(self):
super(ListView, self).__init__()
self.view = self
self._has_initialised = False
self.show_policy = ListShowingPolicy(self)
def initialise(self, source):
if self._has_initialised:
return
self._has_initialised = True
self.view_name = "list_view"
super(ListView, self).initialise(source)
# self.album_manager = source.album_manager
self.shell = source.shell
def switch_to_view(self, source, album):
self.initialise(source)
GLib.idle_add(self.shell.props.display_page_tree.select,
self.shell.props.library_source)
def get_selected_objects(self):
'''
finds what has been selected
returns an array of `Album`
'''
return []

andersonsilvade/python_C | Python32/aulas/letras.py | Python | mit | 199 | 0.045226
letras=[]
i=1
while i <= 10:
letras.append(input("letter: "))
i+=1
i=0
cont=0
while i <= 9:
if letras[i] not in 'aeiou':
cont+=1
i+=1
print("foram lidos %d consoantes" %cont)

pranner/CMPUT410-Lab6-Django | v1/bin/django-admin.py | Python | apache-2.0 | 164 | 0
#!/cshome/pranjali/410/CMPUT410-Lab6-Django/v1/bin/python
from django.core import management
if __name__ == "__main__":
management.execute_from_command_line()

dlutxx/memo | data/nation_parse.py | Python | mit | 3,087 | 0.000984
# -*- coding: utf8 -*-
#
# National Bureau of Statistics administrative division codes, source text:
#
# http://www.stats.gov.cn/tjsj/tjbz/xzqhdm/
#
from __future__ import print_function
from collections import OrderedDict
class Area(object):
level = None
def __init__(self, code, name, parent=None):
'''use unicode name'''
self.code = int(code)
try:
self.name = name.decode('utf8')
except UnicodeEncodeError:
self.name = name
self.parent = parent
def __unicode__(self):
return u'%6d %1d %6d %s' % (self.code,
self.level, self.parent.code, self.name)
def __str__(self):
return unicode(self).encode('utf8')
class ContainerArea(Area):
sub_unit_kls = None
def __init__(self, code, name, parent=None):
super(ContainerArea, self).__init__(code, name, parent)
self.subunits = OrderedDict()
def add_sub_unit(self, code, name):
code = int(code)
unit = self.sub_unit_kls(code, name, self)
self.subunits[code] = unit
def get_sub_unit(self, code):
code = int(code)
return self.subunits[code]
def __unicode__(self):
ret = []
if self.name == u'市辖区':
ret.append(u'%6d %1d %6d %s' % (self.code,
self.level, self.parent.code, self.parent.name))
elif self.name != u'县':
ret.append(u'%6d %1d %6d %s' % (self.code,
self.level, self.parent.code, self.name))
for code, unit in self.subunits.items():
ret.append(unicode(unit))
return u'\n'.join(ret)
class County(Area):
level = 3
class City(ContainerArea):
level = 2
sub_unit_kls = County
class Province(ContainerArea):
level = 1
sub_unit_kls = City
class Nation(ContainerArea):
sub_unit_kls = Province
def __init__(self):
super(Nation, self).__init__(0, u'中国', None)
def load(self, istream):
for line in istream:
self.parse_entry(line)
def parse_entry(self, line):
line = line.strip()
# ignore comments
if not line or line.startswith('#'):
return
code, name = line.split()
prov_part = int(code[:2])
city_part = int(code[2:4])
county_part = int(code[4:])
if county_part == 0:
if city_part == 0: # province
self.add_sub_unit(code, name)
else: # city
prov_code = prov_part * 10000
prov = self.get_sub_unit(prov_code)
prov.add_sub_unit(code, name)
else:
prov_code = prov_part * 10000
prov = self.get_sub_unit(prov_code)
city_code = prov_code + city_part * 100
city = prov.get_sub_unit(city_code)
city.add_sub_unit(code, name)
def __unicode__(self):
return u'\n'.join([unicode(u) for u in self.subunits.values()])
if __name__ == '__main__':
with open('nation.txt') as f:
cn = Nation()
cn.load(f)
print(cn)
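# Worked example (assumed, not from the source file): code "110101" has
# province part 11, city part 01, county part 01, so parse_entry attaches
# it under province 110000 and city 110100.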

ghorn/debian-casadi | docs/examples/python/vdp_indirect_single_shooting.py | Python | lgpl-3.0 | 4,479 | 0.013842
#
# This file is part of CasADi.
#
# CasADi -- A symbolic framework for dynamic optimization.
# Copyright (C) 2010-2014 Joel Andersson, Joris Gillis, Moritz Diehl,
# K.U. Leuven. All rights reserved.
# Copyright (C) 2011-2014 Greg Horn
#
# CasADi is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3 of the License, or (at your option) any later version.
#
# CasADi is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with CasADi; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
#
from casadi import *
import numpy as NP
import matplotlib.pyplot as plt
# Declare variables (use simple, efficient DAG)
x0=SX.sym("x0"); x1=SX.sym("x1")
x = vertcat((x0,x1))
# Control
u = SX.sym("u")
# ODE right hand side
xdot = vertcat([(1 - x1*x1)*x0 - x1 + u, x0])
# Lagrangian function
L = x0*x0 + x1*x1 + u*u
# Costate
lam = SX.sym("lam",2)
# Hamiltonian function
H = inner_prod(lam,xdot) + L
# Costate equations
ldot = -gradient(H,x)
## The control must minimize the Hamiltonian, which is:
print "Hamiltonian: ", H
# H is of a convex quadratic form in u: H = u*u + p*u + q, let's get the coefficient p
p = gradient(H,u) # this gives us 2*u + p
p = substitute(p,u,0) # replace u with zero: gives us p
# H's unconstrained minimizer is: u = -p/2
u_opt = -p/2
# We must constrain u to the interval [-0.75, 1.0], convexity of H ensures that the optimum is obtain at the bound when u_opt is outside the interval
u_opt = min(u_opt,1.0)
u_opt = max(u_opt,-0.75)
print "optimal control: ", u_opt
# Augment f with lam_dot and substitute in the value for the optimal control
f = vertcat((xdot,ldot))
f = substitute(f,u,u_opt)
# Create the right hand side function
rhs_in = daeIn(x=vertcat((x,lam)))
rhs = SXFunction(rhs_in,daeOut(ode=f))
# Create an integrator (CVodes)
I = Integrator("cvodes", rhs)
I.setOption("abstol",1e-8) # abs. tolerance
I.setOption("reltol",1e-8) # rel. tolerance
I.setOption("t0",0.0)
I.setOption("tf",10.0)
I.init()
# The initial state
x_init = NP.array([0.,1.])
# The initial costate
l_init = MX.sym("l_init"
|
,2)
# The initial condition for the shooting
X = vertcat((x_init,l_init))
# Call the integrator
X, = integratorOut(I.call(integratorIn(x0=X)),"xf")
# Costate at the final time should be zero (cf. Bryson and Ho)
lam_f = X[2:4]
g = lam_f
# Formulate root-finding problem
rfp = MXFunction([l_init],[g])
# Select a solver for the root-finding problem
Solver = "nlp"
#Solver = "newton"
#Solver = "kinsol"
# Allocate an implict solver
solver = ImplicitFunction(Solver, rfp)
if Solver=="nlp":
solver.setOption("nlp_solver", "ipopt")
solver.setOption("nlp_solver_options",{"hessian_approximation":"limited-memory"})
elif Solver=="newton":
solver.setOption("linear_solver",CSparse)
elif Solver=="kinsol":
solver.setOption("linear_solver_type","user_defined")
solver.setOption("linear_solver",CSparse)
solver.setOption("max_iter",1000)
# Initialize the solver
solver.init()
# Pass initial guess
#solver.setInput([ 0, 0], "x0")
# Solve the problem
solver.evaluate()
# Retrieve the optimal solution
l_init_opt = NP.array(solver.output().data())
# Time grid for visualization
tgrid = NP.linspace(0,10,100)
# Output functions
output_fcn = SXFunction(rhs_in,[x0,x1,u_opt])
# Simulator to get optimal state and control trajectories
simulator = Simulator(I, output_fcn, tgrid)
simulator.init()
# Pass initial conditions to the simulator
simulator.setInput(NP.concatenate((x_init,l_init_opt)),"x0")
# Simulate to get the trajectories
simulator.evaluate()
# Get optimal control
x_opt = simulator.getOutput(0).T
y_opt = simulator.getOutput(1).T
u_opt = simulator.getOutput(2).T
# Plot the results
plt.figure(1)
plt.clf()
plt.plot(tgrid,x_opt,'--')
plt.plot(tgrid,y_opt,'-')
plt.plot(tgrid,u_opt,'-.')
plt.title("Van der Pol optimization - indirect single shooting")
plt.xlabel('time')
plt.legend(['x trajectory','y trajectory','u trajectory'])
plt.grid()
plt.show()

chambers-brian/SIG_Digital-Strategy_SI_ODP_Backend | tests/unit/dataactvalidator/test_a26_appropriations.py | Python | cc0-1.0 | 1,546 | 0.003881
from tests.unit.dataactcore.factories.staging import AppropriationFactory
from tests.unit.dataactcore.factories.domain import SF133Factory
from tests.unit.dataactvalidator.utils import number_of_errors, query_columns
_FILE = 'a26_appropriations'
_TAS = 'a26_appropriations_tas'
def test_column_headers(database):
expected_subset = {'row_number', 'contract_authority_amount_cpe',
'lines', 'amounts'}
actual = set(query_columns(_FILE, database))
assert (actual & expected_subset) == expected_subset
def test_success(database):
""" Tests that ContractAuthorityAmountTotal_CPE is provided if TAS has contract authority value
provided in GTAS """
tas = "".join([_TAS, "_success"])
sf1 = SF133Factory(tas=tas, period=1, fiscal_year=2016, line=1540, amount=1)
sf2 = SF133Factory(tas=tas, period=1, fiscal_year=2016, line=1640, amount=1)
ap = AppropriationFactory(tas=tas, contract_authority_amount_cpe=1)
assert number_of_errors(_FILE, database, models=[sf1, sf2, ap]) == 0
def test_failure(database):
""" Tests that ContractAuthorityAmountTotal_CPE is not provided if TAS has contract authority value
provided in GTAS """
tas = "".join([_TAS, "_failure"])
sf1 = SF133Factory(tas=tas, period=1, fiscal_year=2016, line=1540, amount=1)
sf2 = SF133Factory(tas=tas, period=1, fiscal_year=2016, line=1640, amount=1)
ap = AppropriationFactory(tas=tas, contract_authority_amount_cpe=0)
assert number_of_errors(_FILE, database, models=[sf1, sf2, ap]) == 1

fedora-conary/conary | conary/repository/netrepos/netauth.py | Python | apache-2.0 | 50,922 | 0.003613
#
# Copyright (c) SAS Institute Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import itertools
import os
import time
import urllib, urllib2
import xml
from conary import conarycfg, versions
from conary.repository import errors
from conary.lib import digestlib, sha1helper, tracelog
from conary.dbstore import sqlerrors
from conary.repository.netrepos import items, versionops, accessmap
from conary.server.schema import resetTable
# FIXME: remove these compatibilty error classes later
UserAlreadyExists = errors.UserAlreadyExists
GroupAlreadyExists = errors.GroupAlreadyExists
MAX_ENTITLEMENT_LENGTH = 255
nameCharacterSet = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789._-\\@'
class UserAuthorization:
def __init__(self, db, pwCheckUrl = None, cacheTimeout = None):
self.db = db
self.pwCheckUrl = pwCheckUrl
self.cacheTimeout = cacheTimeout
self.pwCache = {}
def addUserByMD5(self, cu, user, salt, password):
for letter in user:
if letter not in nameCharacterSet:
raise errors.InvalidName(user)
try:
cu.execute("INSERT INTO Users (userName, salt, password) "
"VALUES (?, ?, ?)",
(user, cu.binary(salt), cu.binary(password)))
uid = cu.lastrowid
except sqlerrors.ColumnNotUnique:
raise errors.UserAlreadyExists, 'user: %s' % user
# make sure we don't conflict with another entry based on case; this
# avoids races from other processes adding case differentiated
# duplicates
cu.execute("SELECT userId FROM Users WHERE LOWER(userName)=LOWER(?)",
user)
if len(cu.fetchall()) > 1:
raise errors.UserAlreadyExists, 'user: %s' % user
return uid
def changePassword(self, cu, user, salt, password):
if self.pwCheckUrl:
raise errors.CannotChangePassword
cu.execute("UPDATE Users SET password=?, salt=? WHERE userName=?",
cu.binary(password), cu.binary(salt), user)
def _checkPassword(self, user, salt, password, challenge, remoteIp = None):
if challenge is ValidPasswordToken:
# Short-circuit for shim-using code that does its own
# authentication, e.g. through one-time tokens or session
# data.
return True
if self.cacheTimeout:
cacheEntry = sha1helper.sha1String("%s%s" % (user, challenge))
timeout = self.pwCache.get(cacheEntry, None)
if timeout is not None and time.time() < timeout:
return True
if self.pwCheckUrl:
try:
url = "%s?user=%s;password=%s" \
% (self.pwCheckUrl, urllib.quote(user),
urllib.quote(challenge))
if remoteIp is not None:
url += ';remote_ip=%s' % urllib.quote(remoteIp)
f = urllib2.urlopen(url)
xmlResponse = f.read()
except:
return False
p = PasswordCheckParser()
p.parse(xmlResponse)
isValid = p.validPassword()
else:
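# Stored password is hex(md5(salt + cleartext)); recompute and compare.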
m = digestlib.md5()
m.update(salt)
m.update(challenge)
isValid = m.hexdigest() == password
if isValid and self.cacheTimeout:
# cacheEntry is still around from above
self.pwCache[cacheEntry] = time.time() + self.cacheTimeout
return isValid
def deleteUser(self, cu, user):
userId = self.getUserIdByName(user)
# First delete the user from all the groups
sql = "DELETE from UserGroupMembers WHERE userId=?"
cu.execute(sql, userId)
# Now delete the user itself
sql = "DELETE from Users WHERE userId=?"
cu.execute(sql, userId)
def getAuthorizedRoles(self, cu, user, password, allowAnonymous = True,
remoteIp = None):
"""
Given a user and password, return the list of roles that are
authorized via these credentials
"""
if isinstance(user, ValidUser):
# Short-circuit for shim-using code that knows what roles
# it wants.
roles = set()
if '*' in user.roles:
cu.execute("SELECT userGroupId FROM UserGroups")
else:
roles = set([x for x in user.roles if isinstance(x, int)])
names = set([x for x in user.roles if not isinstance(x, int)])
if not names:
return roles
places = ', '.join('?' for x in names)
cu.execute("""SELECT userGroupId FROM UserGroups
WHERE userGroup IN ( %s )""" % (places,), *names)
roles.update(x[0] for x in cu)
return roles
cu.execute("""
SELECT Users.salt, Users.password, UserGroupMembers.userGroupId,
Users.userName, UserGroups.canMirror
FROM Users
JOIN UserGroupMembers USING(userId)
JOIN UserGroups USING(userGroupId)
WHERE Users.userName = ? OR Users.userName = 'anonymous'
""", user)
result = [ x for x in cu ]
if not result:
return set()
canMirror = (sum(x[4] for x in result) > 0)
# each user can only appear once (by constraint), so we only
# need to validate the password once. we don't validate the
# password for 'anonymous'. Using a bad password still allows
# anonymous access
userPasswords = [ x for x in result if x[3] != 'anonymous' ]
# mirror users do not have an anonymous fallback
if userPasswords and canMirror:
allowAnonymous = False
if not allowAnonymous:
result = userPasswords
if userPasswords and not self._checkPassword(
user,
cu.frombinary(userPasswords[0][0]),
cu.frombinary(userPasswords[0][1]),
password, remoteIp):
result = [ x for x in result if x[3] == 'anonymous' ]
return set(x[2] for x in result)
def getRolesByUser(self, user):
cu = self.db.cursor()
cu.execute("""SELECT userGroup FROM Users
JOIN UserGroupMembers USING (userId)
JOIN UserGroups USING (userGroupId)
WHERE Users.userName = ?""", user)
return [ x[0] for x in cu ]
def getUserIdByName(self, userName):
cu = self.db.cursor()
cu.execute("SELECT userId FROM Users WHERE userName=?", userName)
ret = cu.fetchall()
if len(ret):
return ret[0][0]
raise errors.UserNotFound(userName)
def getUserList(self):
cu = self.db.cursor()
cu.execute("SELECT userName FROM Users")
return [ x[0] for x in cu ]
class EntitlementAuthorization:
def __init__(self, entCheckUrl = None, cacheTimeout = None):
self.entCheckUrl = entCheckUrl
self.cacheTimeout = cacheTimeout
self.cache = {}
def getAuthorizedRoles(self, cu, serverName, remoteIp,
entitlementClass, entitlement):
"""
Given an entitlement, return the list of roles that the
credentials authorize.
"""
cacheEntry = sha1helper.sha1String("%s%s%s" % (
serverName, entitlementClass, entitlement))
roleIds, timeout, autoRetry = \
self.

Velmont/digital-signage-server | lib/utils/imagekit.py | Python | mit | 4,036 | 0.006442
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Image utilities
Created by: Rui Carmo
License: MIT (see LICENSE for details)
"""
import struct
import StringIO
from operator import itemgetter
def linear_partition(seq, k):
if k <= 0:
return []
n = len(seq) - 1
if k > n:
return map(lambda x: [x], seq)
table, solution = linear_partition_table(seq, k)
k, ans = k-2, []
while k >= 0:
ans = [[seq[i] for i in xrange(solution[n-1][k]+1, n+1)]] + ans
n, k = solution[n-1][k], k-1
return [[seq[i] for i in xrange(0, n+1)]] + ans
def linear_partition_table(seq, k):
n = len(seq)
table = [[0] * k for x in xrange(n)]
solution = [[0] * (k-1) for x in xrange(n-1)]
for i in xrange(n):
table[i][0] = seq[i] + (table[i-1][0] if i else 0)
for j in xrange(k):
table[0][j] = seq[0]
for i in xrange(1, n):
for j in xrange(1, k):
table[i][j], solution[i-1][j-1] = min(
((max(table[x][j-1], table[i][0]-table[x][0]), x) for x in xrange(i)),
key=itemgetter(0))
return (table, solution)
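# Worked example (assumed usage, not from the original file): Skiena's
# classic instance splits [1..9] into 3 contiguous runs minimizing the
# largest run sum:
#   linear_partition([1, 2, 3, 4, 5, 6, 7, 8, 9], 3)
#   -> [[1, 2, 3, 4, 5], [6, 7], [8, 9]]  (sums 15, 13, 17)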
def get_info(data):
"""Parses a small buffer and attempts to return basic image metadata"""
data = str(data)
size = len(data)
height = -1
width = -1
content_type = ''
# handle GIFs
if (size >= 10) and data[:6] in ('GIF87a', 'GIF89a'):
# Check to see if content_type is correct
content_type = 'image/gif'
w, h = struct.unpack("<HH", data[6:10])
width = int(w)
height = int(h)
# See PNG 2. Edition spec (http://www.w3.org/TR/PNG/)
# Bytes 0-7 are below, 4-byte chunk length, then 'IHDR'
# and finally the 4-byte width, height
elif ((size >= 24) and data.startswith('\211PNG\r\n\032\n')
and (data[12:16] == 'IHDR')):
content_type = 'image/png'
w, h = struct.unpack(">LL", data[16:24])
width = int(w)
height = int(h)
# Maybe this is for an older PNG version.
elif (size >= 16) and data.startswith('\211PNG\r\n\032\n'):
# Check to see if we have the right content type
content_type = 'image/png'
w, h = struct.unpack(">LL", data[8:16])
width = int(w)
height = int(h)
# Check for a JPEG
elif (size >= 4):
jpeg = StringIO.StringIO(data)
b = jpeg.read(4)
if b.startswith('\xff\xd8\xff\xe0'):
content_type = 'image/jpeg'
bs = jpeg.tell()
b = jpeg.read(2)
bl = (ord(b[0]) << 8) + ord(b[1])
b = jpeg.read(4)
if b.startswith("JFIF"):
bs += bl
while(bs < len(data)):
jpeg.seek(bs)
b = jpeg.read(4)
bl = (ord(b[2]) << 8) + ord(b[3])
if bl >= 7 and b[0] == '\xff' and b[1] == '\xc0':
jpeg.read(1)
b = jpeg.read(4)
height = (ord(b[0]) << 8) + ord(b[1])
width = (ord(b[2]) << 8) + ord(b[3])
break
bs = bs + bl + 2
return width, height, content_type
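# Minimal usage sketch (an assumption, not part of the original module):
# probe an image's dimensions from its first kilobyte.
#
#   with open('logo.png', 'rb') as f:
#       head = f.read(1024)
#   width, height, content_type = get_info(head)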

FrogLogics/mget | build/lib.linux-x86_64-3.5/Mget/downloader/common.py | Python | gpl-2.0 | 3,835 | 0.041982
#!/usr/bin/env python3
import os, sys
import time
from ..utils import (std, strip_site, MGet, urlparse, HTMLParser)
class FileDownloader(MGet):
def __init__(self, info = {}):
self.last_len = 0
self.alt_prog = 0.0
def getLocalFilesize(self, filename):
tmp_name = self.temp_name(filename)
if os.path.exists(filename): return os.path.getsize(os.path.join('.', filename))
elif os.path.exists(tmp_name): return os.path.getsize(os.path.join('.', tmp_name))
else: return None
def flush_bar (self, result = []):
line = "".join(["%s" % x for x in result])
if self.info.get('newline'): sys.stdout.write('\n')
else: sys.stdout.write('\r')
if self.last_len: sys.stdout.write('\b' * self.last_len)
sys.stdout.write("\r")
sys.stdout.write(line)
sys.stdout.flush()
self.last_len = len(line)
def _progress_bar(self, s_dif = None, progress = None, bytes = None, dif = None, width = 46):
width = self.get_term_width() - width
data_len = (self.cursize - self.resume_len)
quit_size = (self.quit_size / 100.0)
if progress > quit_size: quit_size = progress
prog = int(progress * width)
prog_bal = width - int(progress * width)
if self.quit_size != 100.0:
expect = int(quit_size * width)
ex_bal = int((width - expect) - 2)
ex_prog_bal = int(expect - int(progress * width))
progress_bar = "["+"="*(prog)+">"+" "*(ex_prog_bal)+"]["+" "*(ex_bal)+"] "
else:
progress_bar = "["+"="*(prog)+">"+" "*(prog_bal)+"] "
_res = ["%-6s" % ("{0:.1f}%".format(float(progress) * 100.0)), progress_bar,
"%-12s " % ("{:02,}".format(self.cursize)),
"%9s " % (self.calc_speed(dif,bytes).decode()),
"eta "+ self.calc_eta(s_dif, bytes, data_len, self.remaining).decode()]
self.flush_bar (_res)
def progress_bar_2(self, s_dif = None, progress = None, bytes = None, dif = None, width = 48):
width = self.get_term_width() - width
prog = int(self.alt_prog * width)
prog_bal = width - int(self.alt_prog * width)
progress_bar = "[" + " " * (prog) + "<=>" + " " * (prog_bal) + "] "
_res = [ "(^_^) " if int(self.alt_prog * 10) in list(range(0, 10, 4)) else "(0_0) ",
progress_bar, "%-12s " % ("{:02,}".format(self.cursize)),
"%9s%12s" % (self.calc_speed(dif,bytes).decode()," ")]
self.flush_bar (_res)
if self.alt_prog < 0.1: self.reach_end = False
if self.alt_prog == 1.0: self.reach_end = True
if self.alt_prog < 1.0 and not self.reach_end: self.alt_prog += 0.1
else: self.alt_prog -= 0.1
def _progress(self): return self.get_progress(self.cursize, self.filesize)
def temp_name (self, filename):
if self.info.get('nopart', False) or\
(os.path.exists(filename) and not os.path.isfile(filename)):
return filename
return filename + ".part"
def undo_temp_name (self, filename):
if filename.endswith (".part"): return filename[:-len(".part")]
return filename
def try_rename (self, old_filename, new_filename):
try:
if old_filename == new_filename: return
os.rename (old_filename, new_filename)
except (IOError, OSError) as err:
common.report ('Unable to rename file: %s' % str(err))
class MyHTMLParser(HTMLParser):
def __init__(self, html, tag = {}, hostname = None):
HTMLParser.__init__(self)
self.data = {}
self.start_tag = tag
self.hostname = hostname
self.html = html
def load(self):
self.feed(self.html)
self.close()
def handle_starttag(self, tag, attrs):
if tag not in self.start_tag: return
for name, value in attrs:
if name in self.name or value in self.value:
hostname, site = strip_site(value)
if hostname in std.site_list:
self.data[self.hostname] = value
def get_result(self, tag, name=None, value=None):
self.start_tag = tag
self.name = name or ''
self.value = value or ''
self.load()
if self.hostname in self.data:
return self.data[self.hostname]
else: return

daTokenizer/quickstorm | storm/virtualenvs/_resources/resources/bolts/action_bolt.py | Python | apache-2.0 | 1,292 | 0.023994
# [\0]
#
# This code is confidential and proprietary, All rights reserved.
#
# Tamar Labs 2015.
#
# @author: Adam Lev-Libfeld (adam@tamarlabs.com)
#
from __future__ import absolute_import, print_function, unicode_literals
from kombu import simple, Connection
from streamparse.bolt import Bolt
#import common.logger
from external.actions import ActionDB
class ActionBolt(Bolt):
def initialize(self, storm_conf, context):
self.actionDB = ActionDB()
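# Each tuple carries (action index, payload); run the indexed action and
# forward (index + 1, result) on "next_action" until the chain is exhausted.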
def process(self, tup):
try:
action_num = tup.values[0]
if action_num < len(self.actionDB.actions):
data = self.actionDB.actions[action_num](tup.values[1])
self.emit([action_num+1 , data], stream = "next_action")
else:
self.emit([data], stream = "output_to_queue")
except:
import sys, traceback
msg = "Unexpected ActionBolt (action: %d) error:%s" % (action_num, "\n".join(traceback.format_exception(*sys.exc_info())))
#self.logger.error(msg)

leppa/home-assistant | homeassistant/components/pi_hole/const.py | Python | apache-2.0 | 1,452 | 0.002066
"""Constants
|
for the pi_hole intergration."""
from datetime import timedelta
DOMAIN = "pi_hole"
CONF_LOCATION = "location"
CONF_SLUG = "slug"
DEFAULT_LOCATION = "admin"
DEFAULT_METHOD = "GET"
DEFAULT_NAME = "Pi-Hole"
DEFAULT_SSL = False
DEFAULT_VERIFY_SSL = True
SERVICE_DISABLE = "disable"
SERVICE_DISABLE_ATTR_DURATION = "duration"
SERVICE_DISABLE_ATTR_NAME = "name"
SERVICE_ENABLE = "enable"
SERVICE_ENABLE_ATTR_NAME = SERVICE_DISABLE_ATTR_NAME
ATTR_BLOCKED_DOMAINS = "domains_blocked"
MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=5)
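# Maps Pi-hole API keys to [sensor name, unit of measurement, icon].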
SENSOR_DICT = {
"ads_blocked_today": ["Ads Blocked Today", "ads", "mdi:close-octagon-outline"],
"ads_percentage_today": [
"Ads Percentage Blocked Today",
"%",
"mdi:close-octagon-outline",
],
"clients_ever_seen": ["Seen Clients", "clients", "mdi:account-outline"],
"dns_queries_today": [
"DNS Queries Today",
"queries",
"mdi:comment-question-outline",
],
"domains_being_blocked": ["Domains Blocked", "domains", "mdi:block-helper"],
"queries_cached": ["DNS Queries Cached", "queries", "mdi:comment-question-outline"],
"queries_forwarded": [
"DNS Queries Forwarded",
"queries",
"mdi:comment-question-outline",
],
"unique_clients": ["DNS Unique Clients", "clients", "mdi:account-outline"],
"unique_domains": ["DNS Unique Domains", "domains", "mdi:domain"],
}
SENSOR_LIST = list(SENSOR_DICT)

bmcfee/librosa | librosa/filters.py | Python | isc | 37,764 | 0.001059
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Filters
=======
Filter bank construction
------------------------
.. autosummary::
:toctree: generated/
mel
chroma
constant_q
semitone_filterbank
Window functions
----------------
.. autosummary::
:toctree: generated/
window_bandwidth
get_window
Miscellaneous
-------------
.. autosummary::
:toctree: generated/
constant_q_lengths
cq_to_chroma
mr_frequencies
window_sumsquare
diagonal_filter
"""
import warnings
import numpy as np
import scipy
import scipy.signal
import scipy.ndimage
from numba import jit
from ._cache import cache
from . import util
from .util.exceptions import ParameterError
from .core.convert import note_to_hz, hz_to_midi, midi_to_hz, hz_to_octs
from .core.convert import fft_frequencies, mel_frequencies
__all__ = [
"mel",
"chroma",
"constant_q",
"constant_q_lengths",
"cq_to_chroma",
"window_bandwidth",
"get_window",
"mr_frequencies",
"semitone_filterbank",
"window_sumsquare",
"diagonal_filter",
]
# Dictionary of window function bandwidths
WINDOW_BANDWIDTHS = {
"bart": 1.3334961334912805,
"barthann": 1.4560255965133932,
"bartlett": 1.3334961334912805,
"bkh": 2.0045975283585014,
"black": 1.7269681554262326,
"blackharr": 2.0045975283585014,
"blackman": 1.7269681554262326,
"blackmanharris": 2.0045975283585014,
"blk": 1.7269681554262326,
"bman": 1.7859588613860062,
"bmn": 1.7859588613860062,
"bohman": 1.7859588613860062,
"box": 1.0,
"boxcar": 1.0,
"brt": 1.3334961334912805,
"brthan": 1.4560255965133932,
"bth": 1.4560255965133932,
"cosine": 1.2337005350199792,
"flat": 2.7762255046484143,
"flattop": 2.7762255046484143,
"flt": 2.7762255046484143,
"halfcosine": 1.2337005350199792,
"ham": 1.3629455320350348,
"hamm": 1.3629455320350348,
"hamming": 1.3629455320350348,
"han": 1.50018310546875,
"hann": 1.50018310546875,
"hanning": 1.50018310546875,
"nut": 1.9763500280946082,
"nutl": 1.9763500280946082,
"nuttall": 1.9763500280946082,
"ones": 1.0,
"par": 1.9174603174603191,
"parz": 1.9174603174603191,
"parzen": 1.9174603174603191,
"rect": 1.0,
"rectangular": 1.0,
"tri": 1.3331706523555851,
"triang": 1.3331706523555851,
"triangle": 1.3331706523555851,
}
@cache(level=10)
def mel(
sr,
n_fft,
n_mels=128,
fmin=0.0,
fmax=None,
htk=False,
norm="slaney",
dtype=np.float32,
):
"""Create a Mel filter-bank.
This produces a linear transformation matrix to project
FFT bins onto Mel-frequency bins.
Parameters
----------
sr : number > 0 [scalar]
sampling rate of the incoming signal
n_fft : int > 0 [scalar]
number of FFT components
n_mels : int > 0 [scalar]
number of Mel bands to generate
fmin : float >= 0 [scalar]
lowest frequency (in Hz)
fmax : float >= 0 [scalar]
highest frequency (in Hz).
If `None`, use ``fmax = sr / 2.0``
htk : bool [scalar]
use HTK formula instead of Slaney
norm : {None, 'slaney', or number} [scalar]
If 'slaney', divide the triangular mel weights by the width of the mel band
(area normalization).
If numeric, use `librosa.util.normalize` to normalize each filter by to unit l_p norm.
See `librosa.util.normalize` for a full description of supported norm values
(including `+-np.inf`).
Otherwise, leave all the triangles aiming for a peak value of 1.0
dtype : np.dtype
The data type of the output basis.
By default, uses 32-bit (single-precision) floating point.
Returns
-------
M : np.ndarray [shape=(n_mels, 1 + n_fft/2)]
Mel transform matrix
See also
--------
librosa.util.normalize
Notes
-----
This function caches at level 10.
Examples
--------
>>> melfb = librosa.filters.mel(22050, 2048)
>>> melfb
array([[ 0. , 0.016, ..., 0. , 0. ],
[ 0. , 0. , ..., 0. , 0. ],
...,
[ 0. , 0. , ..., 0. , 0. ],
[ 0. , 0. , ..., 0. , 0. ]])
Clip the maximum frequency to 8KHz
>>> librosa.filters.mel(22050, 2048, fmax=8000)
array([[ 0. , 0.02, ..., 0. , 0. ],
[ 0. , 0. , ..., 0. , 0. ],
...,
[ 0. , 0. , ..., 0. , 0. ],
[ 0. , 0. , ..., 0. , 0. ]])
>>> import matplotlib.pyplot as plt
>>> fig, ax = plt.subplots()
>>> img = librosa.display.specshow(melfb, x_axis='linear', ax=ax)
>>> ax.set(ylabel='M
|
el filter', title='Mel filter bank')
>>> fig.colorbar(img, ax=ax)
"""
if fmax is None:
fmax = float(sr) / 2
# Initialize the weights
n_mels = int(n_mels)
weights = np.zeros((n_mels, int(1 + n_fft // 2)), dtype=dtype)
# Center freqs of each FFT bin
fftfreqs = fft_frequencies(sr=sr, n_fft=n_fft)
# 'Center freqs' of mel bands - uniformly spaced between limits
mel_f = mel_frequencies(n_mels + 2, fmin=fmin, fmax=fmax, htk=htk)
fdiff = np.diff(mel_f)
ramps = np.subtract.outer(mel_f, fftfreqs)
for i in range(n_mels):
# lower and upper slopes for all bins
lower = -ramps[i] / fdiff[i]
upper = ramps[i + 2] / fdiff[i + 1]
# .. then intersect them with each other and zero
weights[i] = np.maximum(0, np.minimum(lower, upper))
if norm == "slaney":
# Slaney-style mel is scaled to be approx constant energy per channel
enorm = 2.0 / (mel_f[2 : n_mels + 2] - mel_f[:n_mels])
weights *= enorm[:, np.newaxis]
else:
weights = util.normalize(weights, norm=norm, axis=-1)
# Only check weights if f_mel[0] is positive
if not np.all((mel_f[:-2] == 0) | (weights.max(axis=1) > 0)):
# This means we have an empty channel somewhere
warnings.warn(
"Empty filters detected in mel frequency basis. "
"Some channels will produce empty responses. "
"Try increasing your sampling rate (and fmax) or "
"reducing n_mels."
)
return weights
@cache(level=10)
def chroma(
sr,
n_fft,
n_chroma=12,
tuning=0.0,
ctroct=5.0,
octwidth=2,
norm=2,
base_c=True,
dtype=np.float32,
):
"""Create a chroma filter bank.
This creates a linear transformation matrix to project
FFT bins onto chroma bins (i.e. pitch classes).
Parameters
----------
sr : number > 0 [scalar]
audio sampling rate
n_fft : int > 0 [scalar]
number of FFT bins
n_chroma : int > 0 [scalar]
number of chroma bins
tuning : float
Tuning deviation from A440 in fractions of a chroma bin.
ctroct : float > 0 [scalar]
octwidth : float > 0 or None [scalar]
``ctroct`` and ``octwidth`` specify a dominance window:
a Gaussian weighting centered on ``ctroct`` (in octs, A0 = 27.5Hz)
and with a gaussian half-width of ``octwidth``.
Set ``octwidth`` to `None` to use a flat weighting.
norm : float > 0 or np.inf
Normalization factor for each filter
base_c : bool
If True, the filter bank will start at 'C'.
If False, the filter bank will start at 'A'.
dtype : np.dtype
The data type of the output basis.
By default, uses 32-bit (single-precision) floating point.
Returns
-------
wts : ndarray [shape=(n_chroma, 1 + n_fft / 2)]
Chroma filter matrix
See Also
--------
librosa.util.normalize
librosa.feature.chroma_stft
Notes
-----
This function caches at level 10.
Examples
--------
Build a simple chroma filter bank
>>> chromafb = librosa.filters.chroma(22050, 4096)
array([[ 1.689e-05, 3.024e-04, ..., 4.639e-17, 5.327e-17],
[ 1.716e-05, 2.652e-04, ..., 2.674e-25, 3.176e-25],
...,
[ 1.578e-05, 3.619e-04

wbap/hackathon-2017-sample | agent/ml/experience.py | Python | apache-2.0 | 3,373 | 0.004447
# coding: utf-8
import numpy as np
from chainer import cuda
from builtins import range
class Experience:
def __init__(self, use_gpu=0, data_size=10**5, replay_size=32, hist_size=1, initial_exploration=10**3, dim=10240):
self.use_gpu = use_gpu
self.data_size = data_size
self.replay_size = replay_size
self.hist_size = hist_size
# self.initial_exploration = 10
self.initial_exploration = initial_exploration
self.dim = dim
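# Replay buffer: five parallel arrays holding (state, action, reward,
# next state, episode-end flag) for the most recent data_size steps.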
self.d = [np.zeros((self.data_size, self.hist_size, self.dim), dtype=np.uint8),
np.zeros(self.data_size, dtype=np.uint8),
np.zeros((self.data_size, 1), dtype=np.int8),
np.zeros((self.data_size, self.hist_size, self.dim), dtype=np.uint8),
np.zeros((self.data_size, 1), dtype=np.bool)]
def stock(self, time, state, action, reward, state_dash, episode_end_flag):
data_index = time % self.data_size
if episode_end_flag is True:
self.d[0][data_index] = state
self.d[1][data_index] = action
self.d[2][data_index] = reward
else:
self.d[0][data_index] = state
self.d[1][data_index] = action
self.d[2][data_index] = reward
self.d[3][data_index] = state_dash
self.d[4][data_index] = episode_end_flag
def replay(self, time):
replay_start = False
if self.initial_exploration < time:
replay_start = True
# Pick up replay_size number of samples from the Data
if time < self.data_size: # during the first sweep of the History Data
replay_index = np.random.randint(0, time, (self.replay_size, 1))
else:
replay_index = np.random.randint(0, self.data_size, (self.replay_size, 1))
s_replay = np.ndarray(shape=(self.replay_size, self.hist_size, self.dim), dtype=np.float32)
a_replay = np.ndarray(shape=(self.replay_size, 1), dtype=np.uint8)
r_replay = np.ndarray(shape=(self.replay_size, 1), dtype=np.float32)
s_dash_replay = np.ndarray(shape=(self.replay_size, self.hist_size, self.dim), dtype=np.float32)
episode_end_replay = np.ndarray(shape=(self.replay_size, 1), dtype=np.bool)
for i in range(self.replay_size):
s_replay[i] = np.asarray(self.d[0][replay_index[i]], dtype=np.float32)
a_replay[i] = self.d[1][replay_index[i]]
r_replay[i] = self.d[2][replay_index[i]]
s_dash_replay[i] = np.array(self.d[3][replay_index[i]], dtype=np.float32)
episode_end_replay[i] = self.d[4][replay_index[i]]
if self.use_gpu >= 0:
s_replay = cuda.to_gpu(s_replay)
s_dash_replay = cuda.to_gpu(s_dash_replay)
return replay_start, s_replay, a_replay, r_replay, s_dash_replay, episode_end_replay
else:
return replay_start, 0, 0, 0, 0, False
def end_episode(self, time, last_state, action, reward):
self.stock(time, last_state, action, reward, last_state, True)
replay_start, s_replay, a_replay, r_replay, s_dash_replay, episode_end_replay = \
self.replay(time)
return replay_start, s_replay, a_replay, r_replay, s_dash_replay, episode_end_replay

ava-project/ava-website | website/apps/user/migrations/0003_emailvalidationtoken.py | Python | mit | 973 | 0.002055
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-09-16 21:51
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('user', '0002_profile_validated'),
]
operations = [
migrations.CreateModel(
name='EmailValidationToken',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('token', models.CharField(max_length=100, unique=True)),
('expire', models.DateTimeField()),
('consumed', models.BooleanField(default=False)),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]

ZerpaTechnology/AsenZor | apps/votSys/user/vistas/templates/inicio.py | Python | lgpl-3.0 | 147 | 0.020408
# -*- coding: utf-8 -*-
print '''<!DOCTYPE html><html>'''
incluir(data,"he
|
ad")
print '''<body>'''
incluir(data,"header")
print '''</body></html>'''

weblabdeusto/weblablib | weblablib/backends/__init__.py | Python | agpl-3.0 | 326 | 0
# Copyright 2017 onwards LabsLand Experimentia S.L.
# This software is licensed under the GNU AGPL v3:
# GNU Affero General Public License version 3 (see the file LICENSE)
# Read in the documentation about the license
from __future__ import unicode_literals, print_function, division
from .redis_manager import RedisManager

SimplyCo/cityproblems | cityproblems/accounts/urls.py | Python | mit | 1,489 | 0.006716
from django.conf.urls import patterns, url
urlpatterns = patterns('cityproblems.accounts.views',
url(r'^register/$', 'register', name="accounts_register"),
url(r'^profile/edit/$', 'accounts_profile_edit', name="accounts_profile_edit"),
url(r'^profile/$', 'accounts_profile_view', name="accounts_profile_view"),
url(r'^profile/(\w+)/$', 'accounts_profile_view', name="accounts_profile_view"),
url(r'^send_email_confirm_link/$', 'accounts_send_email_confirm_link', name="accounts_send_email_confirm_link"),
url(r'^send_passwd_reset_link/$', 'accounts_send_passwd_reset_link', name="accounts_send_passwd_reset_link"),
url(r'^process_email_confirm/(\d+)/$', 'accounts_process_email_confirm', name="accounts_process_email_confirm"),
url(r'^passwd_reset/(\d+)/$', 'accounts_passwd_reset', name="accounts_passwd_reset"),
url(r'^passwd_change/$', 'accounts_passwd_change', name="accounts_passwd_change"),
)
urlpatterns += patterns('',
url(r'^logout/$',
'django.contrib.auth.views.logout',
{'next_page': '/'}, name="logout"),
url(r'^login/$', 'django.contrib.auth.views.login', {'template_name': 'accounts_login.html'}, name="login"),
)

anduslim/codex | codex_project/actors/haversine.py | Python | mit | 554 | 0.001805
from math import radians, cos, sin, asin, sqrt
def haversine(lon1, lat1, lon2, lat2):
"""
Calculate the great circle distance between two points
on the earth (specified in decimal degrees)
"""
# convert decimal degrees to radians
lon1, lat1, lon2, lat2 = map(radians, [lon1, lat1, lon2, lat2])
# haversine formula
dlon = lon2 - lon1
dlat = lat2 - lat1
a = sin(dlat/2)**2 + cos(lat1) * cos(lat2) * sin(dlon/2)**2
c = 2 * asin(sqrt(a))
# 6367 km is the radius of the Earth
km = 6367 * c
return km
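# Quick sanity check (assumed example, not from the original file):
# Paris (2.3522 E, 48.8566 N) to London (-0.1276 E, 51.5072 N) is
# roughly 343 km:
#   print(haversine(2.3522, 48.8566, -0.1276, 51.5072))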

SpotOnInc/espwrap | tests/test_sendgrid_v3.py | Python | mit | 6,880 | 0.003351
# -*- coding: utf-8 -*-
from __future__ import print_function, division, unicode_literals, absolute_import
import sys
try:
from unittest.mock import Mock, patch
except ImportError:
from mock import Mock, patch
import pytest
from espwrap.base import batch
from espwrap.adaptors.sendgrid_v3 import SendGridMassEmail, _HTTP_EXC_MSG
from espwrap.adaptors.sendgrid_common import breakdown_recipients
from python_http_client.exceptions import BadRequestsError # this is a dependency of sendgrid-python
if sys.version_info < (3,):
range = xrange
API_KEY = 'unit_test'
def test_breakdown_recipients():
me = SendGridMassEmail(API_KEY)
# This function will split straight up duplicates
me.add_recipient(name='Test', email='test@test.com')
me.add_recipient(name='Test', email='test@test.com')
# This function will split aliases we've already seen
# the base of
me.add_recipient(name='Test', email='test+1@test.com')
broken = breakdown_recipients(me.get_recipients())
# So it should send as three separate batches
assert len(broken) == 3
def test_delimiters():
me = SendGridMassEmail(API_KEY)
start = '*|'
end = '|*'
me.set_variable_delimiters(start, end)
delimiters = me.get_variable_delimiters(True)
assert delimiters.get('start') == start
assert delimiters.get('end') == end
assert me.get_variable_delimiters() == (start, end)
def test_add_tags():
me = SendGridMassEmail(API_KEY)
with pytest.raises(Exception) as err:
me.add_tags(*[str(tag) for tag in range(11)])
assert 'Too many tags' in str(err)
me.add_tags(*[str(tag) for tag in range(9)])
with pytest.raises(Exception) as err:
me.add_tags(*['foo', 'bar'])
assert 'limit' in str(err)
me.add_tags('tenth')
def test_message_construction():
me = SendGridMassEmail(API_KEY)
template_name = 'test template'
ip_pool = 'testPool'
company_name = 'UnitTest Spam Corp the Second'
tags = ['unit', 'test', 'for', 'the', 'win']
webhook_data = {
'm_id': '56f2c1341a89ddc8c04d5407',
'env': 'local',
'p_id': '56f2c1571a89ddc8c04d540a',
}
me.set_reply_to_addr('custsupport@spam.com')
me.set_from_addr('donotreply@spam.com')
me.add_recipients([
{
'name': 'Josh',
'email': 'spam@spam.com',
'merge_vars': {
'CUSTOMER_NAME': 'Josh',
},
},
{
'name': 'Jim',
'email': 'spam2@spam.com',
'merge_vars': {
'CUSTOMER_NAME': 'Jim',
'SOMETHING_UNIQUE': 'tester',
},
},
{
'name': '姓',
'email': 'test@test.com',
'merge_vars': {
'CUSTOMER_NAME': '姓',
'SOMETHING_UNIQUE': '独特'
}
}
])
me.add_global_merge_vars(COMPANY_NAME=company_name)
me.set_variable_delimiters('*|', '|*')
me.set_ip_pool(ip_pool)
me.set_template_name(template_name)
me.enable_click_tracking()
me.enable_open_tracking()
me.set_webhook_data(webhook_data)
me.add_tags(*tags)
me.set_subject('test subject')
delims = me.get_variable_delimiters()
grouped_recipients = batch(list(me.recipients), me.partition)
for grp in grouped_recipients:
# Note: The order of recipients in this test case is reversed from what's listed above
to_send = breakdown_recipients(grp)
message = me.message_constructor(to_send)
message_dict = message.get()
print (message_dict)
assert set(message_dict['categories']) == set(tags)
assert message_dict['tracking_settings']['open_tracking']['enable'] == True
assert message_dict['tracking_settings']['click_tracking']['enable'] == True
print(message_dict['personalizations'])
assert message_dict['personalizations'][0]['to'][0]['name'] == '姓'
assert message_dict['personalizations'][0]['to'][0]['email'] == 'test@test.com'
assert message_dict['personalizations'][1]['to'][0]['name'] == 'Jim'
assert message_dict['personalizations'][1]['to'][0]['email'] == 'spam2@spam.com'
assert message_dict['personalizations'][2]['to'][0]['name'] == 'Josh'
assert message_dict['personalizations'][2]['to'][0]['email'] == 'spam@spam.com'
company_name_key = delims[0] + 'COMPANY_NAME' + delims[1]
assert message_dict['personalizations'][0]['substitutions'][company_name_key] == 'UnitTest Spam Corp the Second'
assert message_dict['personalizations'][1]['substitutions'][company_name_key] == 'UnitTest Spam Corp the Second'
customer_name_key = delims[0] + 'CUSTOMER_NAME' + delims[1]
# assert message_dict['personalizations'][0]['substitutions'][customer_name_key] == '姓'
assert message_dict['personalizations'][1]['substitutions'][customer_name_key] == 'Jim'
assert message_dict['personalizations'][2]['substitutions'][customer_name_key] == 'Josh'
something_unique_key = delims[0] + 'SOMETHING_UNIQUE' + delims[1]
# assert message_dict['personalizations'][0]['substitutions'][something_unique_key] == '独特'
assert something_unique_key not in message_dict['personalizations'][2]['substitutions'].keys()
assert message_dict['ip_pool_name'] == ip_pool
assert message_dict['custom_args']['template_name'] == template_name
def test_send_error_400(caplog):
"""
Test the handling of HTTP 400 Bad Request responses. The Sendgrid V3 API will return data
along with a 400 response that has details on why it was rejected. Make sure this data
makes it back to the caller, pretty pretty please.
"""
subject = 'subject'
resp_status_code = 400
resp_reason = 'main reason for error'
resp_body = 'the body of the response'
me = SendGridMassEmail(API_KEY)
me.subject = subject
me.from_addr = 'noreply@mailinator.com'
me.add_recipient(email='recip@mailinator.com')
with patch('sendgrid.SendGridAPIClient.send') as mock_send:
error = Mock()
error.code = resp_status_code
error.reason = resp_reason
error.read = lambda: resp_body
mock_send.side_effect = BadRequestsError(error)
me.send()
assert mock_send.called, 'It should have made it to the send method in the sendgrid lib.'
assert len(caplog.record_tuples) == 1, 'There should a log message in the exception block.'
severity = caplog.record_tuples[0][1]
msg = caplog.record_tuples[0][2]
assert severity == 40, 'The log should be an Error level log.'
assert msg == _HTTP_EXC_MSG % (subject, resp_status_code, resp_reason, resp_body),\
'The log message should contain details from the response.'

PyQwt/PyQwt5 | Doc/sourceforge.py | Python | gpl-2.0 | 990 | 0.005051
#!/usr/bin/env python
import os
import re
import sys
group_id = '142394'
def stamp(html):
"""Stamp a Python HTML documentation page with the SourceForge logo"""
def replace(m):
return ('<span class="release-info">%s '
'Hosted on <a href="http://sourc
|
eforge.net">'
'<img src="http://sourceforge.net/'
'sflogo.php?group_id=%s&type=1" width="88" height="31"'
'border="0" alt="SourceForge Logo"></a></span>'
% (m.group(1), group_id))
mailRe = re.compile(r'<span class="release-info">(.*)</span>')
## m = mailRe.search(html)
## if m:
## print m.groups()
return re.sub(mailRe, replace, html)
# stamp()
if __name__ == '__main__':
for name in sys.argv[1:]:
html = open(name, 'r').read()
text = stamp(html)
if text != html:
os.remove(name)
file = open(name, 'w')
file.write(text)
file.close()

david30907d/feedback_django | example/get_feedback/migrations/0002_auto_20160519_0326.py | Python | mit | 1,184 | 0.000845
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('get_feedback', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='course',
name='feedback_amount',
field=models.DecimalField(default=0, decimal_places=0, max_digits=10),
),
migrations.AlterField(
model_name='course',
name='feedback_FU',
field=models.FloatField(),
),
migrations.AlterField(
model_name='course',
name='feedback_GPA',
field=models.FloatField(),
),
migrations.AlterField(
model_name='course',
name='feedback_easy',
field=models.FloatField(),
),
migrations.AlterField(
model_name='course',
name='feedback_freedom',
field=models.FloatField(),
),
migrations.AlterField(
model_name='course',
name='feedback_knowledgeable',
field=models.FloatField(),
),
]

metzlar/cms-form-plugin | cms_form_plugin/migrations/0001_initial.py | Python | gpl-2.0 | 3,411 | 0.007916
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'FormPlugin'
db.create_table(u'cmsplugin_formplugin', (
(u'cmsplugin_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['cms.CMSPlugin'], unique=True, primary_key=True)),
('form_class', self.gf('django.db.models.fields.CharField')(max_length=200)),
('success_url', self.gf('django.db.models.fields.URLField')(max_length=200, null=True)),
('post_to_url', self.gf('django.db.models.fields.URLField')(max_length=200)),
))
db.send_create_signal(u'cms_form_plugin', ['FormPlugin'])
def backwards(self, orm):
# Deleting model 'FormPlugin'
db.delete_table(u'cmsplugin_formplugin')
models = {
'cms.cmsplugin': {
'Meta': {'object_name': 'CMSPlugin'},
'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.CMSPlugin']", 'null': 'True', 'blank': 'True'}),
'placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'cms.placeholder': {
'Meta': {'object_name': 'Placeholder'},
'default_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slot': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
},
u'cms_form_plugin.formplugin': {
'Meta': {'object_name': 'FormPlugin', 'db_table': "u'cmsplugin_formplugin'", '_ormbases': ['cms.CMSPlugin']},
u'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
'form_class': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'post_to_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'success_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'})
}
}
complete_apps = ['cms_form_plugin']
|
kickapoo/enhydris
|
enhydris/tests/admin/test_station.py
|
Python
|
agpl-3.0
| 16,487
| 0.00182
|
import datetime as dt
from io import StringIO
from locale import LC_CTYPE, getlocale, setlocale
from django.contrib.auth.models import Permission, User
from django.contrib.gis.geos import Point
from django.core.files.uploadedfile import SimpleUploadedFile
from django.test import TestCase, override_settings
from model_mommy import mommy
from enhydris.admin.station import LatLonField, LatLonWidget, TimeseriesInlineAdminForm
from enhydris.models import Station, Timeseries
class LatLonWidgetTestCase(TestCase):
def test_decompress_value(self):
result = LatLonWidget().decompress(Point(12.34567891234567, -23.456789123456))
self.assertAlmostEqual(result[0], 12.345679, places=13)
self.assertAlmostEqual(result[1], -23.456789, places=13)
def test_decompress_none(self):
result = LatLonWidget().decompress(None)
self.assertIsNone(result[0])
self.assertIsNone(result[1])
class LatLonFieldTestCase(TestCase):
def test_compress(self):
self.assertEqual(
LatLonField().compress([12.345678, -23.456789]),
"SRID=4326;POINT(12.345678 -23.456789)",
)
class StationPermsTestCaseBase(TestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
alice = User.objects.create_user(
username="alice", password="topsecret", is_staff=True, is_superuser=True
)
bob = User.objects.create_user(
username="bob", password="topsecret", is_staff=True, is_superuser=False
)
charlie = User.objects.create_user(
username="charlie", password="topsecret", is_staff=True, is_superuser=False
)
User.objects.create_user(
username="david", password="topsecret", is_staff=True, is_superuser=False
)
elaine = User.objects.create_user(
username="elaine", password="topsecret", is_staff=True, is_superuser=False
)
cls.azanulbizar = mommy.make(
Station, name="Azanulbizar", creator=bob, maintainers=[]
)
cls.barazinbar = mommy.make(
Station, name="Barazinbar", creator=bob, maintainers=[charlie]
)
cls.calenardhon = mommy.make(
Station, name="Calenardhon", creator=alice, maintainers=[]
)
po = Permission.objects
elaine.user_permissions.add(po.get(codename="add_station"))
elaine.user_permissions.add(po.get(codename="change_station"))
elaine.user_permissions.add(po.get(codename="delete_station"))
class CommonTests:
"""Tests that will run both for ENHYDRIS_USERS_CAN_ADD_CONTENT=True and False.
    Below we have two TestCase subclasses (actually StationPermsTestCaseBase
subclasses); one of them overrides setting ENHYDRIS_USERS_CAN_ADD_CONTENT to True,
and the other one to False. This is a mixin containing tests that should have the
same results in both cases.
"""
def test_station_list_has_all_stations_for_superuser(self):
self.client.login(username="alice", password="topsecret")
response = self.client.get("/admin/enhydris/station/")
self.assertEqual(response.status_code, 200)
self.assertContains(response, "Azanulbizar")
self.assertContains(response, "Barazinbar")
self.assertContains(response, "Calenardhon")
def test_station_list_has_all_stations_for_user_with_model_permissions(self):
self.client.login(username="elaine", password="topsecret")
response = self.client.get("/admin/enhydris/station/")
self.assertEqual(response.status_code, 200)
self.assertContains(response, "Azanulbizar")
self.assertContains(response, "Barazinbar")
self.assertContains(response, "Calenardhon")
def test_station_list_has_nothing_when_user_does_not_have_permissions(self):
self.client.login(username="david", password="topsecret")
response = self.client.get("/admin/enhydris/station/")
self.assertEqual(response.status_code, 200)
self.assertNotContains(response, "Azanulbizar")
self.assertNotContains(response, "Barazinbar")
self.assertNotContains(response, "Calenardhon")
def test_station_detail_is_inaccessible_if_user_does_not_have_perms(self):
self.client.login(username="david", password="topsecret")
response = self.client.get(
"/admin/enhydris/station/{}/change/".format(self.barazinbar.id)
)
self.assertEqual(response.status_code, 302)
@override_settings(ENHYDRIS_USERS_CAN_ADD_CONTENT=True)
class StationPermsTestCaseWhenUsersCanAddContent(StationPermsTestCaseBase, CommonTests):
def test_station_list_has_created_stations(self):
self.client.login(username="bob", password="topsecret")
response = self.client.get("/admin/enhydris/station/")
self.assertEqual(response.status_code, 200)
self.assertContains(response, "Azanulbizar")
self.assertContains(response, "Barazinbar")
self.assertNotContains(response, "Calenardhon")
def test_station_list_has_maintained_stations(self):
self.client.login(username="charlie", password="topsecret")
response = self.client.get("/admin/enhydris/station/")
self.assertEqual(response.status_code, 200)
self.assertNotContains(response, "Azanulbizar")
self.assertContains(response, "Barazinbar")
self.assertNotContains(response, "Calenardhon")
def test_station_detail_has_creator_and_maintainers_for_superuser(self):
self.client.login(username="alice", password="topsecret")
response = self.client.get(
"/admin/enhydris/station/{}/change/".format(self.azanulbizar.id)
)
self.assertEqual(response.status_code, 200)
self.assertContains(response, "Creator")
self.assertContains(response, "Maintainers")
def test_station_detail_has_creator_and_maintainers_for_user_with_model_perms(self):
self.client.login(username="elaine", password="topsecret")
response = self.client.get(
"/admin/enhydris/station/{}/change/".format(self.azanulbizar.id)
)
self.assertEqual(response.status_code, 200)
self.assertContains(response, "Creator")
self.assertContains(response, "Maintainers")
def test_station_detail_has_only_maintainers_for_creator(self):
self.client.login(username="bob", password="topsecret")
response = self.client.get(
"/admin/enhydris/station/{}/change/".format(self.azanulbizar.id)
)
self.assertEqual(response.status_code, 200)
self.assertNotContains(response, "Creator")
self.assertContains(response, "Maintainers")
def test_station_detail_has_neither_creator_nor_maintainers_for_maintainer(self):
self.client.login(username="charlie", password="topsecret")
response = self.client.get(
"/admin/enhydris/station/{}/change/".format(self.barazinbar.id)
)
self.assertEqual(response.status_code, 200)
self.assertNotContains(response, "Creator")
self.assertNotContains(response, "Maintainers")
def test_add_station_has_creator_and_maintainers_for_superuser(self):
self.client.login(username="alice", password="topsecret")
response = self.client.get("/admin
|
/enhydris/station/add/")
self.assertEqual(response.status_code, 200)
self.assertContains(response, "Creator")
self.assertContains(response, "Maintainers")
def test_add_station_has_creator_and_maintainers_for_user_with_model_perms(self):
self.client.login(username="elaine", password="topsecret")
response = self.client.get("/admin/enhydris/station/add/")
        self.assertEqual(response.status_code, 200)
self.assertContains(response, "Creator")
self.assertContains(response, "Maintainers")
def test_add_station_has_only_maintainers_for_user_without_model_perms(self):
self.client.login(username="bob", password="topsecret")
response = self.client.get("/admin/enhydris/station/add/")
self.assertEqual(response.status_code, 200)
|
eljost/pysisyphus
|
tests/test_irc/test_irc.py
|
Python
|
gpl-3.0
| 7,603
| 0.000921
|
#!/usr/bin/env python3
from pathlib import Path
import matplotlib.pyplot as plt
import numpy as np
import pytest
from pysisyphus.calculators.AnaPot import AnaPot
from pysisyphus.calculators.MullerBrownSympyPot import MullerBrownPot
from pysisyphus.calculators.PySCF import PySCF
from pysisyphus.calculators import Gaussian16, Turbomole
from pysisyphus.constants import BOHR2ANG
from pysisyphus.helpers import geom_loader
from pysisyphus.irc import *
from pysisyphus.testing import using
@pytest.fixture
def this_dir(request):
return Path(request.module.__file__).parents[0]
def assert_anapot_irc(irc):
fc = irc.all_coords[0]
bc = irc.all_coords[-1]
forward_ref = np.array((-1.0527, 1.0278, 0.))
backward_ref = np.array((1.941, 3.8543, 0.))
forward_diff = np.linalg.norm(fc - forward_ref)
backward_diff = np.linalg.norm(bc - backward_ref)
assert forward_diff == pytest.approx(0.05, abs=0.1)
assert backward_diff == pytest.approx(0.05, abs=0.1)
def plot_irc(irc, title=None):
geom = irc.geometry
calc = geom.calculator
levels = np.linspace(-3, 4, 120)
calc.plot()
ax = calc.ax
ax.plot(*irc.all_coords.T[:2], "ro-")
if title:
ax.set_title(title)
plt.show()
@pytest.mark.parametrize(
"irc_cls, mod_kwargs, ref", [
(DampedVelocityVerlet, {"v0": 0.1, "max_cycles": 400,}, None),
(Euler, {"step_length": 0.05,}, None),
(EulerPC, {}, None),
(GonzalezSchlegel, {}, None),
(IMKMod, {}, None),
(RK4, {}, None),
(LQA, {}, None),
]
)
def test_anapot_irc(irc_cls, mod_kwargs, ref):
geom = AnaPot().get_geom((0.61173, 1.49297, 0.))
kwargs = {
"step_length": 0.1,
"rms_grad_thresh": 1e-2,
}
kwargs.update(**mod_kwargs)
irc = irc_cls(geom, **kwargs)
irc.run()
# geom.calculator.plot_irc(irc, show=True)
assert_anapot_irc(irc)
@pytest.mark.parametrize(
"step_length", [
(0.1),
(0.2),
(0.3),
(0.4),
]
)
def test_imk(step_length):
geom = AnaPot().get_geom((0.61173, 1.49297, 0.))
irc_kwargs = {
"step_length": step_length,
"rms_grad_thresh": 1e-2,
"corr_first": True,
"corr_first_energy": True,
"corr_bisec_size": 0.0025,
"corr_bisec_energy": True,
}
irc = IMKMod(geom, **irc_kwargs)
irc.run()
# plot_irc(irc, irc.__class__.__name__)
assert_anapot_irc(irc)
@pytest.mark.parametrize(
"calc_cls, kwargs_", [
pytest.param(PySCF,
{"basis": "321g", },
marks=using("pyscf")
),
pytest.param(Gaussian16,
{"route": "HF/3-21G"},
marks=using("gaussian16")
),
pytest.param(Turbomole,
{"control_path": "./hf_abstr_control_path", "pal": 1},
marks=using("turbomole")
),
]
)
def test_hf_abstraction_dvv(calc_cls, kwargs_, this_dir):
geom = geom_loader("lib:hfabstraction_hf321g_displ_forward.xyz")
calc_kwargs = {
"pal": 2,
}
calc_kwargs.update(kwargs_)
if "control_path" in calc_kwargs:
calc_kwargs["control_path"] = this_dir / calc_kwargs["control_path"]
print("Using", calc_cls)
calc = calc_cls(**calc_kwargs)
geom.set_calculator(calc)
irc_kwargs = {
"dt0": 0.5,
"v0": 0.04,
"downhill": True,
"max_cycles": 150,
}
dvv = DampedVelocityVerlet(geom, **irc_kwargs)
dvv.run()
c3d = geom.coords3d * BOHR2ANG
def bond(i,j): return np.linalg.norm(c3d[i]-c3d[j])
assert bond(2, 7) == pytest.approx(0.93, abs=0.01)
assert bond(4, 7) == pytest.approx(2.42, abs=0.01)
assert bond(2, 0) == pytest.approx(2.23, abs=0.01)
@using("pyscf")
@pytest.mark.parametrize(
"irc_cls, irc_kwargs, fw_cycle, bw_cycle",
[
(EulerPC, {"hessian_recalc": 10, "dump_dwi": False,}, 30, 38),
# (EulerPC, {"hessian_recalc": 10, "corr_func": "scipy",}, 19, 23),
]
)
def test_hcn_irc(irc_cls, irc_kwargs, fw_cycle, bw_cycle):
geom = geom_loader("lib:hcn_iso_hf_sto3g_ts_opt.xyz")
calc = PySCF(
basis="sto3g",
)
geom.set_calculator(calc)
irc = irc_cls(geom, **irc_kwargs, rms_grad_thresh=1e-4)
irc.run()
# approx. +- 0.5 kJ/mol
ref_energies = [pytest.approx(en) for en in (-91.6444238, -91.67520895)]
assert irc.forward_energies[0] in ref_energies
assert irc.backward_energies[-1] in ref_energies
@pytest.mark.parametrize(
"scipy_method",
[
(None),
("RK45"),
("DOP853"),
]
)
def test_eulerpc_scipy(scipy_method):
geom = AnaPot().get_geom((0.61173, 1.49297, 0.))
kwargs = {
"step_length": 0.2,
"rms_grad_thresh": 1e-2,
"corr_func": "scipy",
"scipy_method": scipy_method,
}
irc = EulerPC(geom, **kwargs)
irc.run()
# plot_irc(irc, irc.__class__.__name__)
assert_anapot_irc(irc)
@using("pyscf")
@pytest.mark.parametrize(
"hessian_init, ref_cycle", [
("calc", 28),
pytest.param("fischer", 0, marks=pytest.mark.xfail),
pytest.param("unit", 0, marks=pytest.mark.xfail),
("lindh", 28),
("simple", 28),
("swart", 28),
]
)
def test_downhill_irc_model_hessian(hessian_init, ref_cycle):
geom = geom_loader("lib:hcn_downhill_model_hessian.xyz")
calc = PySCF(basis="sto3g", pal=2)
geom.set_calculator(calc)
irc_kwargs = {
"hessian_init": hessian_init,
"rms_grad_thresh": 5e-3,
"downhill": True,
}
irc = EulerPC(geom, **irc_kwargs)
irc.run()
assert irc.downhill_energies[-1] == pytest.approx(-91.67517096968325)
assert irc.downhill_cycle == ref_cycle
# @pytest.mark.skip()
@pytest.mark.parametrize(
"step_length", [
0.1,
0.2,
0.3,
# 0.4 # requires hessian_recalc=1
]
)
def test_mb_gs2(step_length):
calc = MullerBrownPot()
geom = calc.get_saddle(0)
irc_kwargs = {
"step_length": step_length,
"line_search": False,
# "hessian_recalc": 1,
}
irc = GonzalezSchlegel(geom, **irc_kwargs)
irc.run()
# calc.plot_irc(irc, show=True, title=f"length {step_length:.2f}")
assert irc.forward_is_converged
assert irc.backward_is_converged
@using("pyscf")
@pytest.mark.parametrize(
"step_length", [
0.1,
0.2,
0.3,
# 0.4, # sometimes fails in the CI
]
)
def test_hcn_iso_gs2(step_length):
    geom = geom_loader("lib:hcn_iso_hf_sto3g_ts_opt.xyz")
calc = PySCF(basis="sto3g", verbose=0)
geom.set_calculator(calc)
irc_kwargs = {
"step_length": step_length,
"displ_energy": 0.0005,
}
irc = GonzalezSchlegel(geom, **irc_kwargs)
irc.run()
assert irc.forward_is_converged
assert irc.backward_is_converged
@pytest.mark.parametrize(
"step_length", [
0.1,
0.2,
# 0.3,
# 0.4,
]
)
def test_mb_eulerpc(step_length):
calc = MullerBrownPot()
geom = calc.get_saddle(0)
irc_kwargs = {
"step_length": step_length,
# Using Scipy here takes forever...
# "corr_func": "scipy",
# "scipy_method": "BDF",
}
irc = EulerPC(geom, **irc_kwargs)
irc.run()
# calc.plot_irc(irc, show=True, title=f"length {step_length:.2f}")
forward_coords = irc.all_coords[0]
backward_coords = irc.all_coords[-1]
assert np.linalg.norm(forward_coords - (-0.558, 1.441, 0.0)) <= 2e-2
assert np.linalg.norm(backward_coords - (-0.050, 0.466, 0.0)) <= 5e-3
|
HydrelioxGitHub/home-assistant
|
homeassistant/components/tradfri/__init__.py
|
Python
|
apache-2.0
| 3,856
| 0
|
"""Support for IKEA Tradfri."""
import logging
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import EVENT_HOMEASSISTANT_STOP
import homeassistant.helpers.config_validation as cv
from homeassistant.util.json import load_json
from .const import (
CONF_IMPORT_GROUPS, CONF_IDENTITY, CONF_HOST, CONF_KEY, CONF_GATEWAY_ID)
from . import config_flow  # noqa  pylint: disable=unused-import
REQUIREMENTS = ['pytradfri[async]==6.0.1']
_LOGGER = logging.getLogger(__name__)
DOMAIN = 'tradfri'
CONFIG_FILE = '.tradfri_psk.conf'
KEY_GATEWAY = 'tradfri_gateway'
KEY_API = 'tradfri_api'
CONF_ALLOW_TRADFRI_GROUPS = 'allow_tradfri_groups'
DEFAULT_ALLOW_TRADFRI_GROUPS = False
CONFIG_SCHEMA = vol.Schema({
DOMAIN: vol.Schema({
vol.Optional(CONF_HOST): cv.string,
vol.Optional(CONF_ALLOW_TRADFRI_GROUPS,
default=DEFAULT_ALLOW_TRADFRI_GROUPS): cv.boolean,
})
}, extra=vol.ALLOW_EXTRA)
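# A minimal configuration.yaml sketch implied by CONFIG_SCHEMA above
# (the host value is illustrative, not taken from this module):
#
#     tradfri:
#       host: 192.168.1.2
#       allow_tradfri_groups: false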
async def async_setup(hass, config):
"""Set up the Tradfri component."""
conf = config.get(DOMAIN)
if conf is None:
return True
configured_hosts = [entry.data['host'] for entry in
hass.config_entries.async_entries(DOMAIN)]
legacy_hosts = await hass.async_add_executor_job(
load_json, hass.config.path(CONFIG_FILE))
for host, info in legacy_hosts.items():
if host in configured_hosts:
continue
info[CONF_HOST] = host
info[CONF_IMPORT_GROUPS] = conf[CONF_ALLOW_TRADFRI_GROUPS]
hass.async_create_task(hass.config_entries.flow.async_init(
DOMAIN, context={'source': config_entries.SOURCE_IMPORT},
data=info
))
host = conf.get(CONF_HOST)
import_groups = conf[CONF_ALLOW_TRADFRI_GROUPS]
if host is None or host in configured_hosts or host in legacy_hosts:
return True
hass.async_create_task(hass.config_entries.flow.async_init(
DOMAIN, context={'source': config_entries.SOURCE_IMPORT},
data={CONF_HOST: host, CONF_IMPORT_GROUPS: import_groups}
))
return True
async def async_setup_entry(hass, entry):
"""Create a gateway."""
# host, identity, key, allow_tradfri_groups
from pytradfri import Gateway, RequestError # pylint: disable=import-error
from pytradfri.api.aiocoap_api import APIFactory
factory = APIFactory(
entry.data[CONF_HOST],
psk_id=entry.data[CONF_IDENTITY],
psk=entry.data[CONF_KEY],
loop=hass.loop
)
async def on_hass_stop(event):
"""Close connection when hass stops."""
await factory.shutdown()
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, on_hass_stop)
api = factory.request
gateway = Gateway()
try:
gateway_info = await api(gateway.get_gateway_info())
except RequestError:
_LOGGER.error("Tradfri setup failed.")
return False
hass.data.setdefault(KEY_API, {})[entry.entry_id] = api
hass.data.setdefault(KEY_GATEWAY, {})[entry.entry_id] = gateway
dev_reg = await hass.helpers.device_registry.async_get_registry()
dev_reg.async_get_or_create(
config_entry_id=entry.entry_id,
connections=set(),
identifiers={
(DOMAIN, entry.data[CONF_GATEWAY_ID])
},
manufacturer='IKEA',
name='Gateway',
# They just have 1 gateway model. Type is not exposed yet.
model='E1526',
sw_version=gateway_info.firmware_version,
)
hass.async_create_task(hass.config_entries.async_forward_entry_setup(
entry, 'light'
))
hass.async_create_task(hass.config_entries.async_forward_entry_setup(
entry, 'sensor'
))
hass.async_create_task(hass.config_entries.async_forward_entry_setup(
entry, 'switch'
))
return True
|
lmazuel/azure-sdk-for-python
|
azure-mgmt-web/azure/mgmt/web/models/reissue_certificate_order_request.py
|
Python
|
mit
| 2,522
| 0.00119
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .proxy_only_resource import ProxyOnlyResource
class ReissueCertificateOrderRequest(ProxyOnlyResource):
"""Class representing certificate reissue request.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
    :ivar type: Resource type.
:vartype type: str
:param key_size: Certificate Key Size.
:type key_size: int
:param delay_existing_revoke_in_hours: Delay in hours to revoke existing
certificate after the new certificate is issued.
:type delay_existing_revoke_in_hours: int
:param csr: Csr to be used for re-key operation.
:type csr: str
:param is_private_key_external: Should we change the ASC type (from
managed private key to external private key and vice versa).
:type is_private_key_external: bool
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'key_size': {'key': 'properties.keySize', 'type': 'int'},
'delay_existing_revoke_in_hours': {'key': 'properties.delayExistingRevokeInHours', 'type': 'int'},
'csr': {'key': 'properties.csr', 'type': 'str'},
'is_private_key_external': {'key': 'properties.isPrivateKeyExternal', 'type': 'bool'},
}
def __init__(self, kind=None, key_size=None, delay_existing_revoke_in_hours=None, csr=None, is_private_key_external=None):
super(ReissueCertificateOrderRequest, self).__init__(kind=kind)
self.key_size = key_size
self.delay_existing_revoke_in_hours = delay_existing_revoke_in_hours
self.csr = csr
self.is_private_key_external = is_private_key_external
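# A minimal usage sketch (the argument values are illustrative, not taken
# from the service documentation):
#
#     request = ReissueCertificateOrderRequest(
#         key_size=2048,
#         delay_existing_revoke_in_hours=24,
#         is_private_key_external=False,
#     )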
|
illagrenan/gtask-client
|
fabfile.py
|
Python
|
mit
| 2,356
| 0.000424
|
# -*- encoding: utf-8 -*-
from __future__ import unicode_literals
import os
import configparser
from fabric.context_managers import cd
from fabric.decorators import task
from fabric.api import env
from fabric.operations import put, run, local
from fabric.utils import abort
from zip_dir.utils import create_zip_archive
ini_parser = configparser.ConfigParser()
# Example of "deploy.ini"
# =======================
# [remote]
# host : 80.xxx.xxx.xx
# user : john
# key_filename : ~/.ssh/id_rsa.private
ini_parser.read("deploy.ini")
remote_section = ini_parser["remote"]
env.hosts = [remote_section["host"]]
env.user = remote_section["user"]
env.key_filename = os.path.normpath(remote_section["key_filename"])
APP_BASE_DIR = '/var/www/gtasksapp_com/www/app'
DIST_ZIP_FILENAME = "dist.zip"
DIST_DIRECTORY_NAME = "dist"
def create_tmp_if_doesnt_exist():
if not os.path.isdir(".tmp"):
os.mkdir(".tmp")
@task()
def build_app():
local("grunt")
@task()
def grunt_clean():
local("grunt clean")
@task(alias='app')
def deploy_app():
"""Deploy app"""
create_tmp_if_doesnt_exist()
current_path = os.path.dirname(os.path.realpath(__file__))
dist_path = os.path.join(current_path, DIST_DIRECTORY_NAME)
    if not os.path.isdir(dist_path) or not os.listdir(dist_path):
abort("Dist path doesn't exist or dist directory is empty")
create_zip_archive(dist_path, os.path.join(".tmp", DIST_ZIP_FILENAME))
run("mkdir -p {0}".format(APP_BASE_DIR))
put(os.path.join(".tmp", DIST_ZIP_FILENAME), APP_BASE_DIR)
with cd(APP_BASE_DIR):
run("unzip -o {0}".format(DIST_ZIP_FILENAME))
run("rm {0}".format(DIST_ZIP_FILENAME))
grunt_clean()
@task(alias='landing')
def deploy_landing_page():
"""Deploy landing page"""
create_tmp_if_doesnt_exist()
current_path = os.path.dirname(os.path.realpath(__file__))
dist_path = os.path.join(current_path, "landing_page")
create_zip_archive(dist_path, ".tmp/landing_page.zip")
put(".tmp/landing_page.zip", "/var/www/gtasksapp_com/www/")
with cd("/var/www/gtasksapp_com/www/"):
run("unzip -o {0}".format("landing_page.zip"))
run("rm {0}".format("landing_page.zip"))
grunt_clean()
@task(alias='all')
def deploy_all():
"""Deploy all"""
build_app()
deploy_app()
deploy_landing_page()
|
ezequielpereira/Time-Line
|
timelinelib/wxgui/dialogs/editevent/view.py
|
Python
|
gpl-3.0
| 12,695
| 0.000788
|
# Copyright (C) 2009, 2010, 2011, 2012, 2013, 2014, 2015 Rickard Lindberg, Roger Lindberg
#
# This file is part of Timeline.
#
# Timeline is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Timeline is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Timeline. If not, see <http://www.gnu.org/licenses/>.
import wx
from timelinelib.db.utils import safe_locking
from timelinelib.repositories.dbwrapper import DbWrapperEventRepository
from timelinelib.wxgui.dialogs.editcontainer.view import EditContainerDialog
from timelinelib.wxgui.dialogs.editevent.controller import EditEventDialogController
from timelinelib.wxgui.framework import Dialog
from timelinelib.wxgui.utils import _set_focus_and_select
import timelinelib.wxgui.utils as gui_utils
class EditEventDialog(Dialog):
"""
<BoxSizerVertical>
<StaticBoxSizerVertical label="$(properties_label)" border="ALL" proportion="1">
<FlexGridSizer name="grid_sizer" columns="2" growableColumns="1" border="ALL" proportion="1">
%s
</FlexGridSizer>
</StaticBoxSizerVertical>
<CheckBox
name="add_more_checkbox"
label="$(add_more_label)"
border="LEFT|RIGHT|BOTTOM"
/>
<BoxSizerHorizontal border="LEFT|RIGHT|BOTTOM">
<TwoStateButton
initial_state_label="$(enlarge)"
second_state_label="$(reduce)"
event_EVT_INITIAL_STATE_CLICKED="on_enlarge_click"
event_EVT_SECOND_STATE_CLICKED="on_reduce_click"
/>
<StretchSpacer />
<DialogButtonsOkCancelSizer
event_EVT_BUTTON__ID_OK="on_ok_clicked"
/>
</BoxSizerHorizontal>
</BoxSizerVertical>
"""
TIME_DETAILS_ROW = """
<StaticText align="ALIGN_CENTER_VERTICAL" label="$(when_label)" />
<BoxSizerHorizontal>
<TimePicker
name="start_time"
time_type="$(time_type)"
config="$(config)"
/>
<Spacer />
<StaticText
label="$(to_label)"
name="to_label"
align="ALIGN_CENTER_VERTICAL"
/>
<Spacer />
<TimePicker
name="end_time"
time_type="$(time_type)"
config="$(config)"
/>
</BoxSizerHorizontal>
"""
CHECKBOX_ROW = """
<Spacer />
<FlexGridSizer rows="1">
<CheckBox
name="period_checkbox"
event_EVT_CHECKBOX="on_period_checkbox_changed"
label="$(period_checkbox_text)" />
<CheckBox
name="show_time_checkbox"
event_EVT_CHECKBOX="on_show_time_checkbox_changed"
label="$(show_time_checkbox_text)"
/>
<CheckBox
name="fuzzy_checkbox"
label="$(fuzzy_checkbox_text)"
/>
<CheckBox
name="locked_checkbox"
event_EVT_CHECKBOX="on_locked_checkbox_changed"
label="$(locked_checkbox_text)"
/>
<CheckBox
name="ends_today_checkbox"
label="$(ends_today_checkbox_text)"
/>
</FlexGridSizer>
"""
TEXT_FIELD_ROW = """
<StaticText align="ALIGN_CENTER_VERTICAL" label="$(text_label)" />
<TextCtrl name="name" />
"""
CATEGORY_LISTBOX_ROW = """
<StaticText align="ALIGN_CENTER_VERTICAL" label="$(category_label)" />
<CategoryChoice
name="category_choice"
allow_add="True"
allow_edit="True"
timeline="$(db)"
align="ALIGN_LEFT"
/>
"""
CONTAINER_LISTBOX_ROW = """
<StaticText align="ALIGN_CENTER_VERTICAL" label="$(container_label)" />
<ContainerChoice
name="container_choice"
event_EVT_CONTAINER_CHANGED="on_container_changed"
db="$(db)"
align="ALIGN_LEFT"
/>
"""
NOTEBOOK_ROW = """
<Spacer />
<Notebook name="notebook" style="BK_DEFAULT">
<DescriptionEditor
name="description"
notebookLabel="$(page_description)"
editor="$(self)"
proportion="1"
/>
<IconEditor
name="icon"
notebookLabel="$(page_icon)"
editor="$(self)"
proportion="1"
/>
<AlertEditor
name="alert"
notebookLabel="$(page_alert)"
editor="$(self)"
proportion="1"
/>
<HyperlinkEditor
name="hyperlink"
notebookLabel="$(page_hyperlink)"
editor="$(self)"
proportion="1"
/>
<ProgressEditor
name="progress"
notebookLabel="$(page_progress)"
editor="$(self)"
proportion="1"
/>
</Notebook>
"""
def __init__(self, parent, config, title, db, start=None, end=None, event=None):
self.timeline = db
self.config = config
self.start = start
self.event = event
self._insert_rows_in_correct_order_in_xml()
Dialog.__init__(self, EditEventDialogController, parent, {
"self": self,
"db": db,
"time_type": db.get_time_type(),
"config": config,
"properties_label": _("Event Properties"),
"when_label": _("When:"),
"period_checkbox_text": _("Period"),
"show_time_checkbox_text": _("Show time"),
"fuzzy_checkbox_text": _("Fuzzy"),
"locked_checkbox_text": _("Locked"),
"e
|
nds_today_checkbox_text": _("Ends today"),
"to_label": _("to"),
"text_label": _("Text:"),
"category_label": _("Category:"),
"container_label": _("Container:"),
"page_description": _("Description"),
"page_icon": _("Icon"),
"page_alert": _("Alert"),
"page_hyperlink": _("Hyperlink"),
"page_progress": _("Pro
|
gress"),
"add_more_label": _("Add more events after this one"),
"enlarge": _("&Enlarge"),
"reduce": _("&Reduce"),
}, title=title, style=wx.DEFAULT_DIALOG_STYLE | wx.RESIZE_BORDER)
self.controller.on_init(
config,
db.get_time_type(),
DbWrapperEventRepository(db),
db,
start,
end,
event)
self._make_row_with_notebook_growable()
self.SetMinSize((800, -1))
self.Fit()
self.SetMinSize(self.GetSize())
def GetStart(self):
return self.start_time.get_value()
def SetStart(self, value):
self.start_time.set_value(value)
def GetEnd(self):
return self.end_time.get_value()
def SetEnd(self, value):
self.end_time.set_value(value)
def GetShowPeriod(self):
return self.period_checkbox.GetValue()
def SetShowPeriod(self, value):
self.period_checkbox.SetValue(value)
self.ShowToTime(value)
def ShowToTime(self, show):
self.to_label.Show(show)
self.end_time.Show(show)
def GetShowTime(self):
return self.show_time_checkbox.GetValue()
def SetShowTime(self, value):
if self.timeline.get_time_type().is_date_time_type():
self.show_time_checkbox.SetValue(value)
self.start_time.show_time(value)
|
openstack-packages/DLRN
|
dlrn/tests/test_driver_koji.py
|
Python
|
apache-2.0
| 24,497
| 0
|
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import os
import sh
import shutil
import stat
import tempfile
from dlrn.config import ConfigOptions
from dlrn import db
from dlrn.drivers.kojidriver import KojiBuildDriver
from dlrn.tests import base
from six.moves import configparser
from time import localtime
from time import strftime
def _mocked_listdir(directory):
return ['python-pysaml2-3.0-1a.el7.centos.src.rpm']
def _mocked_time():
return float(1533293385.545039)
def _mocked_call(*args, **kwargs):
if args[0] == '/usr/bin/git log':
return '1 2'
return True
@mock.patch('sh.restorecon', create=True)
@mock.patch('sh.env', create=True)
@mock.patch('os.listdir', side_effect=_mocked_listdir)
class TestDriverKoji(base.TestCase):
def setUp(self):
super(TestDriverKoji, self).setUp()
config = configparser.RawConfigParser()
config.read("projects.ini")
config.set("DEFAULT", "build_driver",
"dlrn.drivers.kojidriver.KojiBuildDriver")
self.config = ConfigOptions(config)
self.config.koji_krb_principal = 'test@example.com'
self.config.koji_krb_keytab = '/home/test/test.keytab'
self.config.koji_scratch_build = True
self.config.koji_build_target = 'build-target'
self.temp_dir = tempfile.mkdtemp()
self.config.datadir = self.temp_dir
# Create fake build log
with open("%s/kojibuild.log" % self.temp_dir, 'a') as fp:
fp.write("Created task: 1234")
# In the rhpkg case, we need to create a full dir structure
self.rhpkg_extra_dir = "%s/repos/12/34/1234567890abcdef_1_12345678"\
% self.temp_dir
os.makedirs(self.rhpkg_extra_dir)
with open("%s/rhpkgbuild.log"
% self.rhpkg_extra_dir, 'a') as fp:
fp.write("Created task: 5678")
# Another full-dir structure for the long extended hash test
self.rhpkg_extra_dir_2 = (
"%s/repos/12/34/1234567890abcdef_1_12345678_abcdefgh" %
self.temp_dir)
os.makedirs(self.rhpkg_extra_dir_2)
with open("%s/rhpkgbuild.log"
% self.rhpkg_extra_dir_2, 'a') as fp:
fp.write("Created task: 5678")
# Another full-dir structure for the long extended hash test
# with downstream driver
self.rhpkg_extra_dir_3 = (
"%s/repos/12/34/1234567890abcdef_fedcba09_1_1" %
self.temp_dir)
os.makedirs(self.rhpkg_extra_dir_3)
with open("%s/rhpkgbuild.log"
% self.rhpkg_extra_dir_3, 'a') as fp:
fp.write("Created task: 5678")
# Create a fake rhpkg binary
with open("%s/rhpkg" % self.temp_dir, 'a') as fp:
fp.write("true")
os.chmod("%s/rhpkg" % self.temp_dir,
stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
os.environ['PATH'] = self.temp_dir + ':' + os.environ['PATH']
def tearDown(self):
super(TestDriverKoji, self).tearDown()
shutil.rmtree(self.temp_dir)
def test_build_package(self, ld_mock, env_mock, rc_mock):
driver = KojiBuildDriver(cfg_options=self.config)
driver.build_package(output_directory=self.temp_dir)
expected = [mock.call(['koji',
'--principal', self.config.koji_krb_principal,
'--keytab', self.config.koji_krb_keytab,
'build', '--wait',
self.config.koji_build_target,
'%s/python-pysaml2-3.0-1a.el7.centos.src.rpm' %
self.temp_dir],
_err=driver._process_koji_output,
_out=driver._process_koji_output,
scratch=True,
_cwd=self.temp_dir,
_env={'PATH': '/usr/bin/'}),
mock.call(['koji', 'download-task', '--logs', '1234'],
_err=driver._process_koji_output,
_out=driver._process_koji_output,
_cwd=self.temp_dir,
_env={'PATH': '/usr/bin/'})]
# 1- koji build (handled by env_mock)
# 2- koji download (handled by env_mock)
# 3- restorecon (handled by rc_mock)
self.assertEqual(env_mock.call_count, 2)
self.assertEqual(rc_mock.call_count, 1)
self.assertEqual(env_mock.call_args_list, expected)
def test_build_package_no_scratch(self, ld_mock, env_mock, rc_mock):
self.config.koji_scratch_build = False
driver = KojiBuildDriver(cfg_options=self.config)
driver.build_package(output_directory=self.temp_dir)
expected = [mock.call(['koji',
'--principal', self.config.koji_krb_principal,
'--keytab', self.config.koji_krb_keytab,
'build', '--wait',
self.config.koji_build_target,
'%s/python-pysaml2-3.0-1a.el7.centos.src.rpm' %
self.temp_dir],
_err=driver._process_koji_output,
_out=driver._process_koji_output,
scratch=False,
_cwd=self.temp_dir,
_env={'PATH': '/usr/bin/'}),
mock.call(['koji', 'download-task', '--logs', '1234'],
_err=driver._process_koji_output,
_out=driver._process_koji_output,
_cwd=self.temp_dir,
_env={'PATH': '/usr/bin/'})]
# 1- koji build (handled by env_mock)
# 2- koji download (handled by env_mock)
# 3- restorecon (handled by rc_mock)
self.assertEqual(env_mock.call_count, 2)
self.assertEqual(rc_mock.call_count, 1)
self.assertEqual(env_mock.call_args_list, expected)
def test_build_package_brew(self, ld_mock, env_mock, rc_mock):
self.config.koji_exe = 'brew'
driver = KojiBuildDriver(cfg_options=self.config)
        driver.build_package(output_directory=self.temp_dir)
        expected = [mock.call(['brew',
'--principal', self.config.koji_krb_principal,
'--keytab', self.config.koji_krb_keytab,
'build', '--wait',
self.config.koji_build_target,
'%s/python-pysaml2-3.0-1a.el7.centos.src.rpm' %
self.temp_dir],
_err=driver._process_koji_output,
_out=driver._process_koji_output,
scratch=True,
_cwd=self.temp_dir,
_env={'PATH': '/usr/bin/'}),
mock.call(['brew', 'download-task', '--logs', '1234'],
_err=driver._process_koji_output,
_out=driver._process_koji_output,
_cwd=self.temp_dir,
_env={'PATH': '/usr/bin/'})]
# 1- koji build (handled by env_mock)
# 2- koji download (handled by env_mock)
# 3- restorecon (handled by rc_mock)
self.assertEqual(env_mock.call_count, 2)
self.assertEqual(rc_mock.call_count, 1)
        self.assertEqual(env_mock.call_args_list, expected)
|
adarshdec23/Market
|
core/preference/main.py
|
Python
|
apache-2.0
| 3,319
| 0.008738
|
from collections import *
from config import main
import heapq
class UserPreference:
def __init__(self):
self.results = []
self.list1 = []
self.list2 = []
self.list3 = []
self.list4 = []
self.categories = []
self.sold_average = []
self.bought_average = []
def get_preferences(self, user):
# Reset all variable
self.results = []
self.list1 = []
self.list2 = []
self.list3 = []
self.list4 = []
self.categories = []
self.sold_average = []
self.bought_average = []
self.frequency_based(user+'.csv')
return self.results
def frequency_based(self, user):
fp = open(main.path+'data/user/'+user, "r")
lines = fp.readlines()
for i in range(len(lines)):
lines[i] = lines[i].strip()
for i in range(1,len(lines)):
self.list1 = lines[i].split(",")
self.list2.append(self.list1)
self.list3.append(self.list1[3])
d = defaultdict(int)
for i in self.list3:
d[i] += 1
result = max(iter(d.items()), key=lambda x: x[1])
self.results.append(result[0])
self.deviation_based(result[0])
# STANDARD DEVIATION APPROACH
def deviation_based(self,freq_cat):
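        # Summary of the logic below (derived from this code, not external
        # docs): average the 'sold' and 'bought' prices per category, take
        # sold average minus bought average as the deviation, and recommend
        # the category with the largest deviation -- or the second largest
        # when the largest is the frequency-based category already chosen.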
        for i in range(0, len(self.list2)):
self.categories.append(self.list2[i][3])
self.categories = list(set(self.categories))
i = 0
for item in self.list2:
self.list4.append(self.categories.index(item[3]))
self.sold_average = [0]*len(self.categories)
self.bought_average = [0]*len(self.categories)
s_average = []
b_average = []
s=[0]*len(self.categories)
b=[0]*len(self.categories)
for item in self.list2:
cat = item[3]
ind = self.categories.index(cat)
if item[4] == 'sold':
self.sold_average[ind]+= int(float(item[5]))
else:
self.bought_average[ind]+= int(float(item[5]))
for x in self.list4:
if self.list2[i][3] == self.categories[x]:
if self.list2[i][4] == 'sold':
s[x]+=1
if self.list2[i][4] == 'bought':
b[x]+=1
i+=1
for i in range(len(self.categories)):
if s[i]!=0:
s_average.append(self.sold_average[i]/s[i])
else:
s_average.append(0)
for i in range(len(self.categories)):
if b[i]!=0:
b_average.append(self.bought_average[i]/b[i])
else:
b_average.append(0)
deviation = []
for i in range(len(self.categories)):
deviation.append(s_average[i]-b_average[i])
max_category = max(deviation)
max2_category = heapq.nlargest(2, deviation)
if max_category == freq_cat:
self.results.append(self.categories[deviation.index(max_category)])
else:
self.results.append(self.categories[deviation.index(max2_category[1])])
|
guilhermedallanol/dotfiles
|
vim/plugged/vial/vial/plugins/bufhist/plugin.py
|
Python
|
mit
| 3,812
| 0.000787
|
from time import time
from itertools import groupby
from vial import vfunc, vim, dref
from vial.utils import echon, redraw
from os.path import split
MAX_HISTORY_SIZE = 100
VHIST = 'vial_buf_hist'
VLAST = 'vial_last_buf'
def add_to_history(w, bufnr):
history = list(w.vars[VHIST])
history[:] = [r for r in history if r != bufnr][:MAX_HISTORY_SIZE - 1]
history.insert(0, bufnr)
w.vars[VHIST] = history
return history
def check_history(window):
if VHIST not in window.vars:
bufnr = vim.current.buffer.number
history = [r.number for r in vim.buffers if r.number != bufnr]
history.reverse()
history.insert(0, bufnr)
window.vars[VHIST] = history
def win_buf_enter():
w = vim.current.window
bufnr = int(vfunc.expand('<abuf>'))
if not w.vars.get('vial_bufhist_switch', None):
check_history(w)
add_to_history(w, bufnr)
w.vars[VLAST] = 0
else:
w.vars[VLAST] = bufnr, time()
@dref
def moved():
now = time()
w = vim.current.window
lastbuf = w.vars.get(VLAST, None)
if not lastbuf or now - lastbuf[1] > 0.1:
w.vars[VLAST] = 0
vim.command('echo "" | au! vial_bufhist_wait_action')
skey = lambda r: r[1][1]
def jump(dir):
w = vim.current.window
check_history(w)
history = list(w.vars[VHIST])
bufnr = vim.current.buffer.number
now = time()
lastbuf = w.vars.get(VLAST, None)
    if not lastbuf or (bufnr == lastbuf[0] and
            now - lastbuf[1] > vim.vars['vial_bufhist_timeout']):
history = add_to_history(w, bufnr)
if bufnr not in history:
history = add_to_history(w, bufnr)
names = {r.number: (split(r.name)
if r.name
else ['', '[buf-{}]'.format(r.number)])
for r in vim.buffers if vfunc.buflisted(r.number)}
    history[:] = filter(lambda r: r in names, history)
dups = True
while dups:
dups = False
for name, g in groupby(sorted(names.iteritems(), key=skey), skey):
g = list(g)
if len(g) > 1:
dups = True
for nr, (path, _) in g:
p, n = split(path)
names[nr] = p, n + '/' + name
width = vim.vars['vial_bufhist_width']
if width < 0:
width += int(vim.eval('&columns')) - 1
try:
idx = history.index(bufnr)
except ValueError:
return
idx += dir
if idx < 0:
idx = 0
elif idx >= len(history):
idx = len(history) - 1
anr = history[idx]
active = names[anr][1]
before = ' '.join(names[r][1] for r in history[:idx])
after = ' '.join(names[r][1] for r in history[idx+1:])
half = (width - len(active) - 4) / 2
if len(before) < len(after):
blen = min(half, len(before))
alen = width - len(active) - blen - 4
else:
alen = min(half, len(after))
blen = width - len(active) - alen - 4
if len(before) > blen:
before = '...' + before[3-blen:]
if len(after) > alen:
after = after[:alen-3] + '...'
if before: before += ' '
if after: after = ' ' + after
vim.command('let x=&ruler | let y=&showcmd')
vim.command('set noruler noshowcmd')
redraw()
echon(before)
vim.command('echohl CursorLine')
echon(active)
vim.command('echohl None')
echon(after)
vim.command('let &ruler=x | let &showcmd=y')
if anr != bufnr:
w.vars['vial_bufhist_switch'] = 1
vim.command('silent b {}'.format(anr))
w.vars['vial_bufhist_switch'] = 0
vim.command('augroup vial_bufhist_wait_action')
vim.command('au!')
vim.command('au CursorMoved,CursorHold <buffer> python %s()' % moved.ref)
vim.command('augroup END')
|
annegabrielle/secure_adhoc_network_ns-3
|
ns3_source_code/ns-3.10/bindings/python/apidefs/gcc-LP64/ns3_module_udp_echo.py
|
Python
|
gpl-2.0
| 8,335
| 0.017876
|
from pybindgen import Module, FileCodeSink, param, retval, cppclass, typehandlers
def register_types(module):
root_module = module.get_root()
## udp-echo-client.h: ns3::UdpEchoClient [class]
    module.add_class('UdpEchoClient', parent=root_module['ns3::Application'])
## udp-echo-server.h: ns3::UdpEchoServer [class]
module.add_class('UdpEchoServer', parent=root_module['ns3::Application'])
## Register a nested module for the namespace Config
nested_module = module.add_cpp_namespace('Config')
register_types_ns3_Config(nested_module)
## Register a nested module for the namespace FatalImpl
nested_module = module.add_cpp_namespace('FatalImpl')
register_types_ns3_FatalImpl(nested_module)
## Register a nested module for the namespace addressUtils
nested_module = module.add_cpp_namespace('addressUtils')
register_types_ns3_addressUtils(nested_module)
## Register a nested module for the namespace aodv
nested_module = module.add_cpp_namespace('aodv')
register_types_ns3_aodv(nested_module)
## Register a nested module for the namespace dot11s
nested_module = module.add_cpp_namespace('dot11s')
register_types_ns3_dot11s(nested_module)
## Register a nested module for the namespace dsdv
nested_module = module.add_cpp_namespace('dsdv')
register_types_ns3_dsdv(nested_module)
## Register a nested module for the namespace flame
nested_module = module.add_cpp_namespace('flame')
register_types_ns3_flame(nested_module)
## Register a nested module for the namespace internal
nested_module = module.add_cpp_namespace('internal')
register_types_ns3_internal(nested_module)
## Register a nested module for the namespace olsr
nested_module = module.add_cpp_namespace('olsr')
register_types_ns3_olsr(nested_module)
def register_types_ns3_Config(module):
root_module = module.get_root()
def register_types_ns3_FatalImpl(module):
root_module = module.get_root()
def register_types_ns3_addressUtils(module):
root_module = module.get_root()
def register_types_ns3_aodv(module):
root_module = module.get_root()
def register_types_ns3_dot11s(module):
root_module = module.get_root()
def register_types_ns3_dsdv(module):
root_module = module.get_root()
def register_types_ns3_flame(module):
root_module = module.get_root()
def register_types_ns3_internal(module):
root_module = module.get_root()
def register_types_ns3_olsr(module):
root_module = module.get_root()
def register_methods(root_module):
register_Ns3UdpEchoClient_methods(root_module, root_module['ns3::UdpEchoClient'])
register_Ns3UdpEchoServer_methods(root_module, root_module['ns3::UdpEchoServer'])
return
def register_Ns3UdpEchoClient_methods(root_module, cls):
## udp-echo-client.h: ns3::UdpEchoClient::UdpEchoClient(ns3::UdpEchoClient const & arg0) [copy constructor]
cls.add_constructor([param('ns3::UdpEchoClient const &', 'arg0')])
## udp-echo-client.h: ns3::UdpEchoClient::UdpEchoClient() [constructor]
cls.add_constructor([])
## udp-echo-client.h: uint32_t ns3::UdpEchoClient::GetDataSize() const [member function]
cls.add_method('GetDataSize',
'uint32_t',
[],
is_const=True)
## udp-echo-client.h: static ns3::TypeId ns3::UdpEchoClient::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## udp-echo-client.h: void ns3::UdpEchoClient::SetDataSize(uint32_t dataSize) [member function]
cls.add_method('SetDataSize',
'void',
[param('uint32_t', 'dataSize')])
## udp-echo-client.h: void ns3::UdpEchoClient::SetFill(std::string fill) [member function]
cls.add_method('SetFill',
'void',
[param('std::string', 'fill')])
## udp-echo-client.h: void ns3::UdpEchoClient::SetFill(uint8_t fill, uint32_t dataSize) [member function]
cls.add_method('SetFill',
'void',
[param('uint8_t', 'fill'), param('uint32_t', 'dataSize')])
## udp-echo-client.h: void ns3::UdpEchoClient::SetFill(uint8_t * fill, uint32_t fillSize, uint32_t dataSize) [member function]
cls.add_method('SetFill',
'void',
[param('uint8_t *', 'fill'), param('uint32_t', 'fillSize'), param('uint32_t', 'dataSize')])
## udp-echo-client.h: void ns3::UdpEchoClient::SetRemote(ns3::Ipv4Address ip, uint16_t port) [member function]
cls.add_method('SetRemote',
'void',
[param('ns3::Ipv4Address', 'ip'), param('uint16_t', 'port')])
## udp-echo-client.h: void ns3::UdpEchoClient::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='protected', is_virtual=True)
## udp-echo-client.h: void ns3::UdpEchoClient::StartApplication() [member function]
cls.add_method('StartApplication',
'void',
[],
visibility='private', is_virtual=True)
## udp-echo-client.h: void ns3::UdpEchoClient::StopApplication() [member function]
cls.add_method('StopApplication',
'void',
[],
visibility='private', is_virtual=True)
return
def register_Ns3UdpEchoServer_methods(root_module, cls):
## udp-echo-server.h: ns3::UdpEchoServer::UdpEchoServer(ns3::UdpEchoServer const & arg0) [copy constructor]
cls.add_constructor([param('ns3::UdpEchoServer const &', 'arg0')])
## udp-echo-server.h: ns3::UdpEchoServer::UdpEchoServer() [constructor]
cls.add_constructor([])
## udp-echo-server.h: static ns3::TypeId ns3::UdpEchoServer::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## udp-echo-server.h: void ns3::UdpEchoServer::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='protected', is_virtual=True)
## udp-echo-server.h: void ns3::UdpEchoServer::StartApplication() [member function]
cls.add_method('StartApplication',
'void',
[],
visibility='private', is_virtual=True)
## udp-echo-server.h: void ns3::UdpEchoServer::StopApplication() [member function]
cls.add_method('StopApplication',
'void',
[],
visibility='private', is_virtual=True)
return
def register_functions(root_module):
module = root_module
register_functions_ns3_Config(module.get_submodule('Config'), root_module)
register_functions_ns3_FatalImpl(module.get_submodule('FatalImpl'), root_module)
register_functions_ns3_addressUtils(module.get_submodule('addressUtils'), root_module)
register_functions_ns3_aodv(module.get_submodule('aodv'), root_module)
register_functions_ns3_dot11s(module.get_submodule('dot11s'), root_module)
register_functions_ns3_dsdv(module.get_submodule('dsdv'), root_module)
register_functions_ns3_flame(module.get_submodule('flame'), root_module)
register_functions_ns3_internal(module.get_submodule('internal'), root_module)
register_functions_ns3_olsr(module.get_submodule('olsr'), root_module)
return
def register_functions_ns3_Config(module, root_module):
return
def register_functions_ns3_FatalImpl(module, root_module):
return
def register_functions_ns3_addressUtils(module, root_module):
return
def register_functions_ns3_aodv(module, root_module):
return
def register_functions_ns3_dot11s(module, root_module):
return
def register_functions_ns3_dsdv(module, root_module):
return
def register_functions_ns3_flame(module, root_module):
    return
|
erik-sn/xlwrap
|
xlwrap.py
|
Python
|
mit
| 14,765
| 0.001422
|
import os
import ntpath
import xlrd
import openpyxl
from openpyxl.utils import coordinate_from_string, column_index_from_string
from openpyxl.utils.exceptions import CellCoordinatesException
class ExcelManager:
"""
Wrapper that opens and operates on .xls, .xlsx or .xlsm excel files. By
default we take in a string representing the excel file path (extension
included), and depending on the file type use xlrd or openpyxl to operate
on it.
The dev facing api is identical for either - internally we use xlrd or
openpyxl methods depending on the file type.
For rows & columns we use 1 based indexing to stay with the more modern
openpyxl (and most users are more familiar with it if they are coming from
an office environment, not a programming one). Be aware of which file type
you are using if you retrieve the sheet object - it could be using zero OR
one based indexing.
Public Variables:
file_path: full file path with extension of the file we are operating on
workbook: openpyxl/xlrd workbook object for this file
sheet: currently in use openpxl/xlrd sheet object for this work book
read_count: number of sheet reads this object has done
write_count: number of sheet writes this object has done
Public Methods:
select_sheet - choose which sheet to use (by index or name)
cell - retrieve an openpyxl/xlrd cell object by row/column or index
read - retrieve the value from the current sheet at row/column or index
write - write a value to the current sheet at row/column or index
save - save the workbook at the initial file path, or a new file path
if one is specified
info - return basic information/status of the object
to_array - return a 2D numpy array representation of the current sheet
find_index - return the first index of the match or None if it does not exist
find_indexes - return a list of tuples containing the indexes of all matches
"""
write_count = 0
read_count = 0
sheet_array = None
def __init__(self, file_path, sheet_name=None, sheet_index=None):
self.file_path = file_path
self.__check_file_extension(file_path)
self.__check_file_exists(file_path)
if file_path.endswith('.xls'):
self.__is_xls = True
self.__init_xls(sheet_name, sheet_index)
else:
self.__is_xls = False
self.__init_excel(sheet_name, sheet_index)
def change_sheet(self, *args):
"""
Change the current active sheet object
:param name: sheet name
:param index: sheet index (1 index)
:return: None
"""
        if isinstance(args[0], str):
name = args[0]
index = None
elif isinstance(args[0], int):
name = None
index = args[0]
else:
raise ValueError('Specify either the sheet name or sheet index to change sheets')
if self.__is_xls:
self.__select_xls_sheet(name, index - 1 if index else None)
else:
self.__select_excel_sheet(name, index - 1 if index else None)
def row(self, row_index):
"""
Return the row at the specified index
        :param row_index: 1 based index
:return: list of values
"""
self.sheet_array = self.array()
return self.sheet_array[row_index - 1]
def column(self, column_index):
"""
return the column at the specified index
:param column_index: string or (1 based) int index
:return: list of values
"""
if isinstance(column_index, int):
column = column_index - 1
else:
column = column_index_from_string(column_index.upper()) - 1
self.sheet_array = self.array()
return [row[column] for row in self.sheet_array]
def cell(self, *args):
"""
Return the cell at the specified location
:param args: tuple with either a 1 based representation for row/column
or string based index
:return: xlrd/openpyxl cell object
"""
row, column = self.__parse_row_column_from_args(*args)
if self.__is_xls:
return self.__get_xls_cell(row - 1, column - 1) # xlrd is a 1 based index
else:
return self.__get_excel_cell(row, column)
def read(self, *args):
"""
Read the value from the target cell
:param args: tuple with either a 1 based representation for row/column
or string based index
:return: string
"""
self.read_count += 1
value = self.cell(*args).value
return value if value else ''
def write(self, *args, value=None):
"""
Input the value at the specified target
:param args: tuple with either a 1 based representation for row/column
or string based index
:param value:
:return:
"""
if self.__is_xls:
raise TypeError('Writing to a cell is not supported for .xls files')
self.cell(*args).value = value
self.write_count += 1
def save(self, *args):
"""
Save the current sheet either at the original file_path (if none
specified) or at the file_path parameter
:param file_path: new file path to save file
:return: None
"""
if len(args) == 1:
self.__check_file_extension(args[0])
file_path = args[0]
else:
file_path = self.file_path
if self.__is_xls:
raise TypeError('Saving is not supported for .xls files')
else:
self.workbook.save(file_path)
def info(self, string=False):
"""
return basic information about this ExcelWrapper instance
:return: string
"""
sheet_name = self.sheet.name if self.__is_xls else self.sheet.title
if string:
return 'File: {}\nSheet: {}\nReads: {}\nWrites: {}' \
.format(self.file_path, sheet_name, self.read_count, self.write_count)
else:
return {
'file': self.file_path,
'sheet': sheet_name,
'reads': self.read_count,
'writes': self.write_count
}
def array(self):
"""
Return a 2D list representing the spreadsheet
:return: list(list())
"""
if self.__is_xls:
self.sheet_array = self.__xls_to_array()
return self.sheet_array
else:
self.sheet_array = self.__excel_to_array()
return self.sheet_array
def search(self, value, match=1, case_insensitive=True, contains=False, many=False):
"""
Given a value find the 1 based index where that value is located
on the sheet or None if it does not exist. If 'many' is set true then
an empty list is returned if no matches are found
:param value: the value we are searching for
:param match: if multiple results are found we return only one - this
parameter determines which index of the list we return with a 1 based index
:param case_insensitive: whether or not the search should be case insensitive
:param contains: whether or not the search should use 'in' or equality to
check if the spreadsheet value is a match
:param many: whether or not to return a singular value or a list of values
:return:
"""
indexes = self.__find_indexes(value, case_insensitive=case_insensitive, contains=contains)
if many:
return indexes
try:
match = indexes[match - 1]
return match[0], match[1]
except IndexError:
return None, None
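    # For illustration (sheet contents assumed): search('total') returns the
    # first match as a 1 based (row, column) tuple, search('total', many=True)
    # returns every matching index, and search('tot', contains=True) also
    # accepts substring matches.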
def __find_indexes(self, value, case_insensitive, contains):
"""
Iterate over the 2D list representation of the sheet and determine
if the input value exists based on search parameters
:param value: value we are looking for
:param case_insensitive: whether or not search is case_insensitive
        :param contains: use 'in' or equality to check if the spreadsheet value is a match
|
thinkAmi-sandbox/Bottle-sample
|
lan_access.py
|
Python
|
unlicense
| 589
| 0.005236
|
# -*- coding:utf-8 -*-
from bottle import route, run
@route("/")
def access():
return "OK!"
# host default value is 127.0.0.1
# OK - localhost / 127.0.0.1
# NG - 192.168.0.10 / hostname
# run(port=8080, debug=True, reloader=True)
# run(host="localhost", port=8080, debug=True, reloader=True)
# OK - 192.168.0.10 / hostname
# NG - localhost / 127.0.0.1
run(host="192.168.0.10", port=8080, deb
|
ug=True, reloader=True)
# run(host="<your hostname>", port=8080, debug=
|
True, reloader=True)
# OK - ALL
# run(host="0.0.0.0", port=8080, debug=True, reloader=True)
|
HarmonyEnterpriseSolutions/harmony-platform
|
src/gnue/common/datasources/drivers/sql/mssql/Behavior.py
|
Python
|
gpl-2.0
| 1,600
| 0.00625
|
# GNU Enterprise Common Library - Schema support for MS-SQL
#
# Copyright 2000-2007 Free Software Foundation
#
# This file is part of GNU Enterprise.
#
# GNU Enterprise is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation; either
# version 2, or (at your option) any later version.
#
# GNU Enterprise is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with program; see the file COPYING. If not,
# write to the Free Software Foundation, Inc., 59 Temple Place
# - Suite 330, Boston, MA 02111-1307, USA.
#
# $Id: Behavior.py,v 1.2 2008/11/04 20:14:04 oleg Exp $
"""
Schema support plugin for MS-SQL backends.
"""
__all__ = ['Behavior']
from gnue.common.datasources import GSchema
from gnue.common.datasources.drivers import Base
# =============================================================================
# Behavior class
# =============================================================================
class Behavior (Base.Behavior):
"""
Behavior class for MS-SQL backends.
"""
# ---------------------------------------------------------------------------
# Constructor
	# ---------------------------------------------------------------------------
def __init__ (self, connection):
Base.Behavior.__init_
|
_ (self, connection)
|
lotrekagency/heimdall
|
server/server.py
|
Python
|
mit
| 371
| 0.008086
|
import falcon
import json
class QuoteResource:
def on_get(self, req, resp):
"""Handles GET requests"""
quote = {
'quote': 'I\'ve always been more interested in the future than in the past.',
'author': 'Grace Hopper'
}
resp.body = json.dumps(quote)
api = falcon.API()
api.add_route('/quote', QuoteResource())
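# A hedged sketch of one way to serve this WSGI app locally; the repo's
# actual launch command is not shown in this snippet:
if __name__ == '__main__':
    from wsgiref.simple_server import make_server
    # serves http://127.0.0.1:8000/quote
    make_server('127.0.0.1', 8000, api).serve_forever()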
|
gotostack/swift
|
test/functional/tests.py
|
Python
|
apache-2.0
| 82,395
| 0.000158
|
#!/usr/bin/python -u
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
import hashlib
import json
import locale
import random
import StringIO
import time
import threading
import uuid
import unittest
from nose import SkipTest
from ConfigParser import ConfigParser
from test import get_config
from test.functional.swift_test_client import Account, Connection, File, \
ResponseError
from swift.common.constraints import MAX_FILE_SIZE, MAX_META_NAME_LENGTH, \
MAX_META_VALUE_LENGTH, MAX_META_COUNT, MAX_META_OVERALL_SIZE, \
MAX_OBJECT_NAME_LENGTH, CONTAINER_LISTING_LIMIT, ACCOUNT_LISTING_LIMIT, \
MAX_ACCOUNT_NAME_LENGTH, MAX_CONTAINER_NAME_LENGTH
default_constraints = dict((
('max_file_size', MAX_FILE_SIZE),
('max_meta_name_length', MAX_META_NAME_LENGTH),
('max_meta_value_length', MAX_META_VALUE_LENGTH),
('max_meta_count', MAX_META_COUNT),
('max_meta_overall_size', MAX_META_OVERALL_SIZE),
('max_object_name_length', MAX_OBJECT_NAME_LENGTH),
('container_listing_limit', CONTAINER_LISTING_LIMIT),
('account_listing_limit', ACCOUNT_LISTING_LIMIT),
('max_account_name_length', MAX_ACCOUNT_NAME_LENGTH),
('max_container_name_length', MAX_CONTAINER_NAME_LENGTH)))
constraints_conf = ConfigParser()
conf_exists = constraints_conf.read('/etc/swift/swift.conf')
# Constraints are set first from the test config, then from
# /etc/swift/swift.conf if it exists. If swift.conf doesn't exist,
# then limit test coverage. This allows SAIO tests to work fine but
# requires remote functional testing to know something about the cluster
# that is being tested.
config = get_config('func_test')
for k in default_constraints:
if k in config:
# prefer what's in test.conf
config[k] = int(config[k])
elif conf_exists:
# swift.conf exists, so use what's defined there (or swift defaults)
# This normally happens when the test is running locally to the cluster
# as in a SAIO.
config[k] = default_constraints[k]
else:
# .functests don't know what the constraints of the tested cluster are,
# so the tests can't reliably pass or fail. Therefore, skip those
# tests.
config[k] = '%s constraint is not defined' % k
web_front_end = config.get('web_front_end', 'integral')
normalized_urls = config.get('normalized_urls', False)
def load_constraint(name):
c = config[name]
if not isinstance(c, int):
raise SkipTest(c)
return c
locale.setlocale(locale.LC_COLLATE, config.get('collate', 'C'))
def chunks(s, length=3):
i, j = 0, length
while i < len(s):
yield s[i:j]
i, j = j, j + length
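# Hedged illustration, not from the source: list(chunks('abcdefg')) yields
# ['abc', 'def', 'g'].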
def timeout(seconds, method, *args, **kwargs):
class TimeoutThread(threading.Thread):
def __init__(self, method, *args, **kwargs):
threading.Thread.__init__(self)
self.method = method
self.args = args
self.kwargs = kwargs
self.exception = None
def run(self):
try:
self.method(*self.args, **self.kwargs)
except Exception as e:
self.exception = e
t = TimeoutThread(method, *args, **kwargs)
t.start()
t.join(seconds)
if t.exception:
raise t.exception
if t.isAlive():
t._Thread__stop()
return True
return False
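# Hedged illustration, not from the source: timeout(5, some_slow_callable)
# returns True if the call was still running after 5 seconds and had to be
# force-stopped, False if it finished in time; any exception raised inside
# the worker thread is re-raised in the caller.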
class Utils(object):
@classmethod
def create_ascii_name(cls, length=None):
return uuid.uuid4().hex
@classmethod
def create_utf8_name(cls, length=None):
if length is None:
length = 15
else:
length = int(length)
utf8_chars = u'\uF10F\uD20D\uB30B\u9409\u8508\u5605\u3703\u1801'\
u'\u0900\uF110\uD20E\uB30C\u940A\u8509\u5606\u3704'\
u'\u1802\u0901\uF111\uD20F\uB30D\u940B\u850A\u5607'\
u'\u3705\u1803\u0902\uF112\uD210\uB30E\u940C\u850B'\
u'\u5608\u3706\u1804\u0903\u03A9\u2603'
return ''.join([random.choice(utf8_chars)
for x in xrange(length)]).encode('utf-8')
create_name = create_ascii_name
class Base(unittest.TestCase):
def setUp(self):
cls = type(self)
if not cls.set_up:
cls.env.setUp()
cls.set_up = True
def assert_body(self, body):
response_body = self.env.conn.response.read()
self.assert_(response_body == body,
'Body returned: %s' % (response_body))
def assert_status(self, status_or_statuses):
self.assert_(self.env.conn.response.status == status_or_statuses or
(hasattr(status_or_statuses, '__iter__') and
self.env.conn.response.status in status_or_statuses),
'Status returned: %d Expected: %s' %
(self.env.conn.response.status, status_or_statuses))
class Base2(object):
def setUp(self):
Utils.create_name = Utils.create_utf8_name
super(Base2, self).setUp()
def tearDown(self):
Utils.create_name = Utils.create_ascii_name
class TestAccountEnv(object):
@classmethod
def setUp(cls):
cls.conn = Connection(config)
cls.conn.authenticate()
cls.account = Account(cls.conn, config.get('account',
config['username']))
cls.account.delete_containers()
cls.containers = []
for i in range(10):
cont = cls.account.container(Utils.create_name())
if not cont.create():
raise ResponseError(cls.conn.response)
cls.containers.append(cont)
class TestAccountDev(Base):
env = TestAccountEnv
    set_up = False
class TestAccountDevUTF8(Base2, TestAccountDev):
set_up = False
class TestAccount(Base):
env = TestAccountEnv
set_up = False
def testNoAuthToken(self):
self.assertRaises(ResponseError, self.env.account.info,
cfg={'no_auth_token': True})
self.assert_status([401, 412])
        self.assertRaises(ResponseError, self.env.account.containers,
cfg={'no_auth_token': True})
self.assert_status([401, 412])
def testInvalidUTF8Path(self):
invalid_utf8 = Utils.create_utf8_name()[::-1]
container = self.env.account.container(invalid_utf8)
self.assert_(not container.create(cfg={'no_path_quote': True}))
self.assert_status(412)
self.assert_body('Invalid UTF8 or contains NULL')
def testVersionOnlyPath(self):
self.env.account.conn.make_request('PUT',
cfg={'version_only_path': True})
self.assert_status(412)
self.assert_body('Bad URL')
def testInvalidPath(self):
was_url = self.env.account.conn.storage_url
if (normalized_urls):
self.env.account.conn.storage_url = '/'
else:
self.env.account.conn.storage_url = "/%s" % was_url
self.env.account.conn.make_request('GET')
try:
self.assert_status(404)
finally:
self.env.account.conn.storage_url = was_url
def testPUT(self):
self.env.account.conn.make_request('PUT')
self.assert_status([403, 405])
def testAccountHead(self):
try_count = 0
while try_count < 5:
try_count += 1
info = self.env.account.info()
for field in ['object_count', 'container_count', 'bytes_used']:
self.assert_(info[field] >= 0)
if
|
stormi/tsunami
|
src/secondaires/navigation/commandes/matelot/liste.py
|
Python
|
bsd-3-clause
| 3,827
| 0.00131
|
# -*-coding:Utf-8 -*
# Copyright (c) 2013 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier contenant le paramètre 'liste' de la commande 'matelot'."""
from primaires.format.fonctions import supprimer_accents
from primaires.format.tableau import Tableau
from primaires.interpreteur.masque.parametre import Parametre
from secondaires.navigation.equipage.postes.hierarchie import ORDRE
class PrmListe(Parametre):
"""Commande 'matelot liste'.
"""
def __init__(self):
"""Constructeur du paramètre"""
        Parametre.__init__(self, "liste", "list")
self.tronquer = True
self.aide_courte = "liste les matelots de l'équipage"
self.aide_longue = \
"Cette commande liste les matelots de votre équipage. " \
"Elle permet d'obtenir rapidement des informations pratiques " \
"sur le nom du matelot ainsi que l'endroit où il se trouve."
def interpreter(self, personnage, dic_masques):
"""Interprétation du paramètre"""
|
salle = personnage.salle
if not hasattr(salle, "navire"):
personnage << "|err|Vous n'êtes pas sur un navire.|ff|"
return
navire = salle.navire
equipage = navire.equipage
if not navire.a_le_droit(personnage, "officier"):
personnage << "|err|Vous ne pouvez donner d'ordre sur ce " \
"navire.|ff|"
return
matelots = tuple((m, m.nom_poste) for m in \
equipage.matelots.values())
matelots += tuple(equipage.joueurs.items())
matelots = sorted(matelots, \
key=lambda couple: ORDRE.index(couple[1]), reverse=True)
if len(matelots) == 0:
personnage << "|err|Votre équipage ne comprend aucun matelot.|ff|"
return
tableau = Tableau()
tableau.ajouter_colonne("Nom")
tableau.ajouter_colonne("Poste")
tableau.ajouter_colonne("Affectation")
for matelot, nom_poste in matelots:
nom = matelot.nom
nom_poste = nom_poste.capitalize()
titre = "Aucune"
if hasattr(matelot, "personnage"):
titre = matelot.personnage.salle.titre_court.capitalize()
tableau.ajouter_ligne(nom, nom_poste, titre)
personnage << tableau.afficher()
|
DamnWidget/mamba
|
mamba/application/app.py
|
Python
|
gpl-3.0
| 7,315
| 0
|
# -*- test-case-name: mamba.test.test_application mamba.test.test_mamba -*-
# Copyright (c) 2012 - Oscar Campos <oscar.campos@member.fsf.org>
# See LICENSE for more details
"""
.. module: app
:platform: Linux
:synopsis: Mamba Application Manager
.. moduleauthor:: Oscar Campos <oscar.campos@member.fsf.org>
"""
import os
import gc
from twisted.web import http
from twisted.internet import address
from twisted.python.logfile import DailyLogFile
from twisted.python.monkey import MonkeyPatcher
from twisted.python import versions, filepath, log
from mamba.utils import borg
from mamba.http import headers
from mamba.core import packages
from mamba import _version as _mamba_version
from mamba.application import controller, model
_app_ver = versions.Version('Application', 0, 1, 0)
_app_project_ver = versions.Version('Project', 0, 1, 0)
class ApplicationError(Exception):
"""ApplicationError raises when an error occurs
"""
class Mamba(borg.Borg):
"""
    This object is a global configuration for mamba applications; it acts as
    their central object and can serve as a central registry.
    It inherits from :class:`~mamba.utils.borg.Borg`, so you can just
    instantiate a new object of this class and it will share all its
    information between instances.
You create an instance of the :class:`~Mamba` class in your main module
or in your `Twisted` `tac` file:
    .. sourcecode:: python
        from mamba import Mamba
app = Mamba({'name': 'MyApp', 'description': 'My App', ...})
:param options: options to initialize the application with
:type options: dict
"""
    def __init__(self, options=None):
"""Mamba constructor"""
super(Mamba, self).__init__()
if hasattr(self, 'initialized') and self.initialized is True:
return
self.monkey_patched = False
self.development = False
self.already_logging = False
self._mamba_ver = _mamba_version.version.short()
self._ver = _app_ver.short()
self._port = 1936
self._log_file = None
self._project_ver = _app_project_ver.short()
self.name = 'Mamba Webservice v%s' % _mamba_version.version.short()
self.description = (
'Mamba %s is a Web applications framework that works '
'over Twisted using Jinja2 as GUI enhancement '
'Mamba has been developed by Oscar Campos '
'<oscar.campos@member.fsf.org>' % _mamba_version.version.short()
)
self.language = os.environ.get('LANG', 'en_EN').split('_')[0]
self.lessjs = False
self._parse_options(options)
# monkey patch twisted
self._monkey_patch()
# register log file if any
self._handle_logging()
# PyPy does not implement set_debug method in gc object
if getattr(options, 'debug', False):
if hasattr(gc, 'set_debug'):
gc.set_debug(gc.DEBUG_STATS | gc.DEBUG_INSTANCES)
else:
log.msg(
                    'Debug is set as True but gc object is lacking '
'set_debug method'
)
self._header = headers.Headers()
self._header.language = self.language
self._header.description = self.description
self.managers = {
'controller': controller.ControllerManager(),
'model': model.ModelManager(),
'packages': packages.PackagesManager()
}
self.initialized = True
def _handle_logging(self):
"""
Start logging to file if there is some file configuration and we
are not running in development mode
"""
if self.development is False and self._log_file is not None:
self.already_logging = True
log.startLogging(DailyLogFile.fromFullPath(self.log_file))
def _parse_options(self, options):
if options is None:
return
for key in dir(options):
if key.startswith('__'):
continue
if key == 'port':
setattr(self, '_port', getattr(options, key))
elif key == 'version':
setattr(self, '_ver', getattr(options, key))
elif key == 'log_file':
if getattr(options, key) is not None:
log_file = 'logs/{}'.format(getattr(options, key))
setattr(self, '_log_file', log_file)
else:
setattr(self, key, getattr(options, key))
def _monkey_patch(self):
"""
Monkeypatch some parts of the twisted library that are waiting
        for bugfix inclusion in the trunk
"""
if not self.monkey_patched:
# add new method
setattr(http.Request, 'getClientProxyIP', getClientProxyIP)
# patch getClientIP
monkey_patcher = MonkeyPatcher(
(http.Request, 'getClientIP', getClientIPPatch)
)
monkey_patcher.patch()
self.monkey_patched = True
@property
def port(self):
return self._port
@port.setter
def port(self, value):
if type(value) is not int:
raise ApplicationError("Int expected, get %s" % (type(value)))
self._port = value
@property
def log_file(self):
return self._log_file if self._log_file is not None else 'service.log'
@log_file.setter
def log_file(self, file):
path = filepath.FilePath(file)
if not filepath.exists(path.dirname()):
raise ApplicationError('%s' % (
            'Given directory %s doesn\'t exist' % path.dirname())
)
self._log_file = file
@property
def project_ver(self):
return self._project_ver
@project_ver.setter
def project_ver(self, ver):
if type(ver) is not versions.Version:
            raise ApplicationError('%s expected, got %s' % (
'twisted.python.versions.Version', type(ver))
)
self._project_ver = ver
@property
def mamba_ver(self):
return self._mamba_ver
@mamba_ver.setter
def mamba_ver(self, value):
raise ApplicationError("'mamba_ver' is readonly")
@property
def ver(self):
return self._ver
@ver.setter
def ver(self, value):
raise ApplicationError("'ver' is readonly")
def getClientIPPatch(self):
"""
Return the IP address of the client who submitted this request. If
there are headers for X-Forwarded-For, they are returned as well.
If you need to get the value of Request.client.host you can use the
new patched method Request.getClientProxyIP() on Request objects.
:returns: the client IP address(es)
"""
x_forwarded_for = self.getHeader('x-forwarded-for')
if x_forwarded_for is not None:
return x_forwarded_for.split(', ')[0]
return self.getClientProxyIP()
def getClientProxyIP(self):
"""
Return the IP address of the client/proxy who submitted the request.
:returns: the client/proxy IP address or None
"""
if isinstance(self.client, address.IPv4Address):
return self.client.host
return None
__all__ = ['Mamba', 'ApplicationError']
|
ttreeagency/PootleTypo3Org
|
pootle/apps/staticpages/views.py
|
Python
|
gpl-2.0
| 5,490
| 0
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2012-2013 Zuza Software Foundation
#
# This file is part of Pootle.
#
# Pootle is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# translate is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with translate; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
from __future__ import absolute_import
from django.contrib import auth, messages
from django.core.exceptions import ObjectDoesNotExist
from django.core.urlresolvers import reverse_lazy
from django.http import Http404
from django.shortcuts import redirect, render_to_response
from django.template import RequestContext
from django.utils.translation import ugettext_lazy as _
from django.views.generic import (CreateView, DeleteView, TemplateView,
                                  UpdateView)
from pootle.core.views import SuperuserRequiredMixin
from .forms import agreement_form_factory
from .models import AbstractPage, LegalPage, StaticPage
class PageModelMixin(object):
"""Mixin used to set the view's page model according to the
`page_type` argument caught in a url pattern.
"""
def dispatch(self, request, *args, **kwargs):
self.page_type = kwargs.get('page_type', None)
self.model = {
'legal': LegalPage,
'static': StaticPage,
}.get(self.page_type)
if self.model is None:
raise Http404
return super(PageModelMixin, self).dispatch(request, *args, **kwargs)
def get_context_data(self, **kwargs):
ctx = super(PageModelMixin, self).get_context_data(**kwargs)
ctx.update({
'has_page_model': True,
})
return ctx
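# A hedged sketch of the url pattern shape PageModelMixin expects; the app's
# actual urls.py is not part of this snippet, so names here are illustrative:
#     url(r'^(?P<page_type>legal|static)/(?P<pk>\d+)/edit/$',
#         PageUpdateView.as_view(), name='staticpages.edit'),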
class AdminTemplateView(SuperuserRequiredMixin, TemplateView):
template_name = 'staticpages/admin/page_list.html'
def get_context_data(self, **kwargs):
ctx = super(AdminTemplateView, self).get_context_data(**kwargs)
ctx.update({
'legalpages': LegalPage.objects.all(),
'staticpages': StaticPage.objects.all(),
})
return ctx
class PageCreateView(SuperuserRequiredMixin, PageModelMixin, CreateView):
success_url = reverse_lazy('staticpages.admin')
template_name = 'staticpages/admin/page_create.html'
def get_initial(self):
initial = super(PageModelMixin, self).get_initial()
next_page_number = AbstractPage.max_pk() + 1
initial.update({
'title': _('Page Title'),
'virtual_path': _('page-%d', next_page_number),
})
return initial
class PageUpdateView(SuperuserRequiredMixin, PageModelMixin, UpdateView):
success_url = reverse_lazy('staticpages.admin')
template_name = 'staticpages/admin/page_update.html'
def get_context_data(self, **kwargs):
ctx = super(PageUpdateView, self).get_context_data(**kwargs)
ctx.update({
'show_delete': True,
'page_type': self.page_type,
})
return ctx
class PageDeleteView(SuperuserRequiredMixin, PageModelMixin, DeleteView):
success_url = reverse_lazy('staticpages.admin')
def display_page(request, virtual_path):
"""Displays an active page defined in `virtual_path`."""
page = None
for page_model in AbstractPage.__subclasses__():
try:
page = page_model.objects.live(request.user).get(
virtual_path=virtual_path,
)
except ObjectDoesNotExist:
pass
if page is None:
raise Http404
if page.url:
return redirect(page.url)
if request.user.is_superuser and not page.active:
msg = _('This page is inactive and visible to administrators '
'only. You can activate it by <a href="%s">editing its '
'properties</a>', page.get_edit_url())
messages.warning(request, msg)
template_name = 'staticpages/page_display.html'
if 'HTTP_X_FANCYBOX' in request.META:
template_name = 'staticpages/_body.html'
ctx = {
'page': page,
}
return render_to_response(template_name, ctx, RequestContext(request))
def legal_agreement(request):
"""Displays the pending documents to be agreed by the current user."""
pending_pages = LegalPage.objects.pending_user_agreement(request.user)
form_class = agreement_form_factory(pending_pages, request.user)
if request.method == 'POST':
form = form_class(request.POST)
if form.is_valid():
# The user agreed, let's record the specific agreements
# and redirect to the next page
form.save()
redirect_to = request.POST.get(auth.REDIRECT_FIELD_NAME, '/')
return redirect(redirect_to)
else:
form = form_class()
ctx = {
'form': form,
'next': request.GET.get(auth.REDIRECT_FIELD_NAME, ''),
}
return render_to_response('staticpages/agreement.html', ctx,
RequestContext(request))
|
zuun77/givemegoogletshirts
|
codejam/2016/Round2/q1.py
|
Python
|
apache-2.0
| 877
| 0.013683
|
def solve(N, R, P, S):
    if max([R, P, S]) > 2**(N-1): return "IMPOSSIBLE"
if N > 2 and max([R, P, S]) == 2**(N-1): return "IMPOSSIBLE"
min_val = min([R, P, S])
rep = 2**N//3
if min_val < rep: return "IMPOSSIBLE"
if N == 1:
tmp = ""
if P: tmp += "P"
if R: tmp += "R"
if S: tmp += "S"
return tmp
elif N == 2:
if P == 2: return "PRPS"
elif R == 2: return "PRRS"
else: return "PSRS"
else:
preP = P//2
        preR = R//2
preS = S//2
if preP < P-preP: preP += 1
elif preR < R-preR: preR += 1
else: preS += 1
ans = solve(N-1, preR, preP, preS) + solve(N-1, R-preR, P-preP, S-preS)
return ans
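# Hedged worked example traced through the branches above: for N=2, R=2,
# P=1, S=1 neither IMPOSSIBLE check fires (max equals 2**(N-1) but N is not
# greater than 2, and min_val 1 equals rep = 2**N//3 = 1), so the N == 2
# branch returns "PRRS".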
for case in range(1, eval(input()) + 1):
N, R, P, S = map(int, input().split())
print("Case #{}: {}".format(case, solve(N, R, P, S)))
|
SerpentCS/purchase-workflow
|
purchase_order_line_sequence/models/__init__.py
|
Python
|
agpl-3.0
| 413
| 0
|
# -*- coding: utf-8 -*-
# Author: Alexandre Fayolle
# Copyright 2013 Camptocamp SA
# Author: Damien Crier
# Copyright 2015 Camptocamp SA
# © 2015 Eficent Business and IT Consulting Services S.L. -
# Jordi Ballester Alomar
# © 2015 Serpent Consulting Services Pvt. Ltd. - Sudhir Arya
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
from . import purchase
from . import invoice
|
dhodges/sgfspider
|
tests/test_igokisen.py
|
Python
|
mit
| 1,778
| 0.006187
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import pdb
import unittest
from datetime import date
from testing_utils import setupTestDB, fake_response_from_file
from scrapy.http import Response, Request, HtmlResponse
from sgfSpider.dbsgf import DBsgf, DBNewsItem
from sgfSpider.spiders.igokisen import IgokisenSpider
class TestIgokisenSpider(unittest.TestCase):
def setUp(self):
setupTestDB()
self.spider = IgokisenSpider()
def testIgokisenNewsParsing(self):
results = self.spider.parse(fake_response_from_file('Go_Topics.html'))
# there should be 48 items
for x in range(48):
results.next()
dbitems = DBsgf().session.query(DBNewsItem).order_by(DBNewsItem.date).all()
self.assertEqual(len(dbitems), 48)
item = dbitems[7]
self.assertEqual(item.date.strftime('%Y-%m-%d'), '2015-04-02')
self.assertEqual(item.game, 'GS Caltex Cup')
self.assertEqual(item.link, 'file:///var/folders/08/1yh0yp1955z8rg6jdhrps2vw0000gn/T/kr/gs.html')
self.assertEqual(item.nation,'Korea')
self.assertEqual(item.site, 'igokisen')
def testIgokisenGameParsing(self):
results = self.spider.parseTournamentGames(fake_response_from_file('Gosei.html'))
urls = []
# there should be 4 items
urls.extend(results.next()['file_urls'])
urls.extend(results.next()['file_urls'])
urls.extend(results.next()['file_urls'])
urls.extend(results.next()['file_urls'])
        self.assertEqual(sorted(urls), [
u'http://igokisen.web.fc2.com/jp/sgf/40goseit1.sgf',
u'http://igokisen.web.fc2.com/jp/sgf/40goseit2.sgf',
u'http://igokisen.web.fc2.com/jp/sgf/40goseit3.sgf',
            u'http://igokisen.web.fc2.com/jp/sgf/40goseit4.sgf'
])
if __name__ == '__main__':
unittest.main()
|
carlos-ferras/Sequence-ToolKit
|
view/gensec/dialogs/processes/ui_tl.py
|
Python
|
gpl-3.0
| 8,875
| 0.001691
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file '/home/krl1to5/Work/FULL/Sequence-ToolKit/2016/resources/ui/gensec/process/tl.ui'
#
# Created by: PyQt5 UI code generator 5.5.1
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_process(object):
def setupUi(self, process):
process.setObjectName("process")
process.resize(680, 164)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(process.sizePolicy().hasHeightForWidth())
process.setSizePolicy(sizePolicy)
process.setMinimumSize(QtCore.QSize(0, 164))
process.setMaximumSize(QtCore.QSize(16777215, 164))
self.horizontalLayout_8 = QtWidgets.QHBoxLayout(process)
self.horizontalLayout_8.setSpacing(12)
self.horizontalLayout_8.setObjectName("horizontalLayout_8")
self.form_area = QtWidgets.QFrame(process)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.form_area.sizePolicy().hasHeightForWidth())
self.form_area.setSizePolicy(sizePolicy)
self.form_area.setFrameShape(QtWidgets.QFrame.Box)
self.form_area.setFrameShadow(QtWidgets.QFrame.Raised)
self.form_area.setObjectName("form_area")
self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.form_area)
self.verticalLayout_2.setContentsMargins(5, 5, 5, 5)
self.verticalLayout_2.setSpacing(12)
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.layout = QtWidgets.QHBoxLayout()
self.layout.setSpacing(15)
self.layout.setObjectName("layout")
self.layout_2 = QtWidgets.QHBoxLayout()
self.layout_2.setObjectName("layout_2")
self.final_temperature_label = QtWidgets.QLabel(self.form_area)
self.final_temperature_label.setObjectName("final_temperature_label")
self.layout_2.addWidget(self.final_temperature_label)
self.final_temperature = QtWidgets.QDoubleSpinBox(self.form_area)
self.final_temperature.setMinimumSize(QtCore.QSize(80, 28))
self.final_temperature.setMaximumSize(QtCore.QSize(80, 16777215))
self.final_temperature.setMaximum(600.0)
self.final_temperature.setObjectName("final_temperature")
self.layout_2.addWidget(self.final_temperature)
self.layout.addLayout(self.layout_2)
self.layout_3 = QtWidgets.QHBoxLayout()
self.layout_3.setObjectName("layout_3")
self.time_at_final_temp_label = QtWidgets.QLabel(self.form_area)
self.time_at_final_temp_label.setObjectName("time_at_final_temp_label")
self.layout_3.addWidget(self.time_at_final_temp_label)
self.time_at_final_temp = QtWidgets.QDoubleSpinBox(self.form_area)
self.time_at_final_temp.setMinimumSize(QtCore.QSize(80, 28))
self.time_at_final_temp.setMaximumSize(QtCore.QSize(80, 16777215))
self.time_at_final_temp.setMaximum(99999.0)
self.time_at_final_temp.setObjectName("time_at_final_temp")
self.layout_3.addWidget(self.time_at_final_temp)
self.layout.addLayout(self.layout_3)
spacerItem = QtWidgets.QSpacerItem(0, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.layout.addItem(spacerItem)
self.verticalLayout_2.addLayout(self.layout)
self.layout_4 = QtWidgets.QHBoxLayout()
self.layout_4.setSpacing(15)
self.layout_4.setObjectName("layout_4")
self.layout_5 = QtWidgets.QHBoxLayout()
self.layout_5.setObjectName("layout_5")
self.channels_label = QtWidgets.QLabel(self.form_area)
self.channels_label.setObjectName("channels_label")
self.layout_5.addWidget(self.channels_label)
self.channels = QtWidgets.QSpinBox(self.form_area)
self.channels.setMinimumSize(QtCore.QSize(55, 28))
self.channels.setMaximumSize(QtCore.QSize(55, 16777215))
self.channels.setMaximum(512)
self.channels.setObjectName("channels")
self.layout_5.addWidget(self.channels)
self.layout_4.addLayout(self.layout_5)
self.layout_6 = QtWidgets.QHBoxLayout()
self.layout_6.setObjectName("layout_6")
self.heating_rate_label = QtWidgets.QLabel(self.form_area)
self.heating_rate_label.setObjectName("heating_rate_label")
self.layout_6.addWidget(self.heating_rate_label)
self.heating_rate = QtWidgets.QDoubleSpinBox(self.form_area)
self.heating_rate.setMinimumSize(QtCore.QSize(80, 28))
self.heating_rate.setMaximumSize(QtCore.QSize(80, 16777215))
self.heating_rate.setMinimum(0.1)
self.heating_rate.setMaximum(20.0)
self.heating_rate.setObjectName("heating_rate")
self.layout_6.addWidget(self.heating_rate)
self.layout_4.addLayout(self.layout_6)
spacerItem1 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.layout_4.addItem(spacerItem1)
self.verticalLayout_2.addLayout(self.layout_4)
self.layout_7 = QtWidgets.QHBoxLayout()
self.layout_7.setObjectName("layout_7")
self.save_temp = QtWidgets.QCheckBox(self.form_area)
self.save_temp.setObjectName("save_
|
temp
|
")
self.layout_7.addWidget(self.save_temp)
spacerItem2 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.layout_7.addItem(spacerItem2)
self.verticalLayout_2.addLayout(self.layout_7)
self.horizontalLayout_8.addWidget(self.form_area)
self.buttons_area = QtWidgets.QFrame(process)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.buttons_area.sizePolicy().hasHeightForWidth())
self.buttons_area.setSizePolicy(sizePolicy)
self.buttons_area.setMinimumSize(QtCore.QSize(0, 0))
self.buttons_area.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.buttons_area.setFrameShape(QtWidgets.QFrame.Box)
self.buttons_area.setFrameShadow(QtWidgets.QFrame.Raised)
self.buttons_area.setObjectName("buttons_area")
self.verticalLayout = QtWidgets.QVBoxLayout(self.buttons_area)
self.verticalLayout.setContentsMargins(5, 5, 5, 5)
self.verticalLayout.setSpacing(12)
self.verticalLayout.setObjectName("verticalLayout")
self.push_button_accept = QtWidgets.QPushButton(self.buttons_area)
self.push_button_accept.setMinimumSize(QtCore.QSize(100, 32))
self.push_button_accept.setShortcut("Return")
self.push_button_accept.setObjectName("push_button_accept")
self.verticalLayout.addWidget(self.push_button_accept)
self.push_button_cancel = QtWidgets.QPushButton(self.buttons_area)
self.push_button_cancel.setMinimumSize(QtCore.QSize(100, 32))
self.push_button_cancel.setShortcut("Esc")
self.push_button_cancel.setObjectName("push_button_cancel")
self.verticalLayout.addWidget(self.push_button_cancel)
self.push_button_info = QtWidgets.QPushButton(self.buttons_area)
self.push_button_info.setMinimumSize(QtCore.QSize(100, 32))
self.push_button_info.setObjectName("push_button_info")
self.verticalLayout.addWidget(self.push_button_info)
spacerItem3 = QtWidgets.QSpacerItem(20, 0, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.verticalLayout.addItem(spacerItem3)
self.horizontalLayout_8.addWidget(self.buttons_area)
self.retranslateUi(process)
QtCore.QMetaObject.connectSlotsByName(process)
def retranslateUi(self, process):
_translate = QtCore.QCoreApplication.translate
process.s
|
GustavoHennig/ansible
|
contrib/vault/vault-keyring.py
|
Python
|
gpl-3.0
| 3,430
| 0.001166
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) 2014, Matt Martz <matt@sivel.net>
# (c) 2016, Justin Mayer <https://justinmayer.com/>
#
# This file is part of Ansible.
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# =============================================================================
#
# This script is to be used with vault_password_file or --vault-password-file
# to retrieve the vault password via your OS's native keyring application.
#
# This file *MUST* be saved with executable permissions. Otherwise, Ansible
# will try to parse it as a password file and display: "ERROR! Decryption failed"
#
# The `keyring` Python module is required: https://pypi.python.org/pypi/keyring
#
# By default, this script will store the specified password in the keyring of
# the user that invokes the script. To specify a user keyring, add a [vault]
# section to your ansible.cfg file with a 'username' option. Example:
#
# [vault]
# username = 'ansible-vault'
#
# Another optional setting is for the key name, which allows you to use this
# script to handle multiple project vaults with different passwords:
#
# [vault]
# keyname = 'ansible-vault-yourproject'
#
# You can configure the `vault_password_file` option in ansible.cfg:
#
# [defaults]
# ...
# vault_password_file = /path/to/vault-keyring.py
# ...
#
# To set your password, `cd` to your project directory and run:
#
# python /path/to/vault-keyring.py set
#
# If you choose not to configure the path to `vault_password_file` in
# ansible.cfg, your `ansible-playbook` command might look like:
#
# ansible-playbook --vault-password-file=/path/to/vault-keyring.py site.yml
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
import sys
import getpass
import keyring
import ansible.constants as C
def main():
(parser, config_path) = C.load_config_file()
if parser.has_option('vault', 'username'):
username = parser.get('vault', 'username')
else:
username = getpass.getuser()
if parser.has_option('vault', 'keyname'):
keyname = parser.get('vault', 'keyname')
else:
keyname = 'ansible'
if len(sys.argv) == 2 and sys.argv[1] == 'set':
intro = 'Storing password in "{}" user keyring using key name: {}\n'
sys.stdout.write(intro.format(username, keyname))
password = getpass.getpass()
confirm = getpass.getpass('Confirm password: ')
if password == confirm:
keyring.set_password(keyname, username, password)
else:
sys.stderr.write('Passwords do not match\n')
sys.exit(1)
else:
sys.stdout.write('{}\n'.format(keyring.get_password(keyname,
username)))
sys.exit(0)
if __name__ == '__main__':
main()
|
sccblom/vercors
|
deps/z3/4.4.1/Windows NT/intel/bin/example.py
|
Python
|
mpl-2.0
| 178
| 0
|
# Copyright (c) Microsoft Corporation 2015
from z3 import *
x = Real('x')
y = Real('y')
s = Solver()
s.add(x + y > 5, x > 1, y > 1)
print(s.check())
print(s.model())
|
elelay/gPodderAsRSSReader
|
src/gpodder/gtkui/desktop/sync.py
|
Python
|
gpl-3.0
| 13,131
| 0.003427
|
# -*- coding: utf-8 -*-
#
# gPodder - A media aggregator and podcast client
# Copyright (c) 2005-2011 Thomas Perl and the gPodder Team
#
# gPodder is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# gPodder is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# gpodder.gtkui.desktop.sync - Glue code between GTK+ UI and sync module
# Thomas Perl <thp@gpodder.org>; 2009-09-05 (based on code from gui.py)
import gtk
import threading
import gpodder
_ = gpodder.gettext
from gpodder import util
from gpodder import sync
from gpodder.liblogger import log
from gpodder.gtkui.desktop.syncprogress import gPodderSyncProgress
from gpodder.gtkui.desktop.deviceplaylist import gPodderDevicePlaylist
class gPodderSyncUI(object):
def __init__(self, config, notification, \
parent_window, show_confirmation, \
update_episode_list_icons, \
update_podcast_list_model, \
preferences_widget, \
episode_selector_class, \
commit_changes_to_database):
self._config = config
self.notification = notification
self.parent_window = parent_window
self.show_confirmation = show_confirmation
self.update_episode_list_icons = update_episode_list_icons
self.update_podcast_list_model = update_podcast_list_model
self.preferences_widget = preferences_widget
self.episode_selector_class = episode_selector_class
self.commit_changes_to_database = commit_changes_to_database
def _filter_sync_episodes(self, channels, only_downloaded=True):
"""Return a list of episodes for device synchronization
If only_downloaded is True, this will skip episodes that
have not been downloaded yet and podcasts that are marked
as "Do not synchronize to my device".
"""
episodes = []
for channel in channels:
if not channel.sync_to_devices and only_downloaded:
log('Skipping channel: %s', channel.title, sender=self)
continue
for episode in channel.get_all_episodes():
if episode.was_downloaded(and_exists=True) or \
not only_downloaded:
episodes.append(episode)
return episodes
def _show_message_unconfigured(self):
title = _('No device configured')
message = _('Please set up your device in the preferences dialog.')
self.notification(message, title, widget=self.preferences_widget)
def _show_message_cannot_open(self):
title = _('Cannot open device')
message = _('Please check the settings in the preferences dialog.')
self.notification(message, title, widget=self.preferences_widget)
def on_synchronize_episodes(self, channels, episodes=None, force_played=True):
if self._config.device_type == 'ipod' and not sync.gpod_available:
title = _('Cannot sync to iPod')
message = _('Please install python-gpod and restart gPodder.')
self.notification(message, title, important=True)
return
elif self._config.device_type == 'mtp' and not sync.pymtp_available:
title = _('Cannot sync to MTP device')
message = _('Please install libmtp and restart gPodder.')
self.notification(message, title, important=True)
return
device = sync.open_device(self._config)
if device is not None:
def after_device_sync_callback(device, successful_sync):
if device.cancelled:
log('Cancelled by user.', sender=self)
elif successful_sync:
title = _('Device synchronized')
message = _('Your device has been synchronized.')
self.notification(message, title)
else:
title = _('Error closing device')
message = _('Please check settings and permission.')
self.notification(message, title, important=True)
# Update the UI to reflect changes from the sync process
episode_urls = set()
channel_urls = set()
for episode in episodes:
episode_urls.add(episode.url)
channel_urls.add(episode.channel.url)
util.idle_add(self.update_episode_list_icons, episode_urls)
util.idle_add(self.update_podcast_list_model, channel_urls)
util.idle_add(self.commit_changes_to_database)
device.register('post-done', after_device_sync_callback)
if device is None:
return self._show_message_unconfigured()
if not device.open():
return self._show_message_cannot_open()
if self._config.device_type == 'ipod':
#update played episodes and delete if requested
for channel in channels:
if channel.sync_to_devices:
allepisodes = [e for e in channel.get_all_episodes() \
if e.was_downloaded(and_exists=True)]
device.update_played_or_delete(channel, allepisodes, \
self._config.ipod_delete_played_from_db)
if self._config.ipod_purge_old_episodes:
device.purge()
if episodes is None:
force_played = False
episodes = self._filter_sync_episodes(channels)
def check_free_space():
# "Will we add this episode to the device?"
def will_add(episode):
# If already on-device, it won't take up any space
if device.episode_on_device(episode):
return False
# Might not be synced if it's played already
if not force_played and \
self._config.only_sync_not_played and \
episode.is_played:
return False
# In all other cases, we expect the episode to be
# synchronized to the device, so "answer" positive
return True
# "What is the file size of this episode?"
def file_size(episode):
filename = episode.local_filename(create=False)
if filename is None:
return 0
return util.calculate_size(str(filename))
# Calculate total size of sync and free space on device
total_size = sum(file_size(e) for e in episodes if will_add(e))
free_space = max(device.get_free_space(), 0)
if total_size > free_space:
title = _('Not enough space left on device')
message = _('You need to free up %s.\nDo you want to continue?') \
% (util.format_filesize(total_size-free_space),)
if not self.show_confirmation(message, title):
device.cancel()
device.close()
return
# Finally start the synchronization process
gPodderSyncProgress(self.parent_window, device=device)
def sync_thread_func():
device.add_tracks(episodes, force_played=force_played)
device.close()
threading.Thread(target=sync_thread_func).start()
# This function is used to remove files from the device
def cleanup_episodes():
# 'only_sync_not_played' must be used or else all the
# played tracks will be copied then immediately deleted
if self._config.mp3_player_delete
|
rdhyee/osf.io
|
api/nodes/views.py
|
Python
|
apache-2.0
| 141,472
| 0.004298
|
import re
from modularodm import Q
from rest_framework import generics, permissions as drf_permissions
from rest_framework.exceptions import PermissionDenied, ValidationError, NotFound, MethodNotAllowed, NotAuthenticated
from rest_framework.status import HTTP_204_NO_CONTENT
from rest_framework.response import Response
from framework.auth.oauth_scopes import CoreScopes
from framework.postcommit_tasks.handlers import enqueue_postcommit_task
from api.base import generic_bulk_views as bulk_views
from api.base import permissions as base_permissions
from api.base.exceptions import InvalidModelValueError, JSONAPIException, Gone
from api.base.filters import ODMFilterMixin, ListFilterMixin
from api.base.views import JSONAPIBaseView
from api.base.parsers import (
JSONAPIRelationshipParser,
JSONAPIRelationshipParserForRegularJSON,
JSONAPIMultipleRelationshipsParser,
JSONAPIMultipleRelationshipsParserForRegularJSON,
)
from api.base.exceptions import RelationshipPostMakesNoChanges, EndpointNotImplementedError
from api.base.pagination import CommentPagination, NodeContributorPagination, MaxSizePagination
from api.base.utils import get_object_or_error, is_bulk_request, get_user_auth, is_truthy
from api.base.settings import ADDONS_OAUTH, API_BASE
from api.caching.tasks import ban_url
from api.addons.views import AddonSettingsMixin
from api.files.serializers import FileSerializer
from api.comments.serializers import NodeCommentSerializer, CommentCreateSerializer
from api.comments.permissions import CanCommentOrPublic
from api.users.views import UserMixin
from api.wikis.serializers import NodeWikiSerializer
from api.base.views import LinkedNodesRelationship, BaseContributorDetail, BaseContributorList, BaseNodeLinksDetail, BaseNodeLinksList, BaseLinkedList
from api.base.throttling import (
UserRateThrottle,
NonCookieAuthThrottle,
AddContributorThrottle,
)
from api.nodes.filters import NodePreprintsFilterMixin
from api.nodes.serializers import (
NodeSerializer,
ForwardNodeAddonSettingsSerializer,
NodeAddonSettingsSerializer,
NodeLinksSerializer,
NodeForksSerializer,
NodeDetailSerializer,
NodeProviderSerializer,
DraftRegistrationSerializer,
DraftRegistrationDetailSerializer,
NodeContributorsSerializer,
NodeContributorDetailSerializer,
NodeInstitutionsRelationshipSerializer,
NodeAlternativeCitationSerializer,
NodeContributorsCreateSerializer,
NodeViewOnlyLinkSerializer,
NodeViewOnlyLinkUpdateSerializer,
NodeCitationSerializer,
NodeCitationStyleSerializer
)
from api.nodes.utils import get_file_object
from api.citations.utils import render_citation
from api.addons.serializers import NodeAddonFolderSerializer
from api.registrations.serializers import RegistrationSerializer
from api.institutions.serializers import InstitutionSerializer
from api.identifiers.serializers import NodeIdentifierSerializer
from api.identifiers.views import IdentifierList
from api.nodes.permissions import (
IsAdmin,
IsPublic,
AdminOrPublic,
ContributorOrPublic,
RegistrationAndPermissionCheckForPointers,
ContributorDetailPermissions,
ReadOnlyIfRegistration,
IsAdminOrReviewer,
WriteOrPublicForRelationshipInstitutions,
ExcludeWithdrawals,
NodeLinksShowIfVersion,
)
from api.logs.serializers import NodeLogSerializer
from api.preprints.serializers import PreprintSerializer
from website.addons.wiki.model import NodeWikiPage
from website.exceptions import NodeStateError
from website.util.permissions import ADMIN
from website.models import Node, Pointer, Comment, NodeLog, Institution, DraftRegistration, PrivateLink, PreprintService
from website.files.models import FileNode
from framework.auth.core import User
from api.base.utils import default_node_list_query, default_node_permission_query
class NodeMixin(object):
"""Mixin with convenience methods for retrieving the current node based on the
current URL. By default, fetches the current node based on the node_id kwarg.
"""
serializer_class = NodeSerializer
node_lookup_url_kwarg = 'node_id'
def get_node(self, check_object_permissions=True):
node = get_object_or_error(
Node,
self.kwargs[self.node_lookup_url_kwarg],
display_name='node'
)
        # Nodes that are folders/collections are treated as a separate resource, so if the client
# requests a collection through a node endpoint, we return a 404
if node.is_collection or node.is_registration:
raise NotFound
# May raise a permission denied
if check_object_permissions:
            self.check_object_permissions(self.request, node)
return node
class DraftMixin(object):
serializer_class = DraftRegistrationSerializer
def get_draft(self, draft_id=None):
node_id = self.kwargs['node_id']
if draft_id is None:
draft_id = self.kwargs['draft_id']
draft = get_object_or_error(DraftRegistration, draft_id)
if not draft.branched_from._id == node_id:
raise ValidationError('This draft registration is not created from the given node.')
if self.request.method not in drf_permissions.SAFE_METHODS:
registered_and_deleted = draft.registered_node and draft.registered_node.is_deleted
if draft.registered_node and not draft.registered_node.is_deleted:
raise PermissionDenied('This draft has already been registered and cannot be modified.')
if draft.is_pending_review:
raise PermissionDenied('This draft is pending review and cannot be modified.')
if draft.requires_approval and draft.is_approved and (not registered_and_deleted):
raise PermissionDenied('This draft has already been approved and cannot be modified.')
self.check_object_permissions(self.request, draft)
return draft
class WaterButlerMixin(object):
path_lookup_url_kwarg = 'path'
provider_lookup_url_kwarg = 'provider'
def get_file_item(self, item):
attrs = item['attributes']
file_node = FileNode.resolve_class(
attrs['provider'],
FileNode.FOLDER if attrs['kind'] == 'folder'
else FileNode.FILE
).get_or_create(self.get_node(check_object_permissions=False), attrs['path'])
file_node.update(None, attrs, user=self.request.user)
self.check_object_permissions(self.request, file_node)
return file_node
def fetch_from_waterbutler(self):
node = self.get_node(check_object_permissions=False)
path = self.kwargs[self.path_lookup_url_kwarg]
provider = self.kwargs[self.provider_lookup_url_kwarg]
return self.get_file_object(node, path, provider)
def get_file_object(self, node, path, provider, check_object_permissions=True):
obj = get_file_object(node=node, path=path, provider=provider, request=self.request)
if provider == 'osfstorage':
if check_object_permissions:
self.check_object_permissions(self.request, obj)
return obj
class NodeList(JSONAPIBaseView, bulk_views.BulkUpdateJSONAPIView, bulk_views.BulkDestroyJSONAPIView, bulk_views.ListBulkCreateJSONAPIView, NodePreprintsFilterMixin, WaterButlerMixin):
"""Nodes that represent projects and components. *Writeable*.
Paginated list of nodes ordered by their `date_modified`. Each resource contains the full representation of the
node, meaning additional requests to an individual node's detail view are not necessary. Registrations and withdrawn
registrations cannot be accessed through this endpoint (see registration endpoints instead).
<!--- Copied Spiel from NodeDetail -->
On the front end, nodes are considered 'projects' or 'components'. The difference between a project and a component
is that a project is the top-level node, and components are children of the project. There is also a [category
field](/v2/#osf-node-categories) that includes 'project' as an option. The categorization essentially determines
which icon is displayed by th
|
google/smilesparser
|
test_smilesparser_rdkit.py
|
Python
|
apache-2.0
| 6,797
| 0.010152
|
# Copyright 2016 Google Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import smilesparser
from rdkit import Chem
serial = 0
element_number = {'C': 6,
'N': 7,
'O': 8,
'H': 1,
'S': 16
}
class SMILES:
def __init__(self, smiles):
self.mol = Chem.RWMol()
self.parsed = smilesparser.SMILES.parseString(smiles)[0]
self.prevAtomIdx = None
self.prevBond = None
self.atomStack = []
self.ringClosures = {}
self.iterate_smiles(self.parsed.smiles)
def AddAtom(self, s):
a = Chem.Atom(element_number[s.upper()])
if a.GetSymbol() == 'S':
a.SetHybridization(Chem.rdchem.HybridizationType.SP2)
a.SetNumRadicalElectrons(1)
a.SetNoImplicit(True)
else:
if not self.prevBond:
a.SetHybridization(Chem.rdchem.HybridizationType.SP3)
elif self.prevBond == ':':
bt = Chem.rdchem.BondType.SINGLE
a.SetHybridization(Chem.rdchem.HybridizationType.SP2)
elif self.prevBond == '=':
bt = Chem.rdchem.BondType.DOUBLE
a.SetHybridization(Chem.rdchem.HybridizationType.SP2)
else:
raise RuntimeError
idx = self.mol.AddAtom(a)
if self.prevAtomIdx is not None:
self.AddBond(idx)
self.prevAtomIdx = idx
return a
def AddBond(self, idx):
bt = Chem.rdchem.BondType.SINGLE
if self.prevBond:
if self.prevBond == '=':
bt = Chem.rdchem.BondType.DOUBLE
if self.prevBond == '#':
bt = Chem.rdchem.BondType.TRIPLE
if self.prevBond == ':':
bt = Chem.rdchem.BondType.AROMATIC
self.mol.AddBond(self.prevAtomIdx, idx, bt)
self.prevBond = None
def inspect_organic_symbol(self, organic_symbol, indent=0):
s = ''.join(organic_symbol)
self.AddAtom(s)
def inspect_aromatic_symbol(self, aromatic_symbol, indent=0):
s = ''.join(aromatic_symbol)
a = self.AddAtom(s)
a.SetIsAromatic(True)
self.prevBond = ":"
def inspect_element_symbol(self, element_symbol, indent=0):
s = ''.join(element_symbol)
self.AddAtom(s)
def inspect_chiral_class(self, chiral_class, indent=0):
pass
def inspect_hcount(self, hcount, indent=0):
pass
def inspect_charge(self, charge, indent=0):
pass
def inspect_atomspec(self, atomspec, indent=0):
self.atomStack.append(self.prevAtomIdx)
for item in atomspec:
if isinstance(item, smilesparser.AST.AromaticSymbol):
self.inspect_aromatic_symbol(item.aromatic_symbol, indent+1)
elif isinstance(item, smilesparser.AST.ElementSymbol):
self.inspect_element_symbol(item.element_symbol, indent+1)
elif isinstance(item, smilesparser.AST.ChiralClass):
self.inspect_chiral_class(item.chiral_class, indent+1)
elif isinstance(item, smilesparser.AST.HCount):
self.inspect_hcount(item.hcount, indent+1)
elif isinstance(item, smilesparser.AST.Charge):
self.inspect_charge(item.charge, indent+1)
else:
print " " * indent + str(item), dir(item)
self.prevAtomIdx = self.atomStack.pop()
def inspect_atom(self, atom, indent=0):
if isinstance(atom, smilesparser.AST.OrganicSymbol):
self.inspect_organic_symbol(atom.organic_symbol, indent)
elif isinstance(atom, smilesparser.AST.AromaticSymbol):
self.inspect_aromatic_symbol(atom.aromatic_symbol, indent)
elif isinstance(atom, smilesparser.AST.AtomSpec):
self.inspect_atomspec(atom.atom_spec, indent)
else:
print " " * indent + atom, dir(atom)
def inspect_bond(self, bond, indent=0):
self.prevBond = bond
def inspect_ring_closure(self, ring_closure, indent=0):
if ring_closure not in self.ringClosures:
self.ringClosures[ring_closure] = self.prevAtomIdx
else:
idx = self.ringClosures[ring_closure]
self.AddBond(idx)
def inspect_chain(self, chain, indent=0):
for item in chain:
if isinstance(item, smilesparser.AST.Bond):
self.inspect_bond(item.bond, indent)
elif isinstance(item, smilesparser.AST.Atom):
self.inspect_atom(item.atom, indent)
elif isinstance(item, smilesparser.AST.RingClosure):
                self.inspect_ring_closure(item.ring_closure, indent)
else:
print " " * indent + item, dir(item)
def iterate_branch(self, branch, indent=0):
self.atomStack.append(self.prevAtomIdx)
for item in branch[0]:
if isinstance(item, smilesparser.AST.Bond):
self.inspect_bond(item.bond, indent+1)
elif isinstance(item, smilesparser.AST.SMILES):
self.iterate_smiles(item.smiles, indent+1)
else:
print " " * indent + item, dir(item)
self.prevAtomIdx = self.atomStack.pop()
def iterate_smiles(self, smiles, indent=0):
for item in smiles:
if isinstance(item, smilesparser.AST.Atom):
self.inspect_atom(item.atom, indent)
elif isinstance(item, smilesparser.AST.Chain):
self.inspect_chain(item.chain, indent)
elif isinstance(item, smilesparser.AST.Branch):
self.iterate_branch(item, indent+1)
else:
print " " * indent + item, dir(item)
def print_mol(mol):
for atom in mol.GetAtoms():
atom.UpdatePropertyCache(strict=False)
print (atom.GetIdx(),
atom.GetAtomicNum(),
atom.GetDegree(),
atom.GetTotalDegree(),
atom.GetTotalValence(),
atom.GetImplicitValence(),
atom.GetExplicitValence(),
atom.GetFormalCharge(),
atom.GetNumRadicalElectrons(),
atom.GetHybridization(),
atom.GetNoImplicit())
for bond in mol.GetBonds():
print (bond.GetBeginAtomIdx(),
bond.GetEndAtomIdx(),
bond.GetBondType())
if __name__ == '__main__':
smiles=[
# 'C',
# 'CC',
# 'CCCCC(CCC)CCC',
# 'C1CCC(C1C)CCCC',
# 'c1ccccc1',
# 'Cc1ccccc1',
# 'CCC[S]=O',
# 'CC[S@](=O)c1ccc2c(c1)[nH]/c(=N/C(=O)OC)/[nH]2',
'C=CCc1cc(OC)c2c(c1OC)OCO2'
# 'CCC(=O)O[C@]1(CC[NH+](C[C@@H]1CC=C)C)c2ccccc2'
]
for s in smiles:
print s
m = Chem.MolFromSmiles(s)
s1 = Chem.MolToSmiles(m)
print s1
print_mol(m)
print
sm = SMILES(s1)
print_mol(sm.mol)
print Chem.MolToSmiles(sm.mol)
print
|
jamielennox/python-kiteclient
|
kiteclient/tests/v1/utils.py
|
Python
|
apache-2.0
| 1,915
| 0
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class DummyKeyResponse(object):
def __init__(self, gen=1):
self.generation = gen
self.name = ""
def request(self, path, method, **kwargs):
self.name = path.split('/')[-1]
return self
def json(self):
return {"generation": self.generation,
"name": self.name}
class DummyTicketResponse(object):
def __init__(self, signature, metadata, ticket):
self.signature = signature
self.metadata = metadata
self.ticket = ticket
def request(self, path, method, **kwargs):
return self
def json(self):
return {"signature": self.signature,
"metadata": se
|
lf.metadata,
"ticket": self.ticket}
class DummyGroupResponse(object):
def __init__(self, name):
self.name = name
def request(self, path, method, **kwargs):
return self
def json(self):
return {"name": self.name}
class DummyGroupKeyResponse(object):
def __init__(self, signature, metadata, group_key):
self.signature = signature
self.metadata = metadata
self.group_key = group_key
def request(self, path, method, **kwargs):
return self
def json(self):
return {"signature": self.signature,
"metadata": self.metadata,
"group_key": self.group_key}
|
fortesit/search-engine
|
posting-list-search-k-distanced-words.py
|
Python
|
mit
| 663
| 0.045249
|
#! /usr/bin/env python
from sys import argv
script, q1, q2, k = argv
fw = open('c.txt', 'w+')
for docid in range(1,192):
filename = 'data/' + str(docid) + '.txt'
fr = open(filename)
string = fr.read()
pp1 = []
pp2 = []
l = []
position = 0
for token in string.split():
if token == q1:
pp1.append(position)
if token == q2:
pp2.append(position)
position += 1
for i in pp1:
for j in pp2:
            if abs(i - j) <= int(k):
l.append(j)
elif j > i:
break
while l and abs(l[0] - i) > int(k):
l.pop(0)
prev_ps = -1
for ps in l:
if ps != prev_ps:
fw.write(str(docid) + ' ' + str(i) + ' ' + str(ps) + '\n')
prev_ps = ps
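# Hypothetical invocation (argument order taken from `script, q1, q2, k = argv`
# above; the data/1.txt..191.txt corpus layout is assumed):
#   python posting-list-search-k-distanced-words.py caesar brutus 5
# For each document this writes "docid pos(q1) pos(q2)" lines to c.txt for
# every pair of occurrences at most k token positions apart.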
|
jelly/calibre
|
src/calibre/linux.py
|
Python
|
gpl-3.0
| 45,645
| 0.003856
|
__license__ = 'GPL v3'
__copyright__ = '2008, Kovid Goyal <kovid at kovidgoyal.net>'
''' Post installation script for linux '''
import sys, os, cPickle, textwrap, stat, errno
from subprocess import check_call, check_output
from functools import partial
from calibre import __appname__, prints, guess_type
from calibre.constants import islinux, isbsd
from calibre.customize.ui import all_input_formats
from calibre.ptempfile import TemporaryDirectory
from calibre import CurrentDir
entry_points = {
'console_scripts': [
'ebook-device = calibre.devices.cli:main',
'ebook-meta = calibre.ebooks.metadata.cli:main',
'ebook-convert = calibre.ebooks.conversion.cli:main',
'ebook-polish = calibre.ebooks.oeb.polish.main:main',
'markdown-calibre = calibre.ebooks.markdown.__main__:run',
'web2disk = calibre.web.fetch.simple:main',
'calibre-server = calibre.srv.standalone:main',
'lrf2lrs = calibre.ebooks.lrf.lrfparser:main',
'lrs2lrf = calibre.ebooks.lrf.lrs.convert_from:main',
'calibre-debug = calibre.debug:main',
'calibredb = calibre.db.cli.main:main',
'calibre-parallel = calibre.utils.ipc.worker:main',
'calibre-customize = calibre.customize.ui:main',
'calibre-complete = calibre.utils.complete:main',
'fetch-ebook-metadata = calibre.ebooks.metadata.sources.cli:main',
'calibre-smtp = calibre.utils.smtp:main',
],
'gui_scripts' : [
__appname__+' = calibre.gui_launch:calibre',
'lrfviewer = calibre.gui2.lrf_renderer.main:main',
'ebook-viewer = calibre.gui_launch:ebook_viewer',
'ebook-edit = calibre.gui_launch:ebook_edit',
],
}
class PreserveMIMEDefaults(object):
def __init__(self):
self.initial_values = {}
def __enter__(self):
def_data_dirs = '/usr/local/share:/usr/share'
paths = os.environ.get('XDG_DATA_DIRS', def_data_dirs)
paths = paths.split(':')
paths.append(os.environ.get('XDG_DATA_HOME', os.path.expanduser(
'~/.local/share')))
paths = list(filter(os.path.isdir, paths))
if not paths:
# Env var had garbage in it, ignore it
paths = def_data_dirs.split(':')
paths = list(filter(os.path.isdir, paths))
self.paths = {os.path.join(x, 'applications/defaults.list') for x in
paths}
self.initial_values = {}
for x in self.paths:
try:
with open(x, 'rb') as f:
self.initial_values[x] = f.read()
except:
self.initial_values[x] = None
def __exit__(self, *args):
for path, val in self.initial_values.iteritems():
if val is None:
try:
os.remove(path)
except:
pass
elif os.path.exists(path):
try:
with open(path, 'r+b') as f:
if f.read() != val:
f.seek(0)
f.truncate()
f.write(val)
except EnvironmentError as e:
if e.errno != errno.EACCES:
raise
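# Hedged usage sketch (illustrative only): the installer presumably wraps its
# xdg-mime/xdg-desktop-menu calls so user-chosen defaults survive the install.
#   with PreserveMIMEDefaults():
#       register_mime_types()  # hypothetical helper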
# Uninstall script {{{
UNINSTALL = '''\
#!{python}
from __future__ import print_function, unicode_literals
euid = {euid}
import os, subprocess, shutil
try:
raw_input
except NameError:
raw_input = input
if os.geteuid() != euid:
print ('The installer was last run as user id:', euid, 'To remove all files you must run the uninstaller as the same user')
if raw_input('Proceed anyway? [y/n]:').lower() != 'y':
raise SystemExit(1)
frozen_path = {frozen_path!r}
if not frozen_path or not os.path.exists(os.path.join(frozen_path, 'resources', 'calibre-mimetypes.xml')):
frozen_path = None
for f in {mime_resources!r}:
cmd = ['xdg-mime', 'uninstall', f]
print ('Removing mime resource:', os.path.basename(f))
ret = subprocess.call(cmd, shell=False)
if ret != 0:
print ('WARNING: Failed to remove mime resource', f)
for x in tuple({manifest!r}) + tuple({appdata_resources!r}) + (os.path.abspath(__file__), __file__, frozen_path):
if not x or not os.path.exists(x):
continue
print ('Removing', x)
try:
if os.path.isdir(x):
shutil.rmtree(x)
else:
os.unlink(x)
except Exception as e:
print ('Failed to delete', x)
print ('\t', e)
icr = {icon_resources!r}
mimetype_icons = []
def remove_icon(context, name, size, update=False):
cmd = ['xdg-icon-resource', 'uninstall', '--context', context, '--size', size, name]
if not update:
cmd.insert(2, '--noupdate')
print ('Removing icon:', name, 'from context:', context, 'at size:', size)
ret = subprocess.call(cmd, shell=False)
if ret != 0:
print ('WARNING: Failed to remove icon', name)
for i, (context, name, size) in enumerate(icr):
if context == 'mimetypes':
mimetype_icons.append((name, size))
continue
remove_icon(context, name, size, update=i == len(icr) - 1)
mr = {menu_resources!r}
for f in mr:
cmd = ['xdg-desktop-menu', 'uninstall', f]
print ('Removing desktop file:', f)
ret = subprocess.call(cmd, shell=False)
if ret != 0:
print ('WARNING: Failed to remove menu item', f)
print ()
if mimetype_icons and raw_input('Remove the e-book format icons? [y/n]:').lower() in ['', 'y']:
for i, (name, size) in enumerate(mimetype_icons):
remove_icon('mimetypes', name, size, update=i == len(mimetype_icons) - 1)
'''
# }}}
# Completion {{{
class ZshCompleter(object): # {{{
def __init__(self, opts):
self.opts = opts
self.dest = None
base = os.path.dirname(self.opts.staging_sharedir)
self.detect_zsh(base)
if not self.dest and base == '/usr/share':
# Ubuntu puts site-functions in /usr/local/share
self.detect_zsh('/usr/local/share')
self.commands = {}
def detect_zsh(self, base):
for x in ('vendor-completions', 'vendor-functions', 'site-functions'):
c = os.path.join(base, 'zsh', x)
if os.path.isdir(c) and os.access(c, os.W_OK):
self.dest = os.path.join(c, '_calibre')
break
def get_options(self, parser, cover_opts=('--cover',), opf_opts=('--opf',),
file_map={}):
if hasattr(parser, 'option_list'):
options = parser.option_list
for group in parser.option_groups:
options += group.option_list
else:
options = parser
for opt in options:
lo, so = opt._long_opts, opt._short_opts
if opt.takes_value():
lo = [x+'=' for x in lo]
so = [x+'+' for x in so]
ostrings = lo + so
ostrings = u'{%s}'%','.join(ostrings) if len(ostrings) > 1 else ostrings[0]
exclude = u''
if opt.dest is None:
exclude = u"'(- *)'"
h = opt.help or ''
h = h.replace('"', "'").replace('[', '(').replace(
']', ')').replace('\n', ' ').replace(':', '\\:').replace('`', "'")
h = h.replace('%default', type(u'')(opt.default))
arg = ''
if opt.takes_value():
arg = ':"%s":'%h
if opt.dest in {'extract_to', 'debug_pipeline', 'to_dir', 'outbox', 'with_library', 'library_path'}:
arg += "'_path_files -/'"
elif opt.choices:
arg += "(%s)"%'|'.join(opt.choices)
elif set(file_map).intersection(set(opt._long_opts)):
k = set(file_map).intersection(set(opt._long_opts))
exts = file_map[tuple(k)[0]]
if exts:
|
OCA/partner-contact
|
partner_contact_department/tests/test_recursion.py
|
Python
|
agpl-3.0
| 915
| 0
|
# © 2016 Tecnativa - Vicent Cubells
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl-3.0).
from odoo.exceptions import UserError
from odoo.tests import common
class TestRecursion(common.SavepointCase):
@classmethod
def setUpClass(cls):
super(TestRecursion, cls).setUpClass()
cls.department_obj = cls.env["res.partner.department"]
# Instances
        cls.dpt1 = cls.department_obj.create({"name": "Dpt. 1"})
cls.dpt2 = cls.department_obj.create(
{"name": "Dep. 2", "parent_id": cls.dpt1.id}
)
def test_recursion(self):
""" Testing recursion """
self.dpt3 = self.department_obj.create(
{"name": "Dep. 3", "parent_id": self.dpt2.id}
)
        # Making dpt1 a child of its own descendant (dpt3) must be rejected.
with self.assertRaises(UserError):
self.dpt1.write(vals={"parent_id": self.dpt3.id})
|
bnmrrs/runkeeper-api
|
runkeeper/httpclient.py
|
Python
|
mit
| 1,887
| 0.00159
|
#
# The MIT License
#
# Copyright (c) 2009 Ben Morris
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
""" Runkeeper Python API
The Runkeeper Python API is used to interact with
Runkeeper (http://runkeeper.com). Runkeeper does not provide an official API
so BeautifulSoup is used to scrape pages.
Here is the basic example of getting total distance for a user
from runkeeper import User
user = User("bnmrrs")
activities = user.get_all_activities()
total_distance = 0
for activity in activities:
total_distance += activity.get_distance()
print total_distance
"""
import urllib
def get(url):
"""Used to make very basic HTTP requests. Currently no error handling.
    Takes a URL as its only argument and returns the resulting page.
"""
f = urllib.urlopen(url)
s = f.read()
f.close()
return s
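# Illustrative call (the URL is a placeholder, not from the original module):
#   html = get('http://runkeeper.com/user/bnmrrs/profile')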
|
apdjustino/DRCOG_Urbansim
|
src/opus_core/variables/utils/parse_tree_pattern_generator.py
|
Python
|
agpl-3.0
| 1,882
| 0.005845
|
# Opus/UrbanSim urban simulation software.
# Copyright (C) 2010-2011 University of California, Berkeley, 2005-2009 University of Washington
# See opus_core/LICENSE
# Utility classes that can be used to generate parse tree patterns. These
# utilities take a sample expression or statement, and return a parse tree that
# uses symbolic names for the nodes. You'll need to then do additional editing on
# the parse tree as needed (for example, replacing a specific value with a pattern).
import parser
from symbol import sym_name
from token import tok_name
from pprint import pprint
# pretty-prints a symbolic parse tree for expr (as for use with 'eval')
# the symbolic names will be strings, so to use this as a constant
# in some code you'll need to replace the quotes with nothing
# (except for the actual string constants ...)
def print_eval_tree(expr):
t = parser.ast2tuple(parser.expr(expr))
# t = parser.ast2tuple(parser.suite(expr))
pprint(integer2symbolic(t))
# same as print_eval_tree, except as for use with 'exec' (for definitions, statements, etc)
def print_exec_tree(expr):
t = parser.ast2tuple(parser.suite(expr))
pprint(integer2symbolic(t))
# take a parse tree represented as a tuple, and return a new tuple
# where the integers representing internal nodes and terminal nodes are
# replaced with symbolic names
def integer2symbolic(fragment):
head = fragment[0]
if head in sym_name:
rest = tuple(map(integer2symbolic, fragment[1:]))
return ('symbol.' + sym_name[head], ) + rest
if head in tok_name:
return ('token.' + tok_name[head], ) + fragment[1:]
raise ValueError("bad value in parsetree")
# examples of use:
# print_eval_tree("urbansim.gridcell.population**2")
# print_exec_tree("x = urbansim.gridcell.population**2")
s = """def foo(x=5):
y = x+3
return y*2
"""
print_exec_tree(s)
|
JimboMonkey1234/pushserver
|
handlers/Handler.py
|
Python
|
mit
| 725
| 0.002759
|
#!/usr/bin/env python
from collections import namedtuple
Payload = namedtuple('Payload', ['iden', 'body', 'send_date', 'sender'])
class Handler(object):
@staticmethod
def config():
        return
def __init__(self, logger):
self.logger = logger
def create_translator():
return
def create_listener(task):
return
def configure_modules(modules, push_config):
return
class Translator(object):
def get_recent():
return
def is_valid(message):
return
def get_module(message, modules):
return
    def cleanup(message):
return
def to_payload(message):
return
def respond(message, response):
return
|
moopie/botologist
|
plugins/streams/cache.py
|
Python
|
mit
| 445
| 0.031461
|
class StreamCache:
def __init__(self):
self.initiated = False
self.new_cache = []
self.old_cache = []
def push(self, streams):
assert isinstance(streams, list)
self.old_cache = self.new_cache
self.new_cache = streams
if not self.initiated:
            self.initiated = True
def get_all(self):
return set(self.new_cache + self.old_cache)
def __contains__(self, stream):
return stream in self.new_cache or stream in self.old_cache
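# Minimal usage sketch (assumed; the plugin presumably pushes the result of
# each poll, so a stream survives exactly one missed poll via old_cache):
#   cache = StreamCache()
#   cache.push(['streamer_a', 'streamer_b'])
#   cache.push(['streamer_b'])
#   'streamer_a' in cache   # True - still held by old_cache
#   cache.get_all()         # {'streamer_a', 'streamer_b'}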
|
SLiana/inf1340_2015_asst1
|
exercise3.py
|
Python
|
mit
| 2,130
| 0.004695
|
#!/usr/bin/env python
""" Assignment 1, Exercise 3, INF1340, Fall, 2015. Troubleshooting Car Issues.
This module contains one function diagnose_car(). It is an expert system to
interactively diagnose car issues.
"""
__author__ = 'Susan Sim'
__email__ = "ses@drsusansim.org"
__copyright__ = "2015 Susan Sim"
__license__ = "MIT License"
"""
"""
# Interactively queries the user with yes/no questions to identify a possible issue with a car.
# Inputs: As is but not nested - same indentation all the way through
# Expected Outputs: To follow the decision logic of the question tree
# Errors: Did not proceed according to logic. Fixed by nesting properly.
"""
"""
def diagnose_car():
silent = raw_input("Is the car silent when you turn the key? ")
#this begins the line of questions on the left side of the question tree
if silent == 'Y':
corroded = raw_input("Are the battery terminals corroded?")
if corroded == 'Y':
print "Clean terminals and try starting again."
elif corroded == 'N':
print "Replace cables and try again."
elif silent == 'N':
#this begins the line of questions on the right side of the question tree
clicking = raw_input("Does the car make a clicking noise?")
if clicking == 'Y':
print "Replace the battery."
elif clicking == 'N':
crank = raw_input("Does the car crank up but fails to start?")
if crank == 'Y':
print "Check spark plug connections."
elif crank == 'N':
start_and_die = raw_input("Does the engine start and then die?")
if start_and_die == 'Y':
fuel_injection = raw_input("Does your car have fuel injection?")
if fuel_injection == 'N':
print "Check to ensure the choke is opening and closing."
elif fuel_injection == 'Y':
print "Get it in for service."
elif start_and_die == 'N':
print "Engine is not getting enough fuel. Clean fuel pump."
diagnose_car()
|
22i/minecraft-voxel-blender-models
|
models/extra/blender-scripting/lib/iron_golem.py
|
Python
|
gpl-3.0
| 1,325
| 0.018113
|
import bpy
import os
# join them together ctrl+j
bpy.ops.object.join()
def get_override(area_type, region_type):
for area in bpy.context.screen.areas:
if area.type == area_type:
for region in area.regions:
                if region.type == region_type:
override = {'area': area, 'region': region}
return override
    #error message if the area or region wasn't found
raise RuntimeError("Wasn't able to find", region_type," in area ", area_type,
"\n Make sure it's open while executing script.")
#we need to override the context of our operator
override = get_override( 'VIEW_3D', 'WINDOW' )
#rotate about the X-axis by 45 degrees
bpy.ops.transform.rotate(override, axis=(0,0,1))
bpy.ops.transform.rotate(override, axis=(0,0,1))
blend_file_path = bpy.data.filepath
directory = os.path.dirname(blend_file_path)
#target_file = os.path.join(directory, 'agent.obj')
#target_file = os.path.join(directory, 'exported/agent.b3d')
target_file = os.path.join(directory, 'iron_golem.b3d')
#bpy.ops.export_scene.obj(filepath=target_file)
bpy.ops.screen.b3d_export(filepath=target_file)
#bpy.ops.export_scene.obj()
#bpy.ops.screen.b3d_export()
# exits blender
bpy.ops.wm.quit_blender()
|
PeerioTechnologies/peerio-client-mobile
|
tests/test.py
|
Python
|
gpl-3.0
| 414
| 0.002415
|
import time
import appium
import selenium
from common.helper import *
from common.processes import *
capabilities = {
"androidDeviceSocket": "com.peerio_devtools_remote",
"chromeOptions": {
'androidPackage': 'com.peerio',
'androidActivity': '.MainActivity',
"androidDeviceSocket": "co
|
m.peerio_devtools_remote"
}
}
restartAppium()
restartChromedriver()
test_connect_android()
|
joushou/stackable
|
utils.py
|
Python
|
mit
| 3,175
| 0.033701
|
#
# Utility stackables
#
from __future__ import print_function, absolute_import, unicode_literals, division
from stackable.stackable import Stackable, StackableError
import json, pickle
from time import sleep
from threading import Thread, Event
from datetime import datetime, timedelta
class StackablePickler(Stackable):
'Pickle codec'
def process_input(self, data):
return pickle.loads(data)
def process_output(self, data):
return pickle.dumps(data, protocol=2)
class StackableJSON(Stackable):
'JSON codec'
def process_input(self, data):
try:
return json.loads(data)
except ValueError:
return None
def process_output(self, data):
return json.dumps(data)
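# Hedged round-trip sketch for the codec above (standalone use; in practice
# these objects are composed into a protocol stack by the framework):
#   codec = StackableJSON()
#   wire = codec.process_output({'op': 'ping'})   # -> '{"op": "ping"}'
#   codec.process_input(wire)                     # -> {'op': 'ping'}
#   codec.process_input('not json')               # -> None (bad input dropped)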
class StackableWriter(Stackable):
'Reads and writes from/to a file'
def __init__(self, filename):
super(StackableWriter, self).__init__()
self.fd = open(filename, "w")
def process_input(self, data):
self.fd.write(data)
self.fd.flush()
def process_output(self, data):
return data
# def poll(self):
# return self.fd.read()
class StackablePrinter(Stackable):
'''Prints all input and output, and returns it unmodified.
Useful for quick debugging of Stackables.'''
def __init__(self, printer=print):
'Takes a printing function as argument - defaults to print'
self.printer = printer
super(StackablePrinter, self).__init__()
def process_input(self, data):
self.printer(data)
return data
def process_output(self, data):
self.printer(data)
return data
import sys
class StackableStdout(Stackable):
def process_input(self, data):
sys.stdout.write(data)
return data
def process_output(self, data):
return data
from collections import deque
class StackableInjector(Stackable):
def __init__(self):
super(StackableInjector, self).__init__()
self.in_buf = deque()
self.out_buf = deque()
def push(self, data):
self.in_buf.append(data)
def poll(self):
if len(self.in_buf):
return self.in_buf.popleft()
return None
def process_output(self, data):
self.out_buf.append(data)
return data
class StackablePoker(Stackable):
def __init__(self, interval=20, send=True, ping_string='__stack_ping', pong_string='__stack_pong'):
super(StackablePoker, self).__init__()
self.ping_string = ping_string.encode('utf-8')
self.pong_string = pong_string.encode('utf-8')
self.w = Event()
self.interval = interval
self.send = send
if self.send:
self.reset()
def _detach(self):
super(StackablePoker, self)._detach()
self.w.set()
def reset(self):
        self.timestamp = datetime.now()
def ping():
self.w.wait(self.interval)
try:
                self._feed(self.ping_string)
except:
pass
x = Thread(target=ping)
x.daemon = True
x.start()
def process_output(self, data):
if self.send and (datetime.now() - self.timestamp) > timedelta(seconds=30):
raise StackableError('Pong not received')
return data
def process_input(self, data):
if data == self.pong_string:
self.reset()
return None
elif data == self.ping_string:
self._feed(self.pong_string)
return None
elif self.send and (datetime.now() - self.timestamp) > timedelta(seconds=30):
raise StackableError('Pong not received')
return data
|
GarrettArm/TheDjangoBook
|
mysite_project/milage/urls.py
|
Python
|
gpl-3.0
| 452
| 0.002212
|
from django.urls import path, include
from .routers import router
from . import views
app_name = "milage"
urlpatterns = [
path("api/", include(router.urls), name="api_router"),
path("class-based/", views.ClassBas
|
edView.as_view(), name="class_based_drf"),
path(
"class-based-detail/<int:pk>",
views.ClassBasedDetailView.as_view(),
name="class_detail",
),
path("", views.BaseView.as_view(), name="index"),
]
|
Anber/django-extended-messages
|
setup.py
|
Python
|
bsd-3-clause
| 1,190
| 0.017647
|
import os, extended_messages
from setuptools import setup, find_packages
if extended_messages.VERSION[-1] == 'final':
CLASSIFIERS = ['Development Status :: 5 - Stable']
elif 'beta' in extended_messages.VERSION[-1]:
CLASSIFIERS = ['Development Status :: 4 - Beta']
else:
CLASSIFIERS = ['Development Status :: 3 - Alpha']
CLASSIFIERS += [
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
]
setup(
author = extended_messages.__maintainer__,
author_email = extended_messages.__email__,
name = 'django-extended-messages',
version = extended_messages.__version__,
description = 'Extended version of django.contrib.messages',
long_description = open(os.path.join(os.path.dirname(__file__), 'README.md')).read(),
    url = 'http://github.com/Anber/django-extended-messages/tree/master',
license = 'BSD License',
platforms=['OS Independent'],
classifiers = CLASSIFIERS,
requires=[
'django (>1.2.0)',
'simplejson',
],
packages=find_packages(),
zip_safe=False
)
|
kafan15536900/ADfree-Player-Offline
|
onServer/ruletool/oconfiglist.py
|
Python
|
gpl-3.0
| 562
| 0
|
[
{
"name": "syoukuloader",
"status": "0"
},
{
"name": "syoukuplayer",
"status": "0"
},
{
"name": "sku6",
"status": "0"
},
{
"name": "studou",
"status": "0"
},
{
"name": "sletv",
"status": "0"
},
{
"name": "siqiyi",
"status": "0"
},
{
"name": "spps",
"status": "0"
},
{
"name
|
": "ssohu",
"status": "0"
},
{
"name": "ssohu_live",
"status": "0"
}
]
|
vyos-legacy/vyconfd
|
vyconf/utils/__init__.py
|
Python
|
lgpl-2.1
| 35
| 0
|
from .item_status import * # noqa
|
belokop/indico_bare
|
indico/modules/groups/forms.py
|
Python
|
gpl-3.0
| 1,904
| 0.000525
|
# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from wtforms.fields import StringField, BooleanField, SelectField
from wtforms.validators import DataRequired, ValidationError
from indico.core.db import db
from indico.modules.groups.models.groups import LocalGroup
from indico.util.i18n import _
from indico.web.forms.base import IndicoForm
from indico.web.forms.fields import PrincipalListField
class SearchForm(IndicoForm):
provider = SelectField(_('Provider'))
name = StringField(_('Group name'), [DataRequired()])
exact = BooleanField(_('Exact match'))
class EditGroupForm(IndicoForm):
name = StringField(_('Group name'), [DataRequired()])
members = PrincipalListField(_('Group members'))
def __init__(self, *args, **kwargs):
self.group = kwargs.pop('group', None)
super(EditGroupForm, self).__init__(*args, **kwargs)
def validate_name(self, field):
query = LocalGroup.find(db.func.lower(LocalGroup.name) == field.data.lower())
if self.group:
query = query.filter(LocalGroup.id != self.group.id)
if query.count():
raise ValidationError(_('A group with this name already exists.'))
|
jballanc/openmicroscopy
|
components/tools/OmeroWeb/omeroweb/webclient/forms.py
|
Python
|
gpl-2.0
| 111,021
| 0.010998
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
#
#
# Copyright (c) 2008-2011 University of Dundee.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Author: Aleksandra Tarkowska <A(dot)Tarkowska(at)dundee(dot)ac(dot)uk>, 2008.
#
# Version: 1.0
#
import datetime
import time
import logging
import traceback
from django.conf import settings
from django import forms
from django.forms.widgets import Textarea
from django.forms.widgets import HiddenInput
from django.core.urlresolvers import reverse
from omeroweb.custom_forms import NonASCIIForm
from custom_forms import UrlField, MetadataModelChoiceField, \
AnnotationModelMultipleChoiceField, \
ObjectModelMultipleChoiceField
from omeroweb.webadmin.custom_forms import ExperimenterModelChoiceField, \
ExperimenterModelMultipleChoiceField, \
GroupModelMultipleChoiceField, GroupModelChoiceField
logger = logging.getLogger(__name__)
##################################################################
# Static values
# TODO: change to reverse
help_button = "%swebgateway/img/help16.png" % settings.STATIC_URL
help_wiki = '<span id="markup" title="Markups - <small>If you\'d like to include URL please type:<br/><b>http://www.openmicroscopy.org.uk/</b></small>"><img src="%s" /></span>' % help_button
help_wiki_c = '<span id="markup_c" title="Markups - <small>If you\'d like to include URL please type:<br/><b>http://www.openmicroscopy.org.uk/</b></small>"><img src="%s" /></span>' % help_button
help_enable = '<span id="enable" title="Enable/Disable - <small>This option allows the owner to keep the access control of the share.</small>"><img src="%s" /></span>' % help_button
help_expire = '<span id="expire" title="Expire date - <small>This date defines when share will stop being available. Date format: YY-MM-DD.</small>"><img src="%s" /></span>' % help_button
#################################################################
# Non-model Form
class GlobalSearchForm(NonASCIIForm):
search_query = forms.CharField(widget=forms.TextInput(attrs={'size':25}))
class ShareForm(NonASCIIForm):
def __init__(self, *args, **kwargs):
super(ShareForm, self).__init__(*args, **kwargs)
try:
if kwargs['initial']['shareMembers']: pass
self.fields['members'] = ExperimenterModelMultipleChoiceField(queryset=kwargs['initial']['experimenters'], initial=kwargs['initial']['shareMembers'], widget=forms.SelectMultiple(attrs={'size':5}))
except:
self.fields['members'] = ExperimenterModelMultipleChoiceField(queryset=kwargs['initial']['experimenters'], widget=forms.SelectMultiple(attrs={'size':5}))
self.fields.keyOrder = ['message', 'expiration', 'enable', 'members']#, 'guests']
message = forms.CharField(widget=forms.Textarea(attrs={'rows': 7, 'cols': 39}), help_text=help_wiki_c)
expiration = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':20}), label="Expire date", help_text=help_expire, required=False)
enable = forms.CharField(widget=forms.CheckboxInput(attrs={'size':1}), required=False, help_text=help_enable)
#guests = MultiEmailField(required=False, widget=forms.TextInput(attrs={'size':75}))
def clean_expiration(self):
if self.cleaned_data['expiration'] is not None and len(self.cleaned_data['expiration']) < 1:
return None
if self.cleaned_data['expiration'] is not None:
d = str(self.cleaned_data['expiration']).rsplit("-")
try:
date = datetime.datetime.strptime(("%s-%s-%s" % (d[0],d[1],d[2])), "%Y-%m-%d")
except:
raise forms.ValidationError('Date is in the wrong format. YY-MM-DD')
if time.mktime(date.timetuple()) <= time.time():
raise forms.ValidationError('Expire date must be in the future.')
return self.cleaned_data['expiration']
class BasketShareForm(ShareForm):
def __init__(self, *args, **kwargs):
super(BasketShareForm, self).__init__(*args, **kwargs)
try:
self.fields['image'] = GroupModelMultipleChoiceField(queryset=kwargs['initial']['images'], initial=kwargs['initial']['selected'], widget=forms.SelectMultiple(attrs={'size':10}))
except:
self.fields['image'] = GroupModelMultipleChoiceField(queryset=kwargs['initial']['images'], widget=forms.SelectMultiple(attrs={'size':10}))
class ContainerForm(NonASCIIForm):
name = forms.CharField(max_length=250, widget=forms.TextInput(attrs={'size':45}))
description = forms.CharField(widget=forms.Textarea(attrs={'rows': 2, 'cols': 49}), required=False, help_text=help_wiki)
class ContainerNameForm(NonASCIIForm):
name = forms.CharField(max_length=250, widget=forms.TextInput(attrs={'size':45}))
class ContainerDescriptionForm(NonASCIIForm):
description = forms.CharField(widget=forms.Textarea(attrs={'rows': 3, 'cols': 39}), required=False)
class BaseAnnotationForm(NonASCIIForm):
"""
This is the superclass of the various forms used for annotating single or multiple objects.
All these forms use hidden fields to specify the object(s) currently being annotated.
"""
def __init__(self, *args, **kwargs):
super(BaseAnnotationForm, self).__init__(*args, **kwargs)
images = 'images' in kwargs['initial'] and kwargs['initial']['images'] or list()
if len(images) > 0:
try:
self.fields['image'] = ObjectModelMultipleChoiceField(queryset=images, initial=kwargs['initial']['selected']['images'], widget=forms.SelectMultiple(attrs={'size':10}), required=False)
except:
self.fields['image'] = ObjectModelMultipleChoiceField(queryset=images, widget=forms.SelectMultiple(attrs={'size':10}), required=False)
datasets = 'datasets' in kwargs['initial'] and kwargs['initial']['datasets'] or list()
if len(datasets) > 0:
try:
self.fields['dataset'] = ObjectModelMultipleChoiceField(queryset=datasets, initial=kwargs['initial']['selected']['datasets'], widget=forms.SelectMultiple(attrs={'size':10}), required=False)
except:
self.fields['dataset'] = ObjectModelMultipleChoiceField(queryset=datasets, widget=forms.SelectMultiple(attrs={'size':10}), required=False)
projects = 'projects' in kwargs['initial'] and kwargs['initial']['projects'] or list()
if len(projects) > 0:
try:
self.fields['project'] = ObjectModelMultipleChoiceField(queryset=projects, initial=kwargs['initial']['selected']['projects'], widget=forms.SelectMultiple(attrs={'size':10}), required=False)
except:
self.fields['project'] = ObjectModelMultipleChoiceField(queryset=projects, widget=forms.SelectMultiple(attrs={'size':10}), required=False)
screens = 'screens' in kwargs['initial'] and kwargs['initial']['screens'] or list()
if len(screens) > 0:
try:
self.fields['screen'] = ObjectModelMultipleChoiceField(queryset=screens, initial=kwargs['initial']['selected']['screens'], widget=forms.SelectMultiple(attrs={'size':10}), required=False)
except:
self.fields['screen'] = ObjectModelMultipleChoiceField(queryset=screens, widget=forms.SelectMultiple(attrs={'size':10}), required=False)
plates = 'plates' in kwargs['initial
|
shashankjagannath/shashankfoo
|
genresigpath.py
|
Python
|
cc0-1.0
| 1,485
| 0.032997
|
import time
import glob
import os
import types
import socket
def read_paths ():
fulllist = []
    for file in glob.glob("*96*messages"):
print 'reading ' + file
fullfile = (open(file).read().splitlines())
for x in fullfile:
if 'RPD_MPLS_LSP_CHANGE'in x and 'Sep 17' in x:
if 'flag' in x:
fulllist.append(x.split())
print 'done reading'
return fulllist
newpaths=read_paths()
dnsdict = {}
def convert_paths (newpaths):
convertedpaths = []
dnsfile = (open("/home/mkasten/c
|
onfigs/addresses.txt").read().splitlines())
for x in dnsfile:
if '96c'in x or 'ibr' in x or '96l' in x or '20lsr' in x :
dnsdict[x.split(":")[0]] = x.split(":")[1] +" " + x.split(":")[2]
for x in newpaths:
z = [x[8],x[12]]
for y in x:
if 'flag=0x2' in y:
rest = y.split('(',1)[0]
                # Look the address up only after it is cached; the original
                # appended dnsdict[rest] before the membership check, which
                # raised KeyError for hosts missing from addresses.txt.
                if rest in dnsdict:
                    z.append(dnsdict[rest])
                else:
                    try:
                        a = socket.gethostbyaddr(rest)[0]
                    except:
                        print "Unknown : " + rest
                        a = rest
                    dnsdict[rest] = a
                    z.append(a)
                    z.append(a.split('.', 1)[0])
convertedpaths.append(z)
print 'done converting'
return convertedpaths
listofresignals = convert_paths(newpaths)
filename = 'resignallists'
outputfile = open(filename,'w')
print 'starting write'
for resig in listofresignals:
outputfile.write( ' '.join(resig) +'\n')
|
SunDwarf/Pyte
|
pyte/tokens_33.py
|
Python
|
mit
| 2,143
| 0.000467
|
# _V!(3, 3)
# This file was automatically generated by `dump_dis.py`.
# This file is designed for Python (3, 3).
import sys
# Check Python version
if sys.version_info[0:2] != (3, 3):
raise SystemError("Inappropriate Python version for these bytecode symbols.")
# Begin tokens. These are ordered.
POP_TOP = 1
ROT_TWO = 2
ROT_THREE = 3
DUP_TOP = 4
DUP_TOP_TWO = 5
NOP = 9
UNARY_POSITIVE = 10
UNARY_NEGATIVE = 11
UNARY_NOT = 12
UNARY_INVERT = 15
BINARY_POWER = 19
BINARY_MULTIPLY = 20
BINARY_MODULO = 22
BINARY_ADD = 23
BINARY_SUBTRACT = 24
BINARY_SUBSCR = 25
BINARY_FLOOR_DIVIDE = 26
BINARY_TRUE_DIVIDE = 27
INPLACE_FLOOR_DIVIDE = 28
INPLACE_TRUE_DIVIDE = 29
STORE_MAP = 54
INPLACE_ADD = 55
INPLACE_SUBTRACT = 56
INPLACE_MULTIPLY = 57
INPLACE_MODULO = 59
STORE_SUBSCR = 60
DELETE_SUBSCR = 61
BINARY_LSHIFT = 62
BINARY_RSHIFT = 63
BINARY_AND = 64
BINARY_XOR = 65
BINARY_OR = 66
INPLACE_POWER = 67
GET_ITER = 68
STORE_LOCALS = 69
PRINT_EXPR = 70
LOAD_BUILD_CLASS = 71
YIELD_FROM = 72
INPLACE_LSHIFT = 75
INPLACE_RSHIFT = 76
INPLACE_AND = 77
INPLACE_XOR = 78
INPLACE_OR = 79
BREAK_LOOP = 80
WITH_CLEANUP = 81
RETURN_VALUE = 83
IMPORT_STAR = 84
YIELD_VALUE = 86
POP_BLOCK = 87
END_FINALLY = 88
POP_EXCEPT = 89
STORE_NAME = 90
DELETE_NAME = 91
UNPACK_SEQUENCE = 92
FOR_ITER = 93
UNPACK_EX = 94
STORE_ATTR = 95
DELETE_ATTR = 96
STORE_GLOBAL = 97
DELETE_GLOBAL = 98
LOAD_CONST = 100
LOAD_NAME = 101
BUILD_TUPLE = 102
BUILD_LIST = 103
BUILD_SET = 104
BUILD_MAP = 105
LOAD_ATTR = 106
COMPARE_OP = 107
IMPORT_NAME = 108
IMPORT_FROM = 109
JUMP_FORWARD = 110
JUMP_IF_FALSE_OR_POP = 111
JUMP_IF_TRUE_OR_POP = 112
JUMP_ABSOLUTE = 113
POP_JUMP_IF_FALSE = 114
POP_JUMP_IF_TRUE = 115
LOAD_GLOBAL = 116
CONTINUE_LOOP = 119
SETUP_LOOP = 120
SETUP_EXCEPT = 121
SETUP_FINALLY = 122
LOAD_FAST = 124
STORE_FAST = 125
DELETE_FAST = 126
RAISE_VARARGS = 130
CALL_FUNCTION = 131
MAKE_FUNCTION = 132
BUILD_SLICE = 133
MAKE_CLOSURE = 134
LOAD_CLOSURE = 135
LOAD_DEREF = 136
STORE_DEREF = 137
DELETE_DEREF = 138
CALL_FUNCTION_VAR = 140
CALL_FUNCTION_KW = 141
CALL_FUNCTION_VAR_KW = 142
SETUP_WITH = 143
EXTENDED_ARG = 144
LIST_APPEND = 145
SET_ADD = 146
MAP_ADD = 147
|
EmanueleCannizzaro/scons
|
src/engine/SCons/Tool/applelink.py
|
Python
|
mit
| 2,828
| 0.003182
|
"""SCons.Tool.applelink
Tool-specific initialization for the Apple gnu-like linker.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001 - 2016 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/applelink.py rel_2.5.1:3735:9dc6cee5c168 2016/11/03 14:02:02 bdbaddog"
import SCons.Util
# Even though the Mac is based on the GNU toolchain, it doesn't understand
# the -rpath option, so we use the "link" tool instead of "gnulink".
import link
def generate(env):
"""Add Builders and construction variables for applelink to an
Environment."""
link.generate(env)
env['FRAMEWORKPATHPREFIX'] = '-F'
env['_FRAMEWORKPATH'] = '${_concat(FRAMEWORKPATHPREFIX, FRAMEWORKPATH, "", __env__)}'
env['_FRAMEWORKS'] = '${_concat("-framework ", FRAMEWORKS, "", __env__)}'
env['LINKCOM'] = env['LINKCOM'] + ' $_FRAMEWORKPATH $_FRAMEWORKS $FRAMEWORKSFLAGS'
env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -dynamiclib')
    env['SHLINKCOM'] = env['SHLINKCOM'] + ' $_FRAMEWORKPATH $_FRAMEWORKS $FRAMEWORKSFLAGS'
# override the default for loadable modules, which are different
# on OS X than dynamic shared libs. echoing what XCode does for
# pre/suffixes:
env['LDMODULEPREFIX'] = ''
env['LDMODULESUFFIX'] = ''
env['LDMODULEFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -bundle')
env['LDMODULECOM'] = '$LDMODULE -o ${TARGET} $LDMODULEFLAGS $SOURCES $_LIBDIRFLAGS $_LIBFLAGS $_FRAMEWORKPATH $_FRAMEWORKS $FRAMEWORKSFLAGS'
def exists(env):
return env['PLATFORM'] == 'darwin'
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
kzlin129/practice-typing
|
lib/python2.7/site-packages/profilehooks.py
|
Python
|
apache-2.0
| 26,416
| 0.000341
|
"""
Profiling hooks
This module contains a couple of decorators (`profile` and `coverage`) that
can be used to wrap functions and/or methods to produce profiles and line
coverage reports. There's a third convenient decorator (`timecall`) that
measures the duration of function execution without the extra profiling
overhead.
Usage example (Python 2.4 or newer)::
from profilehooks import profile, coverage
@profile # or @coverage
def fn(n):
if n < 2: return 1
else: return n * fn(n-1)
print(fn(42))
Or without imports, with some hack
$ python -m profilehooks yourmodule
@profile # or @coverage
def fn(n):
if n < 2: return 1
else: return n * fn(n-1)
print(fn(42))
Usage example (Python 2.3 or older)::
from profilehooks import profile, coverage
def fn(n):
if n < 2: return 1
else: return n * fn(n-1)
# Now wrap that function in a decorator
fn = profile(fn) # or coverage(fn)
print fn(42)
Reports for all thusly decorated functions will be printed to sys.stdout
on program termination. You can alternatively request for immediate
reports for each call by passing immediate=True to the profile decorator.
There's also a @timecall decorator for printing the time to sys.stderr
every time a function is called, when you just want to get a rough measure
instead of a detailed (but costly) profile.
Caveats
A thread on python-dev convinced me that hotshot produces bogus numbers.
See http://mail.python.org/pipermail/python-dev/2005-November/058264.html
I don't know what will happen if a decorated function will try to call
another decorated function. All decorators probably need to explicitly
support nested profiling (currently TraceFuncCoverage is the only one
that supports this, while HotShotFuncProfile has support for recursive
functions.)
Profiling with hotshot creates temporary files (*.prof for profiling,
*.cprof for coverage) in the current directory. These files are not
cleaned up. Exception: when you specify a filename to the profile
decorator (to store the pstats.Stats object for later inspection),
the temporary file will be the filename you specified with '.raw'
appended at the end.
Coverage analysis with hotshot seems to miss some executions resulting
    in lower line counts and some lines erroneously marked as never
executed. For this reason coverage analysis now uses trace.py which is
slower, but more accurate.
Copyright (c) 2004--2014 Marius Gedminas <marius@pov.lt>
Copyright (c) 2007 Hanno Schlichting
Copyright (c) 2008 Florian Schulze
Released under the MIT licence since December 2006:
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
(Previously it was distributed under the GNU General Public Licence.)
"""
__author__ = "Marius Gedminas <marius@gedmin.as>"
__copyright__ = "Copyright 2004-2015 Marius Gedminas and contributors"
__license__ = "MIT"
__version__ = "1.8.0"
__date__ = "2015-03-25"
import atexit
import inspect
import sys
import re
# For profiling
from profile import Profile
import pstats
# For hotshot profiling (inaccurate!)
try:
import hotshot
import hotshot.stats
except ImportError:
hotshot = None
# For trace.py coverage
import trace
# For hotshot coverage (inaccurate!; uses undocumented APIs; might break)
if hotshot is not None:
import _hotshot
import hotshot.log
# For cProfile profiling (best)
try:
import cProfile
except ImportError:
cProfile = None
# For timecall
import time
# registry of available profilers
AVAILABLE_PROFILERS = {}
__all__ = ['coverage', 'coverage_with_hotshot', 'profile', 'timecall']
def profile(fn=None, skip=0, filename=None, immediate=False, dirs=False,
sort=None, entries=40,
profiler=('cProfile', 'profile', 'hotshot'),
stdout=True):
"""Mark `fn` for profiling.
If `skip` is > 0, first `skip` calls to `fn` will not be profiled.
If `immediate` is False, profiling results will be printed to
sys.stdout on program termination. Otherwise results will be printed
after each call. (If you don't want this, set stdout=False and specify a
`filename` to store profile data.)
If `dirs` is False only the name of the file will be printed.
Otherwise the full path is used.
`sort` can be a list of sort keys (defaulting to ['cumulative',
'time', 'calls']). The following ones are recognized::
'calls' -- call count
'cumulative' -- cumulative time
'file' -- file name
'line' -- line number
'module' -- file name
'name' -- function name
'nfl' -- name/file/line
'pcalls' -- call count
'stdname' -- standard name
'time' -- internal time
`entries` limits the output to the first N entries.
`profiler` can be used to select the preferred profiler, or specify a
    sequence of them, in order of preference. The default is ('cProfile',
'profile', 'hotshot').
If `filename` is specified, the profile stats will be stored in the
named file. You can load them with pstats.Stats(filename) or use a
visualization tool like RunSnakeRun.
Usage::
def fn(...):
...
fn = profile(fn, skip=1)
If you are using Python 2.4, you should be able to use the decorator
syntax::
@profile(skip=3)
def fn(...):
...
or just ::
@profile
def fn(...):
...
"""
if fn is None: # @profile() syntax -- we are a decorator maker
def decorator(fn):
return profile(fn, skip=skip, filename=filename,
immediate=immediate, dirs=dirs,
sort=sort, entries=entries,
profiler=profiler, stdout=stdout)
return decorator
# @profile syntax -- we are a decorator.
if isinstance(profiler, str):
profiler = [profiler]
for p in profiler:
if p in AVAILABLE_PROFILERS:
profiler_class = AVAILABLE_PROFILERS[p]
break
else:
raise ValueError('only these profilers are available: %s'
% ', '.join(sorted(AVAILABLE_PROFILERS)))
fp = profiler_class(fn, skip=skip, filename=filename,
immediate=immediate, dirs=dirs,
sort=sort, entries=entries, stdout=stdout)
# We cannot return fp or fp.__call__ directly as that would break method
# definitions, instead we need to return a plain function.
def new_fn(*args, **kw):
return fp(*args, **kw)
new_fn.__doc__ = fn.__doc__
    new_fn.__name__ = fn.__name__
new_fn.__dict__ = fn.__dict__
new_fn.__module__ = fn.__module__
return new_fn
def coverage(fn):
"""Mark `fn` for line coverage analysis.
    Results will be printed to sys.stdout on program termination.
Usage::
def fn(...):
...
fn = coverage(fn)
If you are using Python 2.4, you should be able to use the decorator
syntax::
|
MingfeiPan/leetcode
|
array/74.py
|
Python
|
apache-2.0
| 655
| 0.003396
|
# I ran into this problem in an interview. The matrix itself has structure: searching from the bottom-left corner reveals the pattern.
class Solution:
def searchMatrix(self, matrix, target):
"""
:type matrix: List[List[int]]
:type target: int
:rtype: bool
"""
i = len(matrix) - 1
j = 0
flag = 0
        if not matrix or not matrix[0]:
return False
while i >= 0 and j < len(matrix[0]):
if matrix[i][j] == target:
return True
elif matrix[i][j] < target:
j += 1
else:
i -= 1
return False
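# Illustrative checks (not part of the original submission); the bottom-left
# walk moves right when the cell is too small and up when it is too large:
#   Solution().searchMatrix([[1, 4, 7], [2, 5, 8], [3, 6, 9]], 5)   # True
#   Solution().searchMatrix([[1, 4, 7], [2, 5, 8], [3, 6, 9]], 10)  # False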
|
jaufrec/whatnext
|
clog/migrations/0011_auto_20160528_0055.py
|
Python
|
gpl-3.0
| 419
| 0
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-05-28 00:55
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('clog', '0010_auto_20160410_2149'),
]
operations = [
migrations.AlterUniqueTogether(
name='variable',
unique_together=set([('user', 'name')]),
),
]
|
greut/invenio-kwalitee
|
kwalitee/cli/prepare.py
|
Python
|
gpl-2.0
| 6,883
| 0
|
# -*- coding: utf-8 -*-
#
# This file is part of kwalitee
# Copyright (C) 2014, 2015 CERN.
#
# kwalitee is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# kwalitee is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with kwalitee; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
#
# In applying this licence, CERN does not waive the privileges and immunities
# granted to it by virtue of its status as an Intergovernmental Organization
# or submit itself to any jurisdiction.
"""Prepare release news from git log.
Prepares release news from git log messages, breaking release news
into (1) sections (e.g. Security fixes, detected from commit labels)
and (2) modules (e.g. search, detected from commit log headlines).
"""
from __future__ import absolute_import, print_function, unicode_literals
import itertools
import re
import sys
import textwrap
from collections import OrderedDict
from flask import current_app
from flask_script import Manager
from .check import _git_commits, _pygit2_commits
manager = Manager(usage=__doc__)
def analyse_body_paragraph(body_paragraph, labels=None):
"""Analyse commit body paragraph and return (label, message).
>>> analyse_body_paragraph('* BETTER Foo and bar.',
>>> ... {'BETTER': 'Improvements'})
('BETTER', 'Foo and bar.')
>>> analyse_body_paragraph('* Foo and bar.')
(None, 'Foo and bar.')
>>> analyse_body_paragraph('Foo and bar.')
(None, None)
"""
# try to find leading label first:
for label, dummy in labels:
if body_paragraph.startswith('* ' + label):
return (label, body_paragraph[len(label) + 3:].replace('\n ',
' '))
# no conformed leading label found; do we have leading asterisk?
if body_paragraph.startswith('* '):
return (None, body_paragraph[2:].replace('\n ', ' '))
# no leading asterisk found; ignore this paragraph silently:
return (None, None)
def remove_ticket_directives(message):
"""Remove ticket directives like "(closes #123).
>>> remove_ticket_directives('(closes #123)')
'(#123)'
>>> remove_ticket_directives('(foo #123)')
'(foo #123)'
"""
if message:
message = re.sub(r'closes #', '#', message)
message = re.sub(r'addresses #', '#', message)
message = re.sub(r'references #', '#', message)
return message
def amended_commits(commits):
"""Return those git commit sha1s that have been amended later."""
# which SHA1 are declared as amended later?
amended_sha1s = []
for message in commits.values():
amended_sha1s.extend(re.findall(r'AMENDS\s([0-f]+)', message))
return amended_sha1s
def enrich_git_log_dict(messages, labels):
"""Enrich git log with related information on tickets."""
for commit_sha1, message in messages.items():
# detect module and ticket numbers for each commit:
component = None
title = message.split('\n')[0]
try:
component, title = title.split(":", 1)
component = component.strip()
except ValueError:
pass # noqa
paragraphs = [analyse_body_paragraph(p, labels)
for p in message.split('\n\n')]
yield {
'sha1': commit_sha1,
'component': component,
'title': title.strip(),
'tickets': re.findall(r'\s(#\d+)', message),
'paragraphs': [
(label, remove_ticket_directives(message))
for label, message in paragraphs
],
}
@manager.option('repository', default='.', nargs='?', help='repository path')
@manager.option('commit', metavar='<sha or branch>', nargs='?',
default='HEAD', help='an integer for the accumulator')
@manager.option('-c', '--components', default=False, action="store_true",
help='group components', dest='group_components')
def release(commit='HEAD', repository='.', group_components=False):
"""Generate release notes."""
from ..kwalitee import get_options
from ..hooks import _read_local_kwalitee_configuration
options = get_options(current_app.config)
options.update(_read_local_kwalitee_configuration(directory=repository))
try:
sha = 'oid'
commits = _pygit2_commits(commit, repository)
except ImportError:
try:
sha = 'hexsha'
commits = _git_commits(commit, repository)
except ImportError:
print('To use this feature, please install pygit2. GitPython will '
'also work but is not recommended (python <= 2.7 only).',
file=sys.stderr)
return 2
messages = OrderedDict([(getattr(c, sha), c.message) for c in commits])
for commit_sha1 in amended_commits(messages):
if commit_sha1 in messages:
del messages[commit_sha1]
full_messages = list(
enrich_git_log_dict(messages, options.get('commit_msg_labels'))
)
indent = ' ' if group_components else ''
wrapper = textwrap.TextWrapper(
width=70,
initial_indent=indent + '- ',
subsequent_indent=indent + ' ',
)
for label, section in options.get('commit_msg_labels'):
if section is None:
continue
bullets = []
for commit in full_messages:
bullets += [
{'text': bullet, 'component': commit['component']}
for lbl, bullet in commit['paragraphs']
if lbl == label and bullet is not None
]
if len(bullets) > 0:
print(section)
print('-' * len(section))
print()
if group_components:
def key(cmt):
return cmt['component']
for component, bullets in itertools.groupby(
sorted(bullets, key=key), key):
bullets = list(bullets)
if len(bullets) > 0:
print('+ {}'.format(component))
print()
for bullet in bullets:
print(wrapper.fill(bullet['text']))
print()
else:
for bullet in bullets:
print(wrapper.fill(bullet['text']))
print()
return 0
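# Hedged invocation sketch (entry-point name and argument order assumed from
# the Manager wiring above):
#   kwalitee prepare release <repository> <commit> --components
# This walks the git log, drops commits later marked with AMENDS, and prints
# one bullet list per section label, optionally grouped by component.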
|
stephenliu1989/HK_DataMiner
|
hkdataminer/template_matching/Select_angle/__init__.py
|
Python
|
apache-2.0
| 23
| 0.043478
|
from .cal_var_byPCA import *
|
globocom/GloboNetworkAPI-client-python
|
networkapiclient/EquipamentoAmbiente.py
|
Python
|
apache-2.0
| 6,040
| 0.002815
|
# -*- coding:utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from networkapiclient.GenericClient import GenericClient
from networkapiclient.utils import is_valid_int_param
from networkapiclient.exception import InvalidParameterError
class EquipamentoAmbiente(GenericClient):
def __init__(self, networkapi_url, user, password, user_ldap=None):
"""Class constructor receives parameters to connect to the networkAPI.
:param networkapi_url: URL to access the network API.
:param user: User for authentication.
:param password: Password for authentication.
"""
super(
EquipamentoAmbiente,
self).__init__(
networkapi_url,
user,
password,
user_ldap)
def inserir(self, id_equipment, id_environment, is_router=0):
"""Inserts a new Related Equipment with Environment and returns its identifier
:param id_equipment: Identifier of the Equipment. Integer value and greater than zero.
:param id_environment: Identifier of the Environment. Integer value and greater than zero.
        :param is_router: Whether the equipment acts as a router in the Environment. Boolean value.
:return: Dictionary with the following structure:
::
{'equipamento_ambiente': {'id': < id_equipment_environment >}}
:raise InvalidParameterError: The identifier of Equipment or Environment is null and invalid.
:raise AmbienteNaoExisteError: Environment not registered.
:raise EquipamentoNaoExisteError: Equipment not registered.
:raise EquipamentoAmbienteError: Equipment is already associated with the Environment.
:raise DataBaseError: Networkapi failed to access the database.
:raise XMLError: Networkapi failed to generate the XML response.
"""
equipment_environment_map = dict()
equipment_environment_map['id_equipamento'] = id_equipment
equipment_environment_map['id_ambiente'] = id_environment
equipment_environment_map['is_router'] = is_router
code, xml = self.submit(
{'equipamento_ambiente': equipment_environment_map}, 'POST', 'equipamentoambiente/')
return self.response(code, xml)
def remover(self, id_equipment, id_environment):
"""Remove Related Equipment with Environment from by the identifier.
:param id_equipment: Identifier of the Equipment. Integer value and greater than zero.
:param id_environment: Identifier of the Environment. Integer value and greater than zero.
:return: None
        :raise InvalidParameterError: The identifier of Environment or Equipment is null or invalid.
        :raise EquipamentoNotFoundError: Equipment not registered.
:raise EquipamentoAmbienteNaoExisteError: Environment not registered.
:raise VipIpError: IP-related equipment is being used for a request VIP.
:raise XMLError: Networkapi failed to generate the XML response.
:raise DataBaseError: Networkapi failed to access the database.
"""
if not is_valid_int_param(id_equipment):
raise InvalidParameterError(
                u'The identifier of Equipment is invalid or was not informed.')
if not is_valid_int_param(id_environment):
raise InvalidParameterError(
u'The identifier of Environment is invalid or was not informed.')
url = 'equipment/' + \
str(id_equipment) + '/environment/' + str(id_environment) + '/'
code, xml = self.submit(None, 'DELETE', url)
return self.response(code, xml)
def update(self, id_equipment, id_environment, is_router):
"""Remove Related Equipment with Environment from by the identifier.
:param id_equipment: Identifier of the Equipment. Integer value and greater than zero.
:param id_environment: Identifier of the Environment. Integer value and greater than zero.
        :param is_router: Whether the equipment acts as a router in the Environment. Boolean value.
:return: None
        :raise InvalidParameterError: The identifier of Environment or Equipment is null or invalid.
:raise EquipamentoNotFoundError: Equipment not registered.
:raise EquipamentoAmbienteNaoExisteError: Environment not registered.
:raise VipIpError: IP-related equipment is being used for a request VIP.
:raise XMLError: Networkapi failed to generate the XML response.
:raise DataBaseError: Networkapi failed to access the database.
"""
if not is_valid_int_param(id_equipment):
raise InvalidParameterError(
u'The identifier of Equipment is invalid or was not informed.')
if not is_valid_int_param(id_environment):
raise InvalidParameterError(
u'The identifier of Environment is invalid or was not informed.')
equipment_environment_map = dict()
equipment_environment_map['id_equipamento'] = id_equipment
equipment_environment_map['id_ambiente'] = id_environment
equipment_environment_map['is_router'] = is_router
code, xml = self.submit(
{'equipamento_ambiente': equipment_environment_map}, 'PUT', 'equipamentoambiente/update/')
return self.response(code, xml)
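    # Usage sketch (not part of the original source; `client` stands for an
    # authenticated instance of this class, and the ids are illustrative):
    #
    #     resp = client.inserir(id_equipment=10, id_environment=5, is_router=1)
    #     relationship_id = resp['equipamento_ambiente']['id']
    #     client.update(id_equipment=10, id_environment=5, is_router=0)
    #     client.remover(id_equipment=10, id_environment=5)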
|
fedora-conary/conary
|
conary_test/verifytest.py
|
Python
|
apache-2.0
| 12,956
| 0.015282
|
#
# Copyright (c) SAS Institute Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import grp, os, pwd
from conary_test import recipes
from conary.local import database
from conary.cmds import verify
from conary.repository import changeset
from conary_test import rephelp
class VerifyTest(rephelp.RepositoryHelper):
def testDisplay(self):
userDict = {}
userDict['user'], userDict['group'] = self._getUserGroup()
self.resetRepository()
self.resetRoot()
(built, d) = self.buildRecipe(recipes.testRecipe1, "TestRecipe1")
pkgname, version = built[0][:2]
self.updatePkg(self.rootDir, 'testcase', version)
self.writeFile(self.rootDir + '/usr/bin/hello', 'newtext')
sb = os.stat(self.rootDir + '/usr/bin/hello')
# we need the time to change; conary ignores size changes on
# executables to allow it to handle prelink sanely
os.utime(self.rootDir + '/usr/bin/hello', (sb.st_mtime + 1,
sb.st_mtime + 1))
db = database.Database(self.rootDir, self.cfg.dbPath)
rc, str = self.captureOutput(verify.verify, ['testcase'], db, self.cfg)
# XXX verify that output is correct here...will have to ignore
# uid/gid information, as localcs expects everything to be owned
# by root. Can share parsing code with showchangesettest
rc, str2 = self.captureOutput(verify.verify, [], db, self.cfg, all=True)
assert(str == str2)
assert('testcase:runtime' in str)
assert('/usr/bin/hello' in str)
assert(' 7 ' in str)
assert(' 20 ' in str) # make sure original size of file is displayed
        assert(' -rwxr-xr-x ' in str) # make sure original mode of file is
                                      # displayed (even though that wasn't changed)
rc, str = self.captureOutput(verify.verify, ['testcase:runtime'], db,
self.cfg, diffBinaries=True)
self.assertEquals(str,
'diff --git a/etc/changedconfig b/etc/changedconfig\n'
'old user root\n'
'new user %(user)s\n'
'old group root\n'
'new group %(group)s\n'
'diff --git a/etc/unchangedconfig b/etc/unchangedconfig\n'
'old user root\n'
'new user %(user)s\n'
'old group root\n'
'new group %(group)s\n'
'diff --git a/usr/share/changed b/usr/share/changed\n'
'old user root\n'
'new user %(user)s\n'
'old group root\n'
'new group %(group)s\n'
'diff --git a/usr/share/unchanged b/usr/share/unchanged\n'
'old user root\n'
'new user %(user)s\n'
'old group root\n'
'new group %(group)s\n'
'diff --git a/usr/bin/hello b/usr/bin/hello\n'
'old user root\n'
'new user %(user)s\n'
'old group root\n'
'new group %(group)s\n'
'GIT binary patch\n'
'literal 7\n'
'Oc$~{iEiXx}C;<Qr9Rm;m\n'
'\n' % userDict)
self.logFilter.add()
verify.verify(['unknownpkg'], db, self.cfg)
verify.verify(['unknownpkg=@rpl:linux'], db, self.cfg)
self.logFilter.remove()
self.logFilter.compare(('error: trove unknownpkg is not installed',
'error: version @rpl:linux of trove unknownpkg is not installed'))
def testVerifyWithSignatures(self):
# Make sure that verify works with troves that have
# missing components, which means that the collection's signature
# is no good...
self.addComponent('foo:runtime', '1.0', '',
['/foo'])
self.addComponent('foo:data', '1.0')
self.addCollection('foo', '1.0', [':runtime', ':data'])
self.updatePkg(['foo', 'foo:runtime'], recurse=False)
self.writeFile(self.rootDir + '/foo', 'newtext')
db = database.Database(self.rootDir, self.cfg.dbPath)
self.captureOutput(verify.verify, ['foo'], db, self.cfg)
def testVerifyRemovedFiles(self):
# CNY-950
self.addComponent('foo:runtime', '1.0', fileContents = ['/foo'])
self.updatePkg('foo:runtime')
self.removeFile(self.rootDir, '/foo')
db = database.Database(self.rootDir, self.cfg.dbPath)
s = self.captureOutput(verify.verify, ['foo:runtime'], db, self.cfg)
assert(not s[1])
@staticmethod
def _getUserGroup():
user = pwd.getpwuid(os.getuid()).pw_name
group = grp.getgrgid(os.getgid()).gr_name
return user, group
def testVerifyToFile(self):
db = database.Database(self.rootDir, self.cfg.dbPath)
os.chdir(self.workDir)
user, group = self._getUserGroup()
self.addComponent('foo:runtime', '1.0',
fileContents = [('/foo',
rephelp.RegularFile(owner = user,
group = group))])
self.updatePkg('foo:runtime')
s = verify.verify(['foo:runtime'], db, self.cfg,
changesetPath = 'foo.ccs')
cs = changeset.ChangeSetFromFile('foo.ccs')
assert(list(cs.iterNewTroveList()) == [])
f = open(self.rootDir + '/foo', "a")
f.write("mod")
f.close()
s = self.captureOutput(verify.verify, ['foo:runtime'], db, self.cfg,
changesetPath = 'foo.ccs')
assert(not s[1])
cs = changeset.ChangeSetFromFile('foo.ccs')
assert(list(cs.iterNewTroveList())[0].getName() == 'foo:runtime')
def testVerifyAll(self):
os.chdir(self.workDir)
self.addComponent('foo:runtime', '1.0', fileContents = ['/bin/b'])
self.addComponent('bar:lib', '1.0', fileContents = ['/lib/l'])
self.addCollection('foo', [ ':runtime' ])
self.addCollection('bar', [ ':lib' ])
db = database.Database(self.rootDir, self.cfg.dbPath)
self.updatePkg('foo')
self.updatePkg('bar')
verify.verify([], db, self.cfg, all = True, changesetPath = 'foo.ccs')
cs = changeset.ChangeSetFromFile('foo.ccs')
assert(sorted([ x.getName() for x in cs.iterNewTroveList() ]) ==
[ 'bar:lib', 'foo:runtime' ] )
def testHashCheck(self):
# by default, we trust the size/date timestamps
repos = self.openRepository()
db = database.Database(self.rootDir, self.cfg.dbPath)
os.chdir(self.workDir)
user, group = self._getUserGroup()
trv = self.addComponent('foo:runtime',
fileContents = [ ( '/a', rephelp.RegularFile(contents = '1234',
owner = user,
group = group)) ] )
fileInfo = trv.iterFileList().next()
self.updatePkg('foo:runtime')
f = open(self.rootDir + '/a', "w")
f.write('abcd')
f.close()
f = repos.getFileVersions([(fileInfo[0], fileInfo[2], fileInfo[3])])[0]
st = os.stat(self.rootDir + '/a')
os.utime(self.rootDir + '/a', (f.inode.mtime(), f.inode.mtime()))
s = self.captureOutput(verify.verify, ['foo:runtime'], db, self.cfg,
changesetPath = 'foo.ccs')
assert(not s[1])
verify.verify(['foo:runtime'], db, self.cfg, forceHashCheck = True,
changesetPath = 'foo.ccs')
cs = changeset.ChangeSetFromFile('foo.ccs')
assert(cs.files)
def testNe
|
A-Kokolis/thesis-ntua
|
scc_implementation/scc_countermeasures.py
|
Python
|
gpl-3.0
| 2,957
| 0.00372
|
import abc
import logging
from time import sleep, time
from subprocess import call, check_output
from config import sim_dump_location, safe_location, devel
import infoli_diagnostics
import sys
class countermeasure(object):
''' Countermeasure class '''
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def perform(self):
return
''' defines an ordering among the different countermeasures, based on MTTR '''
countermeasure_enum = {
'restartSimulation':0
}
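# Selection sketch (illustrative, not from the original source): given several
# applicable countermeasure classes, the enum above lets a manager pick the
# one with the lowest MTTR-based rank:
#
#     best = min(candidates, key=lambda cm: countermeasure_enum[cm.__name__])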
def wait_for_cores(core_names, timeout):
''' Utility function that blocks until a set of cores is available
or until the timeout is reached
'''
if devel:
return True
t0 = time()
available_cores = 0
while available_cores < len(core_names):
status = check_output(['sccBoot', '-s'])
if status[-11:-8] == "All":
available_cores = 48
elif status[-10:-8] == "No":
available_cores = 0
else:
available_cores = int(status[-10:-8])
if time() - t0 > timeout:
logging.error("Timeout exceeded for %s cores", expected)
return False
sleep(10)
status = check_output(['sccBoot', '-s'])
print status
return True
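# Usage sketch (core names and timeout below are illustrative):
#
#     if wait_for_cores(['rck00', 'rck01'], timeout=300):
#         logging.info("cores available, proceeding with recovery")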
class restartSimulation(countermeasure):
""" Restarts the simulation """
__name__ = 'restartSimulation'
def __init__(self, manager):
self.manager = manager
def perform(self):
logging.info("performing the Restart Simulation countermeasure")
print self.manager.checkpoints
if any(isinstance(x, infoli_diagnostics.infoliOutputDivergence) for x in self.manager.failed_diagnostics()): #infoli-specific
# check if the SDC detection diagnostic has failed, and use the SDC checkpoint
print sorted(self.manager.checkpoints)
checkpoint = max(self.manager.checkpoints)
else:
checkpoint = max(self.manager.checkpoints)
print("The mttr_values are:",self.manager.mttr_values)
print("Calling
|
dvfs: ")
self.manager.dvfs.dvfsOperation(checkpoint)
print "Restarting from step" + str(checkpoint)
logging.info("Restarting from step " + str(checkpoint))
with self.manager.lock:
# Copy safe checkpoints
#for i in range(self.manager.num_cores):
# call( ['cp', '-f', '-u', safe_location + str(checkpoint) + '/ckptFile%d.bin' %i, sim_dump_location])
            # call( ['cp', '-f', '-u', safe_location + str(checkpoint) + '/InferiorOlive_Output%d.txt' %i, sim_dump_location])
self.manager.rccerun([self.manager.restart_exec] + self.manager.exec_list[1:]) # use False as extra last argument to avoid piping stdout for diagnostics - useful for measurements
logging.info("Restart Simulation countermeasure completed")
return True
|
pkovac/evedustrial
|
eve/db.py
|
Python
|
mit
| 1,136
| 0.011444
|
from . xml import EVECACHEPATH
import time
from os import path
import urllib
import MySQLdb as mysql
from urllib2 import urlopen
class EveDb(object):
"""
This class is responsible for loading up an instance of the eve static
dump information. Without this, most functionality of this library will
not work. """
    def __init__(self, database, user, passwd, host="localhost"):
self.db = mysql.connect(host=host, user=user, passwd=passwd, db=database)
def get_item_row(self, id):
cur=self.db.cursor()
cols = ("typeID", "typeName", "description", "volume")
cur.execute("select "+ ",".join(cols) + " from invTypes where typeID = %s", (id,))
        row = cur.fetchone()
row = dict(zip(cols, row))
return row
def get_location_row(self, id):
return
def get_location_by_string(self, id):
return
def get_item_by_string(self, txt):
c = self.db.cursor()
c.execute("select typeName from invTypes where typeName GLOB '%s'", (txt,))
row = c.fetchone()
return row["typeName"]
game_db=EveDb("eve", "eve", "eve")
|
OCA/l10n-spain
|
l10n_es_aeat_mod347/tests/__init__.py
|
Python
|
agpl-3.0
| 104
| 0
|
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from . import test_l10n_es_aeat_mod347
|
mmpagani/oq-hazardlib
|
openquake/hazardlib/gsim/edwards_fah_2013a.py
|
Python
|
agpl-3.0
| 9,481
| 0
|
# -*- coding: utf-8 -*-
# The Hazard Library
# Copyright (C) 2013-2014, GEM Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Module exports
:class:`EdwardsFah2013Alpine10Bars`,
:class:`EdwardsFah2013Alpine20Bars`,
:class:`EdwardsFah2013Alpine30Bars`,
:class:`EdwardsFah2013Alpine50Bars`,
:class:`EdwardsFah2013Alpine60Bars`,
:class:`EdwardsFah2013Alpine75Bars`,
:class:`EdwardsFah2013Alpine90Bars`,
:class:`EdwardsFah2013Alpine120Bars`.
"""
from __future__ import division
import numpy as np
from scipy.constants import g
from openquake.hazardlib.gsim.base import GMPE
from openquake.hazardlib import const
from openquake.hazardlib.imt import PGV, PGA, SA
from openquake.hazardlib.gsim.edwards_fah_2013a_coeffs import (
COEFFS_ALPINE_60Bars,
COEFFS_ALPINE_10Bars,
COEFFS_ALPINE_20Bars,
COEFFS_ALPINE_30Bars,
COEFFS_ALPINE_50Bars,
COEFFS_ALPINE_75Bars,
COEFFS_ALPINE_90Bars,
COEFFS_ALPINE_120Bars
)
from openquake.hazardlib.gsim.utils_swiss_gmpe import (
_compute_phi_ss,
_compute_C1_term
)
class EdwardsFah2013Alpine10Bars(GMPE):
"""
    This class implements the GMPE developed by Ben Edwards and Donat Fah
    and published as "A Stochastic Ground-Motion Model for Switzerland",
    Bulletin of the Seismological Society of America,
    Vol. 103, No. 1, pp. 78–98, February 2013.
    The GMPE was parametrized by Carlo Cauzzi to be implemented in OpenQuake.
    It implements the equations for the 'Alpine' region, one of the two
    tectonic regionalizations ('Alpine' and 'Foreland') defined for
    Switzerland; the GMPE is therefore region specific.
@ implemented by laurentiu.danciu@sed.ethz.zh
"""
#: Supported tectonic region type is ALPINE which
#: is a sub-region of Active Shallow Crust.
DEFINED_FOR_TECTONIC_REGION_TYPE = const.TRT.ACTIVE_SHALLOW_CRUST
#: Supported intensity measure types are spectral acceleration,
#: and peak ground acceleration, see tables 3 and 4, pages 227 and 228.
DEFINED_FOR_INTENSITY_MEASURE_TYPES = set([
PGV,
PGA,
SA
])
#: Supported intensity measure component is the geometric mean of two
#: horizontal components
#: :attr:`~openquake.hazardlib.const.IMC.AVERAGE_HORIZONTAL`
DEFINED_FOR_INTENSITY_MEASURE_COMPONENT = const.IMC.AVERAGE_HORIZONTAL
#: Supported standard deviation type is total,
#: Carlo Cauzzi - Personal Communication
DEFINED_FOR_STANDARD_DEVIATION_TYPES = set([
const.StdDev.TOTAL
])
#: Required site parameter is only Vs30 (used to distinguish rock
#: and deep soil).
REQUIRES_SITES_PARAMETERS = set(('vs30', ))
#: Required rupture parameters: magnitude
REQUIRES_RUPTURE_PARAMETERS = set(('mag', 'rake'))
#: Required distance measure is Rrup
REQUIRES_DISTANCES = set(('rrup', ))
#: Vs30 value representing typical rock conditions in Switzerland.
#: confirmed by the Swiss GMPE group
ROCK_VS30 = 1105
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
"""
See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values.
"""
COEFFS = self.COEFFS[imt]
R = self._compute_term_r(COEFFS, rup.mag, dists.rrup)
mean = 10 ** (self._compute_mean(COEFFS, rup.mag, R))
# Convert units to g,
# but only for PGA and SA (not PGV):
if isinstance(imt, (PGA, SA)):
mean = np.log(mean / (g*100.))
else:
# PGV:
mean = np.log(mean)
c1_rrup = _compute_C1_term(COEFFS, dists.rrup)
log_phi_ss = 1.00
stddevs = self._get_stddevs(
COEFFS, stddev_types, sites.vs30.shape[0], rup.mag, c1_rrup,
log_phi_ss, COEFFS['mean_phi_ss']
)
return mean, stddevs
def _get_stddevs(self, C, stddev_types, num_sites, mag, c1_rrup,
log_phi_ss, mean_phi_ss):
"""
Return standard deviations
"""
phi_ss = _compute_phi_ss(C, mag, c1_rrup, log_phi_ss, mean_phi_ss)
stddevs = []
for stddev_type in stddev_types:
assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
if stddev_type == const.StdDev.TOTAL:
stddevs.append(np.sqrt(
C['tau'] * C['tau'] +
phi_ss * phi_ss) +
np.zeros(num_sites))
elif stddev_type == const.StdDev.INTRA_EVENT:
stddevs.append(phi_ss + np.zeros(num_sites))
elif stddev_type == const.StdDev.INTER_EVENT:
stddevs.append(C['tau'] + np.zeros(num_sites))
return stddevs
def _compute_term_r(self, C, mag, rrup):
"""
Compute distance term
d = log10(max(R,rmin));
"""
if mag > self.M1:
rrup_min = 0.55
elif mag > self.M2:
rrup_min = -2.80 * mag + 14.55
else:
rrup_min = -0.295 * mag + 2.65
R = np.maximum(rrup, rrup_min)
return np.log10(R)
def _compute_term_1(self, C, mag):
"""
Compute term 1
a1 + a2.*M + a3.*M.^2 + a4.*M.^3 + a5.*M.^4 + a6.*M.^5 + a7.*M.^6
"""
return (
            C['a1'] + C['a2'] * mag + C['a3'] *
np.power(mag, 2) + C['a4'] * np.power(mag, 3)
+ C['a5'] * np.power(mag, 4) + C['a6'] *
np.power(mag, 5) + C['a7'] * np.power(mag, 6)
)
def _compute_term_2(self, C, mag, R):
"""
(a8 + a9.*M + a10.*M.*M + a11.*M.*M.*M).*d(r)
"""
return (
(C['a8'] + C['a9'] * mag + C['a10'] * np.power(mag, 2) +
C['a11'] * np.power(mag, 3)) * R
)
    def _compute_term_3(self, C, mag, R):
"""
(a12 + a13.*M + a14.*M.*M + a15.*M.*M.*M).*(d(r).^2)
"""
return (
(C['a12'] + C['a13'] * mag + C['a14'] * np.power(mag, 2) +
C['a15'] * np.power(mag, 3)) * np.power(R, 2)
)
def _compute_term_4(self, C, mag, R):
"""
(a16 + a17.*M + a18.*M.*M + a19.*M.*M.*M).*(d(r).^3)
"""
return (
(C['a16'] + C['a17'] * mag + C['a18'] * np.power(mag, 2) +
C['a19'] * np.power(mag, 3)) * np.power(R, 3)
)
def _compute_term_5(self, C, mag, R):
"""
(a20 + a21.*M + a22.*M.*M + a23.*M.*M.*M).*(d(r).^4)
"""
return (
(C['a20'] + C['a21'] * mag + C['a22'] * np.power(mag, 2) +
C['a23'] * np.power(mag, 3)) * np.power(R, 4)
)
def _compute_mean(self, C, mag, term_dist_r):
"""
        compute mean as the sum of the five magnitude-distance terms above
"""
return (self._compute_term_1(C, mag) +
self._compute_term_2(C, mag, term_dist_r) +
self._compute_term_3(C, mag, term_dist_r) +
self._compute_term_4(C, mag, term_dist_r) +
self._compute_term_5(C, mag, term_dist_r))
#: Fixed magnitude terms
M1 = 5.00
M2 = 4.70
COEFFS = COEFFS_ALPINE_10Bars
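# Reference note (added, not from the original source): the five terms above
# combine into a polynomial in magnitude M and log-distance d,
#
#     log10(mean) = term1(M) + term2(M)*d + term3(M)*d**2
#                   + term4(M)*d**3 + term5(M)*d**4
#
# where d = log10(max(rrup, rmin(M))) as computed in _compute_term_r.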
class EdwardsFah2013Alpine20Bars(EdwardsFah2013Alpine10Bars):
"""
This class extends :class:`EdwardsFah2013Alpine10Bars`
and implements the 20Bars Model :class:`EdwardsFah2013Alpine20Bars`
"""
COEFFS = COEFFS_ALPINE_20Bars
class EdwardsFah2013Alpine30Bars(EdwardsFah2013Alpine10Bars):
"""
This class extends :class:`EdwardsFah2013Alpine10Bars`
    and implements the 30Bars Model :class:`EdwardsFah2013Alpine30Bars`
    """
    COEFFS = COEFFS_ALPINE_30Bars
|
lCharlie123l/django-thumborstorage
|
tests/thumbor_project/thumbor_project/wsgi.py
|
Python
|
mit
| 1,446
| 0.000692
|
"""
WSGI config for thumbor_project project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "thumbor_project.settings"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "thumbor_project.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
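# Serving sketch (illustrative): point any WSGI server at the object above,
# e.g., assuming gunicorn is installed:
#
#     gunicorn thumbor_project.wsgi:application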
|
ntim/g4sipm
|
sample/run/luigi/all.py
|
Python
|
gpl-3.0
| 561
| 0.023173
|
#!/usr/bin/env python
import luigi
import dynamic_range_simulation
import darknoise_simulation
import pde_simulation
import relative_pde_simulation
import n_pe_simulation
import crosstalk_neighbour_simulation
class All(luigi.WrapperTask):
def requires(self):
yield crosstalk_neighbour_simulation.All()
yield darknoise_simulation.All()
yield dynamic_range_simulation.All()
yield n_pe_simulation.All()
yield pde_simulation.All()
        yield relative_pde_simulation.All()
if __name__ == "__main__":
luigi.run(main_task_cls=All)
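# Run sketch (assumption: no central luigi scheduler is running):
#
#     python all.py --local-scheduler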
|
alexm92/sentry
|
tests/sentry/models/test_event.py
|
Python
|
bsd-3-clause
| 1,907
| 0
|
from __future__ import absolute_import
from sentry.testutils import TestCase
class EventTest(TestCase):
def test_legacy_tags(self):
event = self.create_event(data={
'tags': [
('logger', 'foobar'),
('site', 'foo'),
('server_name', 'bar'),
]
})
assert event.logger == 'foobar'
assert event.level == event.group.level
assert event.site == 'foo'
assert event.server_name == 'bar'
assert event.culprit == event.group.culprit
def test_email_subject(self):
event1 = self.create_event(
event_id='a' * 32, group=self.group, tags={'level': 'info'},
message='Foo bar')
event2 = self.create_event(
event_id='b' * 32, group=self.group, tags={'level': 'error'},
message='Foo bar')
self.group.level = 30
assert event1.get_email_subject() == '[foo Bar] INFO: Foo bar'
assert event2.get_email_subject() == '[foo Bar] ERROR: Foo bar'
class EventGetLegacyMessageTest(TestCase):
def test_message(self):
event = self.create_event(message='foo bar')
assert event.get_legacy_message() == 'foo bar'
def test_message_interface(self):
event = self.create_event(
message='biz baz',
data={
'sentry.interfaces.Message': {'message': 'foo bar'}
},
)
assert event.get_legacy_message() == 'foo bar'
    def test_message_interface_with_formatting(self):
event = self.create_event(
message='biz baz',
data={
'sentry.interfaces.Message': {
'message': 'foo %s',
'formatted': 'foo bar',
'params': ['bar'],
}
},
)
assert event.get_legacy_message() == 'foo bar'
|
asedunov/intellij-community
|
python/testData/completion/relativeFromImportInNamespacePackage2/nspkg1/a.after.py
|
Python
|
apache-2.0
| 17
| 0.058824
|
from . import foo
|
joelsmith/openshift-tools
|
ansible/roles/lib_openshift_3.2/library/oadm_ca.py
|
Python
|
apache-2.0
| 35,621
| 0.002892
|
#!/usr/bin/env python # pylint: disable=too-many-lines
# ___ ___ _ _ ___ ___ _ _____ ___ ___
# / __| __| \| | __| _ \ /_\_ _| __| \
# | (_ | _|| .` | _|| / / _ \| | | _|| |) |
# \___|___|_|\_|___|_|_\/_/_\_\_|_|___|___/_ _____
# | \ / _ \ | \| |/ _ \_ _| | __| \_ _|_ _|
# | |) | (_) | | .` | (_) || | | _|| |) | | | |
# |___/ \___/ |_|\_|\___/ |_| |___|___/___| |_|
'''
OpenShiftCLI class that wraps the oc commands in a subprocess
'''
# pylint: disable=too-many-lines
import atexit
import json
import os
import re
import shutil
import subprocess
import ruamel.yaml as yaml
#import yaml
#
## This is here because of a bug that causes yaml
## to incorrectly handle timezone info on timestamps
#def timestamp_constructor(_, node):
# '''return timestamps as strings'''
# return str(node.value)
#yaml.add_constructor(u'tag:yaml.org,2002:timestamp', timestamp_constructor)
class OpenShiftCLIError(Exception):
'''Exception class for openshiftcli'''
pass
# pylint: disable=too-few-public-methods
class OpenShiftCLI(object):
''' Class to wrap the command line tools '''
def __init__(self,
namespace,
kubeconfig='/etc/origin/master/admin.kubeconfig',
verbose=False,
all_namespaces=False):
''' Constructor for OpenshiftCLI '''
self.namespace = namespace
self.verbose = verbose
self.kubeconfig = kubeconfig
self.all_namespaces = all_namespaces
# Pylint allows only 5 arguments to be passed.
# pylint: disable=too-many-arguments
def _replace_content(self, resource, rname, content, force=False, sep='.'):
''' replace the current object with the content '''
res = self._get(resource, rname)
if not res['results']:
return res
fname = '/tmp/%s' % rname
yed = Yedit(fname, res['results'][0], separator=sep)
changes = []
for key, value in content.items():
changes.append(yed.put(key, value))
if any([change[0] for change in changes]):
yed.write()
atexit.register(Utils.cleanup, [fname])
return self._replace(fname, force)
return {'returncode': 0, 'updated': False}
def _replace(self, fname, force=False):
        '''replace the named resource from a file'''
cmd = ['-n', self.namespace, 'replace', '-f', fname]
if force:
cmd.append('--force')
return self.openshift_cmd(cmd)
def _create_from_content(self, rname, content):
        '''create a resource from the given content'''
fname = '/tmp/%s' % rname
yed = Yedit(fname, content=content)
yed.write()
atexit.register(Utils.cleanup, [fname])
return self._create(fname)
def _create(self, fname):
        '''create a resource from a file'''
return self.openshift_cmd(['create', '-f', fname, '-n', self.namespace])
def _delete(self, resource, rname, selector=None):
        '''delete the named resource'''
cmd = ['delete', resource, rname, '-n', self.namespace]
if selector:
cmd.append('--selector=%s' % selector)
return self.openshift_cmd(cmd)
def _process(self, template_name, create=False, params=None, template_data=None):
        '''process a template, optionally creating the resulting objects'''
cmd = ['process', '-n', self.namespace]
if template_data:
cmd.extend(['-f', '-'])
else:
cmd.append(template_name)
if params:
param_str = ["%s=%s" % (key, value) for key, value in params.items()]
cmd.append('-v')
cmd.extend(param_str)
results = self.openshift_cmd(cmd, output=True, input_data=template_data)
if results['returncode'] != 0 or not create:
return results
fname = '/tmp/%s' % template_name
yed = Yedit(fname, results['results'])
yed.write()
atexit.register(Utils.cleanup, [fname])
return self.openshift_cmd(['-n', self.namespace, 'create', '-f', fname])
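    # Invocation sketch (illustrative, not from the original source):
    # _process('my-template', params={'NAME': 'demo'}) builds roughly
    #     oc process -n <namespace> my-template -v NAME=demo
    # and, when create=True, feeds the processed output to `oc create -f <tmpfile>`.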
def _get(self, resource, rname=None, selector=None):
'''return a resource by name '''
cmd = ['get', resource]
if selector:
cmd.append('--selector=%s' % selector)
if self.all_namespaces:
cmd.extend(['--all-namespaces'])
elif self.namespace:
cmd.extend(['-n', self.namespace])
cmd.extend(['-o', 'json'])
if rname:
cmd.append(rname)
rval = self.openshift_cmd(cmd, output=True)
        # Ensure results are returned in an array
if rval.has_key('items'):
rval['results'] = rval['items']
elif not isinstance(rval['results'], list):
rval['results'] = [rval['results']]
return rval
def _schedulable(self, node=None, selector=None, schedulable=True):
        ''' perform oadm manage-node --schedulable '''
cmd = ['manage-node']
if node:
cmd.extend(node)
else:
cmd.append('--selector=%s' % selector)
cmd.append('--schedulable=%s' % schedulable)
return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')
def _list_pods(self, node=None, selector=None, pod_selector=None):
        ''' perform oadm manage-node --list-pods '''
cmd = ['manage-node']
if node:
cmd.extend(node)
else:
cmd.append('--selector=%s' % selector)
if pod_selector:
cmd.append('--pod-selector=%s' % pod_selector)
cmd.extend(['--list-pods', '-o', 'json'])
return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')
#pylint: disable=too-many-arguments
def _evacuate(self, node=None, selector=None, pod_selector=None, dry_run=False, grace_period=None, force=False):
''' perform oadm manage-node evacuate '''
cmd = ['manage-node']
if node:
cmd.extend(node)
else:
cmd.append('--selector=%s' % selector)
if dry_run:
cmd.append('--dry-run')
if pod_selector:
cmd.append('--pod-selector=%s' % pod_selector)
if grace_period:
cmd.append('--grace-period=%s' % int(grace_period))
if force:
cmd.append('--force')
cmd.append('--evacuate')
return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')
def _import_image(self, url=None, name=None, tag=None):
''' perform image import '''
cmd = ['import-image']
image = '{0}'.format(name)
if tag:
image += ':{0}'.format(tag)
cmd.append(image)
if url:
cmd.append('--from={0}/{1}'.format(url, image))
cmd.append('-n{0}'.format(self.namespace))
        cmd.append('--confirm')
return self.openshift_cmd(cmd)
#pylint: disable=too-many-arguments
def openshift_cmd(self, cmd, oadm=False, output=False, output_type='json', input_data=None):
'''Base command for oc '''
cmds = []
if oadm:
cmds = ['/usr/bin/oadm']
else:
cmds = ['/usr/bin/oc']
cmds.extend(cmd)
rval = {}
results = ''
err = None
if self.verbose:
print ' '.join(cmds)
proc = subprocess.Popen(cmds,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env={'KUBECONFIG': self.kubeconfig})
stdout, stderr = proc.communicate(input_data)
rval = {"returncode": proc.returncode,
"results": results,
"cmd": ' '.join(cmds),
}
if proc.returncode == 0:
if output:
if output_type == 'json':
try:
rval['results'] = json.loads(stdout)
except ValueError as err:
if "No JSON object could be decoded" in err.message:
err = err.message
elif output_type == 'raw':
|
mdcic/ssp
|
docs/source/conf.py
|
Python
|
gpl-3.0
| 752
| 0.00133
|
# -*- coding: utf-8 -*-
import sys, os
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'ssp'
copyright = u'2013, Yury Konovalov'
version = '0.0.1'
release = '0.0.1'
exclude_patterns = []
pygments_style = 'sphinx'
html_theme = 'default'
html_static_path = ['_static']
htmlhelp_basename = 'sspdoc'
latex_elements = {
}
latex_documents = [
('index', 'ssp.tex', u'ssp Documentation',
u'Yury Konovalov', 'manual'),
]
man_pages = [
('index', 'ssp', u'ssp Documentation',
[u'Yury Konovalov'], 1)
]
texinfo_documents = [
('index', 'ssp', u'ssp Documentation',
u'Yury Konovalov', 'ssp', 'One line description of project.',
'Miscellaneous'),
]
|
yoe/veyepar
|
dj/scripts/email_url.py
|
Python
|
mit
| 1,390
| 0.009353
|
#!/usr/bin/python
# email_url.py
# emails the video URL to the presenters
from email_ab import email_ab
class email_url(email_ab):
ready_state = 7
subject_template = "[{{ep.show.name}}] Video up: {{ep.name}}"
body_body = """
The video of your talk is posted:
{{url}}
{% if ep.state == 7 %}
Look at it, make sure the title is spelled right and the audio sounds reasonable.
If you are satisfied, tweet it, blog it, whatever it. No point in making videos if no one watches them.
To approve it click the Approve button at
http://veyepar.nextdayvideo.com/main/approve/{{ep.id}}/{{ep.slug}}/{{ep.edit_key}}/
As soon as you or someone approves your video, it will be tweeted on @NextDayVideo{% if ep.show.client.tweet_prefix %} tagged {{ep.show.client.tweet_prefix}}{% endif %}. It will also be sent to the event organizers in hopes that they add it to the event website.
{% endif %}
{% if ep.twitter_url %}
It has been tweeted: {{ ep.twitter_url }}
Re-tweet it, blog it, whatever it. No point in making videos if no one watches them.
{% endif %}
"""
py_name = "email_url.py"
def more_context(self, ep):
# If there is a Richard (pyvideo) url, use that;
# else use the youtube url.
url = ep.public_url or ep.host_url
return {'url':url}
if __name__ == '__main__':
    p = email_url()
p.main()
|
opencorato/votainteligente-portal-electoral
|
votainteligente/settings.py
|
Python
|
gpl-3.0
| 2,935
| 0.000681
|
"""
Django settings for votainteligente project.
Generated by 'django-admin startproject' using Django 1.8.1.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '18_bfrslfj^(m1+k+ks3q@f08rsod46lr0k0=p7+=3z5&cl7gj'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
SITE_ID = 1
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'votainteligente.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'votainteligente.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'it-it'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
MEDIA_URL = '/cache/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'cache')
from votainteligente.votainteligente_settings import *
if THEME:
INSTALLED_APPS += (THEME, )
INSTALLED_APPS += ('votai_general_theme', )
|
xen/flask-rq
|
docs/conf.py
|
Python
|
mit
| 9,809
| 0.006932
|
# -*- coding: utf-8 -*-
#
# Flask-RQ documentation build configuration file, created by
# sphinx-quickstart on Mon Mar 12 15:35:21 2012.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
sys.path.append(os.path.abspath('_themes'))
#from setup import __version__
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Flask-RQ'
copyright = u'2012, Matt Wright'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.2'
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme = 'flask_small'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
'github_fork': 'mattupstate/flask-rq',
'index_logo': False
}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['_themes']
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'Flask-RQdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'Flask-RQ.tex', u'Flask-RQ Documentation',
u'Matt Wright', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'flask-rq', u'Flask-RQ Documentation',
[u'Matt Wright'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'Flask-RQ', u'Flask-RQ Documentation',
u'Matt Wright', 'Flask-RQ', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# -- Options for Epub output ---------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = u'Flask-RQ'
epub_author = u'Matt Wright'
epub_publisher = u'Matt Wright'
epub_copyright = u'2012, Matt Wright'
|
Havate/havate-openstack
|
proto-build/gui/horizon/Horizon_GUI/openstack_dashboard/openstack/common/rpc/impl_fake.py
|
Python
|
apache-2.0
| 5,854
| 0
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Fake RPC implementation which calls proxy methods directly with no
queues. Casts will block, but this is very useful for tests.
"""
import inspect
# NOTE(russellb): We specifically want to use json, not our own jsonutils.
# jsonutils has some extra logic to automatically convert objects to primitive
# types so that they can be serialized. We want to catch all cases where
# non-primitive types make it into this code and treat it as an error.
import json
import time
import eventlet
from openstack_dashboard.openstack.common.rpc import common as rpc_common
CONSUMERS = {}
class RpcContext(rpc_common.CommonRpcContext):
def __init__(self, **kwargs):
super(RpcContext, self).__init__(**kwargs)
self._response = []
self._done = False
def deepcopy(self):
values = self.to_dict()
new_inst = self.__class__(**values)
new_inst._response = self._response
new_inst._done = self._done
return new_inst
def reply(self, reply=None, failure=None, ending=False):
if ending:
self._done = True
if not self._done:
self._response.append((reply, failure))
class Consumer(object):
def __init__(self, topic, proxy):
self.topic = topic
self.proxy = proxy
def call(self, context, version, method, namespace, args, timeout):
done = eventlet.event.Event()
def _inner():
ctxt = RpcContext.from_dict(context.to_dict())
try:
rval = self.proxy.dispatch(context, version, method,
namespace, **args)
res = []
# Caller might have called ctxt.reply() manually
for (reply, failure) in ctxt._response:
if failure:
raise failure[0], failure[1], failure[2]
res.append(reply)
# if ending not 'sent'...we might have more data to
# return from the function itself
if not ctxt._done:
if inspect.isgenerator(rval):
for val in rval:
res.append(val)
else:
res.append(rval)
done.send(res)
except rpc_common.ClientException as e:
done.send_exception(e._exc_info[1])
except Exception as e:
done.send_exception(e)
thread = eventlet.greenthread.spawn(_inner)
if timeout:
start_time = time.time()
while not done.ready():
eventlet.greenthread.sleep(1)
cur_time = time.time()
if (cur_time - start_time) > timeout:
thread.kill()
raise rpc_common.Timeout()
return done.wait()
class Connection(object):
"""Connection object."""
def __init__(self):
self.consumers = []
def create_consumer(self, topic, proxy, fanout=False):
consumer = Consumer(topic, proxy)
self.consumers.append(consumer)
if topic not in CONSUMERS:
CONSUMERS[topic] = []
CONSUMERS[topic].append(consumer)
def close(self):
for consumer in self.consumers:
CONSUMERS[consumer.topic].remove(consumer)
self.consumers = []
def consume_in_thread(self):
pass
def create_connection(conf, new=True):
"""Create a connection."""
return Connection()
def check_serialize(msg):
"""Make sure a message intended for rpc can be serialized."""
json.dumps(msg)
def multicall(conf, context, topic, msg, timeout=None):
"""Make a call that returns multiple times."""
check_serialize(msg)
method = msg.get('method')
if not method:
return
args = msg.get('args', {})
version = msg.get('version', None)
namespace = msg.get('namespace', None)
try:
consumer = CONSUMERS[topic][0]
except (KeyError, IndexError):
return iter([None])
else:
return consumer.call(context, version, method, namespace, args,
timeout)
def call(conf, context, topic, msg, timeout=None):
"""Sends a message on a topic and wait for a response."""
rv = multicall(conf, context, topic, msg, timeout)
# NOTE(vish): return the last result from the multicall
rv = list(rv)
if not rv:
return
return rv[-1]
def cast(conf, context, topic, msg):
check_serialize(msg)
try:
call(conf, context, topic, msg)
except Exception:
pass
def notify(conf, context, topic, msg, envelope):
check_serialize(msg)
def cleanup():
pass
def fanout_cast(conf, context, topic, msg):
"""Cast to all consumers of a topic."""
check_serialize(msg)
method = msg.get('method')
if not method:
return
args = msg.get('args', {})
version = msg.get('version', None)
namespace = msg.get('namespace', None)
for consumer in CONSUMERS.get(topic, []):
try:
consumer.call(context, version, method, namespace, args, None)
except Exception:
pass
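# Usage sketch (illustrative; EchoProxy is a made-up minimal proxy object):
#
#     class EchoProxy(object):
#         def dispatch(self, ctxt, version, method, namespace, **kwargs):
#             return kwargs.get('value')
#
#     conn = create_connection(conf=None)
#     conn.create_consumer('test-topic', EchoProxy())
#     print call(None, RpcContext(), 'test-topic',
#                {'method': 'echo', 'args': {'value': 42}})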
|
feend78/evennia
|
evennia/contrib/tutorial_world/rooms.py
|
Python
|
bsd-3-clause
| 40,655
| 0.001746
|
"""
Room Typeclasses for the TutorialWorld.
This defines special types of Rooms available in the tutorial. To keep
everything in one place we define them together with the custom
commands needed to control them. Those commands could also have been
in a separate module (e.g. if they could have been re-used elsewhere.)
"""
from __future__ import print_function
import random
from evennia import TICKER_HANDLER
from evennia import CmdSet, Command, DefaultRoom
from evennia import utils, create_object, search_object
from evennia import syscmdkeys, default_cmds
from evennia.contrib.tutorial_world.objects import LightSource
# the system error-handling module is defined in the settings. We load the
# given setting here using utils.object_from_module. This way we can use
# it regardless of if we change settings later.
from django.conf import settings
_SEARCH_AT_RESULT = utils.object_from_module(settings.SEARCH_AT_RESULT)
# -------------------------------------------------------------
#
# Tutorial room - parent room class
#
# This room is the parent of all rooms in the tutorial.
# It defines a tutorial command on itself (available to
# all those who are in a tutorial room).
#
# -------------------------------------------------------------
#
# Special command available in all tutorial rooms
class CmdTutorial(Command):
"""
Get help during the tutorial
Usage:
tutorial [obj]
This command allows you to get behind-the-scenes info
about an object or the current location.
"""
key = "tutorial"
aliases = ["tut"]
locks = "cmd:all()"
help_category = "TutorialWorld"
def func(self):
"""
All we do is to scan the current location for an Attribute
called `tutorial_info` and display that.
"""
caller = self.caller
if not self.args:
target = self.obj # this is the room the command is defined on
else:
target = caller.search(self.args.strip())
if not target:
return
helptext = target.db.tutorial_info
if helptext:
caller.msg("|G%s|n" % helptext)
else:
caller.msg("|RSorry, there is no tutorial help available here.|n")
# for the @detail command we inherit from MuxCommand, since
# we want to make use of MuxCommand's pre-parsing of '=' in the
# argument.
class CmdTutorialSetDetail(default_cmds.MuxCommand):
"""
sets a detail on a room
Usage:
@detail <key> = <description>
@detail <key>;<alias>;... = description
Example:
@detail walls = The walls are covered in ...
@detail castle;ruin;tower = The distant ruin ...
This sets a "detail" on the object this command is defined on
(TutorialRoom for this tutorial). This detail can be accessed with
the TutorialRoomLook command sitting on TutorialRoom objects (details
are set as a simple dictionary on the room). This is a Builder command.
We custom parse the key for the ;-separator in order to create
multiple aliases to the detail all at once.
"""
key = "@detail"
locks = "cmd:perm(Builder)"
help_category = "TutorialWorld"
def func(self):
"""
All this does is to check if the object has
the set_detail method and uses it.
"""
if not self.args or not self.rhs:
self.caller.msg("Usage: @detail key = description")
return
if not hasattr(self.obj, "set_detail"):
self.caller.msg("Details cannot be set on %s." % self.obj)
return
for key in self.lhs.split(";"):
# loop over all aliases, if any (if not, this will just be
# the one key to loop over)
self.obj.set_detail(key, self.rhs)
self.caller.msg("Detail set: '%s': '%s'" % (self.lhs, self.rhs))
class CmdTutorialLook(default_cmds.CmdLook):
"""
looks at the room and on details
Usage:
look <obj>
look <room detail>
look *<account>
Observes your location, details at your location or objects
in your vicinity.
Tutorial: This is a child of the default Look command, that also
allows us to look at "details" in the room. These details are
things to examine and offers some extra description without
actually having to be actual database objects. It uses the
return_detail() hook on TutorialRooms for this.
"""
# we don't need to specify key/locks etc, this is already
# set by the parent.
help_category = "TutorialWorld"
def func(self):
"""
Handle the looking. This is a copy of the default look
code except for adding in the details.
"""
caller = self.caller
args = self.args
if args:
            # we use quiet=True to turn off automatic error reporting.
            # This tells search that we want to handle error messages
# ourself. This also means the search function will always
# return a list (with 0, 1 or more elements) rather than
# result/None.
looking_at_obj = caller.search(args,
# note: excludes room/room aliases
candidates=caller.location.contents + caller.contents,
use_nicks=True, quiet=True)
if len(looking_at_obj) != 1:
# no target found or more than one target found (multimatch)
# look for a detail that may match
detail = self.obj.return_detail(args)
if detail:
self.caller.msg(detail)
return
else:
# no detail found, delegate our result to the normal
# error message handler.
_SEARCH_AT_RESULT(None, caller, args, looking_at_obj)
return
else:
# we found a match, extract it from the list and carry on
# normally with the look handling.
looking_at_obj = looking_at_obj[0]
else:
looking_at_obj = caller.location
if not looking_at_obj:
caller.msg("You have no location to look at!")
return
if not hasattr(looking_at_obj, 'return_appearance'):
# this is likely due to us having an account instead
looking_at_obj = looking_at_obj.character
if not looking_at_obj.access(caller, "view"):
caller.msg("Could not find '%s'." % args)
return
# get object's appearance
caller.msg(looking_at_obj.return_appearance(caller))
# the object's at_desc() method.
looking_at_obj.at_desc(looker=caller)
return
class TutorialRoomCmdSet(CmdSet):
"""
Implements the simple tutorial cmdset. This will overload the look
command in the default CharacterCmdSet since it has a higher
    priority (CharacterCmdSet has prio 0)
"""
key = "tutorial_cmdset"
priority = 1
def at_cmdset_creation(self):
"""add the tutorial-room commands"""
self.add(CmdTutorial())
self.add(CmdTutorialSetDetail())
self.add(CmdTutorialLook())
class TutorialRoom(DefaultRoom):
"""
This is the base room type for all rooms in the tutorial world.
It defines a cmdset on itself for reading tutorial info about the location.
"""
def at_object_creation(self):
"""Called when room is first created"""
self.db.tutorial_info = "This is a tutorial room. It allows you to use the 'tutorial' command."
self.cmdset.add_default(TutorialRoomCmdSet)
def at_object_receive(self, new_arrival, source_location):
"""
        When an object enters a tutorial room we tell other objects in
the room about it by trying to call a hook on them. The Mob object
uses this to cheaply get notified of enemies without having
to constantly scan for them.
Args:
new_arrival (Object): the object that just entered this room.
source_loc
|
Colibri-Embedded/FABEmu
|
examples/rpiemu.py
|
Python
|
gpl-2.0
| 909
| 0.006601
|
#!/usr/bin/env python
import os, Queue
import sys
from time import sleep
from threading import Thread
from libs.qemu import QemuInstance, UARTLineParser
# External
if len(sys.argv) > 1:
print "ARGS:", str(sys.argv)
sys.path.append(os.path.dirname( sys.argv[1] ))
########################################################################
print("=== Starting RPiEmu v0.5 ===")
# Qemu python wrapper that connects to the TCP server
rpi = QemuInstance()
rpi.start()
#####################################################
from models.totumduino import TotumDuino
from models.fabtotum import FABTotum
# FABTotum model
ft = FABTotum()
# Totumduino model
td = TotumDuino(ft)
# Start a TD thread
td.run()
print("* Totumduino thread started")
# UART line parser
parser = UARTLineParser(qemu=rpi, line_handler=td.uart0_transfer)
parser.start()
parser.loop()
# Finish the TD thread
td.finish()
|
OriHoch/pysiogame
|
game_boards/game070.py
|
Python
|
gpl-3.0
| 12,968
| 0.018124
|
# -*- coding: utf-8 -*-
import classes.level_controller as lc
import classes.game_driver as gd
import classes.extras as ex
import classes.board
import random
import pygame
class Board(gd.BoardGame):
def __init__(self, mainloop, speaker, config, screen_w, screen_h):
self.level = lc.Level(self,mainloop,5,10)
gd.BoardGame.__init__(self,mainloop,speaker,config,screen_w,screen_h,13,11)
def create_game_objects(self, level = 1):
self.board.decolorable = False
self.board.draw_grid = False
color = (234,218,225)
self.color = color
self.grey = (200,200,200)
self.font_hl = (100,0,250)
self.task_str_color = ex.hsv_to_rgb(200,200,230)
self.activated_col = self.font_hl
white = (255,255,255)
self.bg_col = white
self.top_line = 3#self.board.scale//2
if self.mainloop.scheme is not None:
if self.mainloop.scheme.dark:
self.bg_col = (0,0,0)
self.level.games_per_lvl = 5
if self.level.lvl == 1:
rngs = [20,50,10,19]
self.level.games_per_lvl = 3
elif self.level.lvl == 2:
rngs = [50,100,20,49]
self.level.games_per_lvl = 3
elif self.level.lvl == 3:
rngs = [100,250,50,99]
self.level.games_per_lvl = 3
elif self.level.lvl == 4:
rngs = [250,500,100,249]
elif self.level.lvl == 5:
rngs = [500,1000,100,499]
elif self.level.lvl == 6:
rngs = [700,1500,250,699]
elif self.level.lvl == 7:
rngs = [1500,2500,500,1499]
elif self.level.lvl == 8:
rngs = [2500,5000,1500,2499]
elif self.level.lvl == 9:
rngs = [5000,10000,2500,4999]
elif self.level.lvl == 10:
rngs = [10000,84999,5000,9999]
data = [39,18]
self.points = self.level.lvl
#stretch width to fit the screen size
x_count = self.get_x_count(data[1],even=None)
if x_count > 39:
data[0] = x_count
self.data = data
self.vis_buttons = [1,1,1,1,1,1,1,0,0]
self.mainloop.info.hide_buttonsa(self.vis_buttons)
self.layout.update_layout(data[0],data[1])
scale = self.layout.scale
self.board.level_start(data[0],data[1],scale)
self.n1 = random.randrange(rngs[0],rngs[1])
self.n2 = random.randrange(rngs[2],rngs[3])
self.sumn1n2 = self.n1-self.n2
self.n1s = str(self.n1)
self.n2s = str(self.n2)
self.sumn1n2s = str(self.sumn1n2)
self.n1sl = len(self.n1s)
self.n2sl = len(self.n2s)
self.sumn1n2sl =len(self.sumn1n2s)
self.cursor_pos = 0
self.correct = False
self.carry1l = []
self.carry10l = []
self.resultl = []
self.nums1l = []
self.nums2l = []
self.ship_id = 0
self.digits = ["0","1","2","3","4","5","6","7","8","9"]
if self.lang.lang == 'el':
qm = ";"
else:
qm = "?"
question = self.n1s + " - " + self.n2s + " = " + qm
self.board.add_unit(1,0,data[0]-3-(max(self.n1sl,self.n2sl))*3 ,3,classes.board.Label,question,self.bg_col,"",21)
self.board.units[-1].align = 1
#borrow 1
for i in range(self.n1sl - 1):
self.board.add_unit(data[0]-6-i*3,0,1,1,classes.board.Label,"-",self.bg_col,"",0)
            self.board.add_unit(data[0]-5-i*3,0,1,1,classes.board.Letter,"",self.bg_col,"",1)
self.carry1l.append(self.board.ships[-1])
            self.carry1l[-1].set_outline(self.grey, 2)
self.carry1l[-1].pos_id = i
self.board.units[-1].align = 2
#add 10
for i in range(self.n1sl - 1):
self.board.add_unit(data[0]-3-i*3,1,1,1,classes.board.Label,"+",self.bg_col,"",0)
self.board.add_unit(data[0]-2-i*3,1,1,1,classes.board.Letter,"",self.bg_col,"",1)
self.carry10l.append(self.board.ships[-1])
self.carry10l[-1].set_outline(self.grey, 2)
self.carry10l[-1].pos_id = i
self.board.units[-1].align = 2
self.board.add_unit(data[0]-2-self.n1sl*3,0,2,1,classes.board.Label,"-1",self.bg_col,"",0)
self.board.add_unit(data[0]-2-self.n1sl*3,1,2,1,classes.board.Label,"+10",self.bg_col,"",0)
#first number
for i in range(self.n1sl):
self.board.add_unit(data[0]-3-i*3,2,3,3,classes.board.Label,self.n1s[-(i+1)],self.bg_col,"",21)
self.nums1l.append(self.board.units[-1])
self.nums1l[-1].font_color = self.grey
self.nums1l[-1].pos_id = i
#second number
i = 0
for i in range(self.n2sl):
self.board.add_unit(data[0]-3-i*3,5,3,3,classes.board.Label,self.n2s[-(i+1)],self.bg_col,"",21)
self.nums2l.append(self.board.units[-1])
self.nums2l[-1].pos_id = i
i += 1
self.board.add_unit(data[0]-3-i*3,5,3,3,classes.board.Label,"-",self.bg_col,"",21)
self.plus_label = self.board.units[-1]
#line
#line = "―" * (self.sumn1n2sl*2)
self.board.add_unit(data[0]-self.sumn1n2sl*3,8,self.sumn1n2sl*3,1,classes.board.Label,"",self.bg_col,"",21)
self.draw_hori_line(self.board.units[-1])
#self.board.units[-1].text_wrap = False
#result
for i in range(self.sumn1n2sl):
self.board.add_unit(data[0]-3-i*3,9,3,3,classes.board.Letter,"",self.bg_col,"",21)
self.resultl.append(self.board.ships[-1])
self.resultl[-1].set_outline(self.grey, 2)
self.resultl[-1].pos_id = i
self.resultl[0].set_outline(self.activated_col, 3)
self.home_square = self.resultl[0]
self.board.active_ship = self.home_square.unit_id
self.activable_count = len(self.board.ships)
for each in self.board.ships:
each.immobilize()
self.deactivate_colors()
self.reactivate_colors()
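    # Worked example of the layout built above (illustrative numbers): for
    # 52 - 17 the ones column needs a borrow, so the player writes "-1" over
    # the tens digit (a carry1l box) and "+10" over the ones digit (a
    # carry10l box), then fills the result boxes right to left:
    # 12 - 7 = 5, then (5 - 1) - 1 = 3, giving 35.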
def draw_hori_line(self,unit):
w = unit.grid_w*self.board.scale
h = unit.grid_h*self.board.scale
center = [w//2,h//2]
canv = pygame.Surface([w, h-1])
canv.fill(self.bg_col)
pygame.draw.line(canv,self.grey,(0,self.top_line),(w,self.top_line),3)
unit.painting = canv.copy()
unit.update_me = True
def handle(self,event):
gd.BoardGame.handle(self, event) #send event handling up
if self.show_msg == False:
if event.type == pygame.KEYDOWN and event.key == pygame.K_LEFT:
self.home_sqare_switch(self.board.active_ship+1)
elif event.type == pygame.KEYDOWN and event.key == pygame.K_RIGHT:
self.home_sqare_switch(self.board.active_ship-1)
elif event.type == pygame.KEYDOWN and event.key == pygame.K_UP:
if self.home_square in self.resultl:
self.home_sqare_switch(self.board.active_ship-self.n1sl+1)
elif self.home_square in self.carry10l:
self.home_sqare_switch(self.board.active_ship-self.n1sl+1)
elif event.type == pygame.KEYDOWN and event.key == pygame.K_DOWN:
self.home_sqare_switch(self.board.active_ship+self.n1sl-1)
elif event.type == pygame.KEYDOWN and event.key != pygame.K_RETURN and not self.correct:
lhv = len(self.home_square.value)
self.changed_since_check = True
if event.key == pygame.K_BACKSPACE:
if lhv > 0:
self.home_square.value = self.home_square.value[0:lhv-1]
else:
char = event.unicode
if (len(char)>0 and lhv < 3 and char in self.digits):
if self.home_square in self.resultl:
if lhv == 1:
s = self.home_square.value + char
if s[0] == "0":
self.home_square.value = char
else:
|
jbms/mintapi
|
mintapi/api.py
|
Python
|
mit
| 25,488
| 0.000471
|
import json
import random
import time
import re
try:
from StringIO import StringIO # Python 2
except ImportError:
from io import BytesIO as StringIO # Python 3
from datetime import date, datetime, timedelta
import requests
from requests.adapters import HTTPAdapter
try:
from requests.packages.urllib3.poolmanager import PoolManager
except ImportError:
from urllib3.poolmanager import PoolManager
import xmltodict
try:
import pandas as pd
except ImportError:
pd = None
def assert_pd():
# Common function to check if pd is installed
if not pd:
raise ImportError(
'transactions data requires pandas; '
'please pip install pandas'
)
DATE_FIELDS = [
'addAccountDate',
'closeDate',
'fiLastUpdated',
'lastUpdated',
]
class MintHTTPSAdapter(HTTPAdapter):
def init_poolmanager(self, connections, maxsize, **kwargs):
self.poolmanager = PoolManager(num_pools=connections,
maxsize=maxsize, **kwargs)
class Mint(requests.Session):
json_headers = {'accept': 'application/json'}
request_id = 42 # magic number? random number?
token = None
def __init__(self, email=None, password=None):
requests.Session.__init__(self)
self.mount('https://', MintHTTPSAdapter())
if email and password:
self.login_and_get_token(email, password)
@classmethod
def create(cls, email, password): # {{{
mint = Mint()
mint.login_and_get_token(email, password)
return mint
@classmethod
def get_rnd(cls): # {{{
return (str(int(time.mktime(datetime.now().timetuple())))
+ str(random.randrange(999)).zfill(3))
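    # Illustrative call (hypothetical values): at unix time 1700000000, with
    # random.randrange(999) returning 7, get_rnd() yields '1700000000007' --
    # the timestamp plus a zero-padded 3-digit suffix, typically used as a
    # cache-busting request parameter.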
@classmethod
def parse_float(cls, string): # {{{
for bad_char in ['$', ',', '%']:
string = string.replace(bad_char, '')
try:
return float(string)
except ValueError:
return None
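    # Examples: parse_float('$1,234.56') -> 1234.56, parse_float('12%') ->
    # 12.0, and parse_float('n/a') -> None via the ValueError branch above.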
def request_and_check(self, url, method='get',
expected_content_type=None, **kwargs):
"""Performs a request, and checks that the status is OK, and that the
content-type matches expectations.
Args:
url: URL to request
method: either 'get' or 'post'
expected_content_type: prefix to match response content-type against
**kwargs: passed to the request method directly.
Raises:
RuntimeError if status_code does not match.
"""
assert (method == 'get' or method == 'post')
result = getattr(self, method)(url, **kwargs)
if result.status_code != requests.codes.ok:
raise RuntimeError('Error requesting %r, status = %d' %
(url, result.status_code))
if expected_content_type is not None:
content_type = result.headers.get('content-type', '')
if not re.match(expected_content_type, content_type):
raise RuntimeError(
'Error requesting %r, content type %r does not match %r' %
(url, content_type, expected_content_type))
return result
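    # A usage sketch (the URL is illustrative, not a documented endpoint):
    #   result = self.request_and_check(
    #       'https://wwws.mint.com/example.xevent', method='post',
    #       expected_content_type='application/json', data={'input': '[]'})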
def login_and_get_token(self, email, password): # {{{
# 0: Check to see if we're already logged in.
if self.token is not None:
return
# 1: Login.
login_url = 'https://wwws.mint.com/login.event?task=L'
try:
self.request_and_check(login_url)
except RuntimeError:
raise Exception('Failed to load Mint login page')
data = {'username': email}
response = self.post('https://wwws.mint.com/getUserPod.xevent',
data=data, headers=self.json_headers).text
data = {'username': email, 'password': password, 'task': 'L',
'browser': 'firefox', 'browserVersion': '27', 'os': 'linux'}
response = self.post('https://wwws.mint.com/loginUserSubmit.xevent',
data=data, headers=self.json_headers).text
if 'token' not in response:
raise Exception('Mint.com login failed[1]')
response = json.loads(response)
if not response['sUser']['token']:
raise Exception('Mint.com login failed[2]')
# 2: Grab token.
self.token = response['sUser']['token']
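    # Typical entry point (placeholder credentials): the create() classmethod
    # above wraps this two-step login, so callers can simply write
    #   mint = Mint.create('user@example.com', 'password')
    # after which self.token authenticates the later service calls.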
def get_accounts(self, get_detail=False): # {{{
# Issue service request.
req_id = str(self.request_id)
input = {
'args': {
'types': [
'BANK',
'CREDIT',
'INVESTMENT',
'LOAN',
'MORTGAGE',
'OTHER_PROPERTY',
'REAL_ESTATE',
'VEHICLE',
'UNCLASSIFIED'
]
},
'id': req_id,
'service': 'MintAccountService',
'task': 'getAccountsSorted'
# 'task': 'getAccountsSortedByBalanceDescending'
}
data = {'input': json.dumps([input])}
account_data_url = ('https://wwws.mint.com/bundledServiceController.'
'xevent?legacy=false&token=' + self.token)
response = self.post(account_data_url, data=data,
headers=self.json_headers).text
self.request_id = self.request_id + 1
if req_id not in response:
raise Exception('Could not parse account data: ' + response)
# Parse the request
response = json.loads(response)
accounts = response['response'][req_id]['response']
# Return datetime objects for dates
for account in accounts:
for df in DATE_FIELDS:
if df in account:
# Convert from javascript timestamp to unix timestamp
# http://stackoverflow.com/a/9744811/5026
try:
ts = account[df] / 1e3
except TypeError:
# returned data is not a number, don't parse
continue
account[df + 'InDate'] = datetime.fromtimestamp(ts)
        if get_detail:
            accounts = self.populate_extended_account_detail(accounts)
return accounts
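    # Timestamp note: Mint reports millisecond JavaScript timestamps, so a
    # value such as 1500000000000 becomes 1500000000.0 after the / 1e3 above,
    # which datetime.fromtimestamp() accepts as unix seconds.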
def set_user_property(self, name, value):
url = ('https://wwws.mint.com/bundledServiceController.xevent?' +
'legacy=false&token=' + self.token)
req_id = str(self.request_id)
self.request_id += 1
result = self.post(
url,
data={'input': json.dumps([{'args': {'propertyName': name,
'propertyValue': value},
'service': 'MintUserService',
'task': 'setUserProperty',
'id': req_id}])},
headers=self.json_headers)
if result.status_code != 200:
raise Exception('Received HTTP error %d' % result.status_code)
response = result.text
if req_id not in response:
raise Exception("Could not parse response to set_user_property")
def _dateconvert(self, dateraw):
# Converts dates from json data
cy = datetime.isocalendar(date.today())[0]
try:
newdate = datetime.strptime(dateraw + str(cy), '%b %d%Y')
        except ValueError:
newdate = datetime.strptime(dateraw, '%m/%d/%y')
return newdate
def _debit_credit(self, row):
# Reverses credit balances
dic = {False: -1, True: 1}
return float(row['amount'][1:].replace(',', '')) * dic[row['isDebit']]
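    # Worked example (hypothetical row): {'amount': '$1,200.50',
    # 'isDebit': False} -> -1200.5, i.e. credits are negated so debits and
    # credits sum naturally in later aggregation.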
def get_transactions_json(self, include_investment=False,
skip_duplicates=False, start_date=None):
"""Returns the raw JSON transaction data as downloaded from Mint. The JSON
transaction data includes some additional information missing from the
CSV data, such as whether the transaction is pending or completed, but
leaves off the yea
|
lsaffre/lino-welfare
|
lino_welfare/modlib/isip/__init__.py
|
Python
|
agpl-3.0
| 413
| 0
|
# -*- coding: UTF-8 -*-
# Copyright 2012-2015 Rumma & Ko Ltd
# License: BSD (see file COPYING for details)
"""See :doc:`/specs/isip`.
"""
from __future__ import unicode_literals
from django.utils.translation import ugettext_lazy as _
from lino.api import ad
class Plugin(ad.Plugin):
"See :class:`lino.core.plugin.Plugin`."
verbose_name = _("ISIP")
    needs_plugins = ['lino_welfare.modlib.integ']
|
tyagow/AdvancingTheBlog
|
src/posts/migrations/0002_post_user.py
|
Python
|
mit
| 633
| 0.00158
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-01-05 03:11
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('posts', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='post',
name='user',
            field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
]
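    # Note: default=1 assumes a user with primary key 1 exists; it is only
    # consulted when backfilling the new column for rows that predate this
    # migration.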
|
jvs/sourcer
|
sourcer/expressions/sep.py
|
Python
|
mit
| 2,062
| 0.000485
|
from outsourcer import Code
from . import utils
from .base import Expression
from .constants import BREAK, POS, RESULT, STATUS
class Sep(Expression):
num_blocks = 2
def __init__(
self,
expr,
separator,
discard_separators=True,
allow_trailer=False,
allow_empty=True,
):
self.expr = expr
self.separator = separator
self.discard_separators = discard_separators
self.allow_trailer = allow_trailer
self.allow_empty = allow_empty
def __str__(self):
op = '/?' if self.allow_trailer else '//'
return utils.infix_str(self.expr, op, self.separator)
def operand_string(self):
return f'({self})'
def always_succeeds(self):
return self.allow_empty
def _compile(self, out):
staging = out.var('staging', [])
checkpoint = out.var('checkpoint', POS)
with out.WHILE(True):
with utils.if_fails(out, self.expr):
# If we're not discarding separators, and if we're also not
# allowing a trailing separator, then we need to pop the last
# separator off of our list.
if not self.discard_separators and not self.allow_trailer:
# But only pop if staging is not empty.
with out.IF(staging):
out += staging.pop()
out += BREAK
out += staging.append(RESULT)
out += checkpoint << POS
with utils.if_fails(out, self.separator):
out += BREAK
if not self.discard_separators:
out += staging.append(RESULT)
if self.allow_trailer:
out += checkpoint << POS
success = [
RESULT << staging,
POS << checkpoint,
STATUS << True,
]
if self.allow_empty:
out.extend(success)
else:
with out.IF(staging):
out.extend(success)
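# A minimal usage sketch (the operand expressions `digit` and `comma` are
# hypothetical, defined elsewhere in a grammar):
#   items = Sep(digit, comma, allow_trailer=True)
#   str(items)  # roughly 'digit /? comma' ('digit // comma' without trailer)
# Compiled, this matches 'digit (comma digit)* comma?' and, because
# discard_separators defaults to True, yields only the digit results.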