Dataset schema:

| column | dtype | range |
|---|---|---|
| repo_name | string | lengths 5–100 |
| path | string | lengths 4–231 |
| language | string | 1 distinct value |
| license | string | 15 distinct values |
| size | int64 | 6–947k |
| score | float64 | 0–0.34 |
| prefix | string | lengths 0–8.16k |
| middle | string | lengths 3–512 |
| suffix | string | lengths 0–8.17k |
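Concatenating a row's prefix, middle, and suffix cells reconstructs the sampled file slice. As a hedged illustration only, a table with this schema could be inspected with the Hugging Face `datasets` library; the dataset identifier below is a placeholder, not taken from this page:

from datasets import load_dataset

# placeholder dataset id -- substitute the real one
ds = load_dataset("some-org/some-fim-code-corpus", split="train")
row = ds[0]
print(row["repo_name"], row["path"], row["license"], row["size"], row["score"])
# prefix + middle + suffix re-assembles the original code slice
print(row["prefix"] + row["middle"] + row["suffix"])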

RPGOne/Skynet | imbalanced-learn-master/examples/over-sampling/plot_smote_svm.py | Python | bsd-3-clause | size 1,830 | score 0.002732

"""
=========
SMOTE SVM
=========
An illustration of the SMOTE SVM method.
"""
print(__doc__)
import matplotlib.pyplot as plt
import seaborn as sns
sns.set()
# Define some color for the plotting
almost_black = '#262626'
palette = sns.color_palette()
from sklearn.datasets import make_classification
from sklearn.decomposition import PCA
from imblearn.over_sampling import SMOTE
# Generate the dataset
X, y = make_classification(n_classes=2, class_sep=2, weights=[0.1, 0.9],
n_informative=3, n_redundant=1, flip_y=0,
n_features=20, n_clusters_per_class=1,
n_samples=5000, random_state=10)
# Instantiate a PCA object for the sake of easy visualisation
pca = PCA(n_components=2)
# Fit and transform x to visualise inside a 2D feature space
X_vis = pca.fit_transform(X)
# Apply SMOTE SVM
sm = SMOTE(kind='svm')
X_resampled, y_resampled = sm.fit_sample(X, y)
X_res_vis = pca.transform(X_resampled)
# Two subplots, unpack the axes array immediately
f, (ax1, ax2) = plt.subplots(1, 2)
ax1.scatter(X_vis[y == 0, 0], X_vis[y == 0, 1], label="Class #0", alpha=0.5,
edgecolor=almost_black, facecolor=palette[0], linewidth=0.15)
ax1.scatter(X_vis[y == 1, 0], X_vis[y == 1, 1], label="Class #1", alpha=0.5,
            edgecolor=almost_black, facecolor=palette[2], linewidth=0.15)
ax1.set_title('Original set')
ax2.scatter(X_res_vis[y_resampled == 0, 0], X_res_vis[y_resampled == 0, 1],
label="Class #0", alpha=.5, edgecolor=almost_black,
facecolor=palette[0], linewidth=0.15)
ax2.scatter(X_res_vis[y_resampled == 1, 0], X_res_vis[y_resampled == 1, 1],
label="Class #1", alpha=.5, edgecolor=almost_
|
black,
facecolor=palette[2], linewidth=0.15)
ax2.set_title('SMOTE svm')
plt.show()
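The example above uses the older `SMOTE(kind='svm')` and `fit_sample` interface. A minimal sketch of the same resampling step against the newer imbalanced-learn API, assuming a release that ships `SVMSMOTE` and `fit_resample`:

from imblearn.over_sampling import SVMSMOTE  # newer API; availability assumed

sm = SVMSMOTE(random_state=10)
X_resampled, y_resampled = sm.fit_resample(X, y)
X_res_vis = pca.transform(X_resampled)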

sit0x/farmasoft | interaccion_usuario.py | Python | gpl-3.0 | size 2,473 | score 0.001227

#!/usr/bin/env python
# coding: utf-8
def ingresar_numero(numero_minimo, numero_maximo):
    ''' Shows an input prompt so the user can enter a number such that
    numero_minimo <= input <= numero_maximo. On invalid input it prints a
    descriptive error message and asks again.
    Returns the entered number as an integer.
    '''
while True:
ingreso = input()
if not ingreso.isdigit():
print("El ingreso debe ser numérico.")
elif not numero_minimo <= int(ingreso) <= numero_maximo:
print("El ingreso debe estar entre {} y {}}.".format
(numero_minimo, numero_maximo))
else:
return int(ingreso)
def ingresar_cadena_no_vacia():
    ''' Shows an input prompt so the user can enter a non-empty string.
    On invalid input it prints a descriptive error message and asks again.
    Returns the entered string in upper case.
    '''
while True:
ingreso = input()
if len(ingreso) == 0:
print("El ingreso no debe ser vacío.")
else:
return ingreso.upper()
def mostrar_menu_generico(opciones, opcion_por_defecto):
    ''' Shows a selection screen given a list of options and a default
    option. The user can choose an option according to the numbering shown,
    which is generated by the function. Input is validated, asking again as
    many times as necessary.
    opciones is a list of strings with the options to display.
    opcion_por_defecto is an additional, mandatory option not included in the
    list of options. It is shown last and is intended for a "cancel"-type
    option.
    Returns an integer according to the user's choice. If an element of the
    list of options is selected, its index is returned. If the default option
    is selected, -1 is returned.
    '''
print("Seleccione una opción:")
for numero_opcion in range(len(opciones)):
print("{}. {}.".format(numero_opcion + 1, opciones[numero_opcion]))
print ("{}. {}.".format(len(opciones) + 1, opcion_por_defecto))
    print()
seleccion = ingresar_numero(1, len(opciones) + 1)
    # Case where the user chooses the default option.
if seleccion == len(opciones) + 1:
return -1
    # Case where the user chooses an option from the list.
return seleccion - 1
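A minimal usage sketch for the helpers above; the menu entries are illustrative and not part of the original program:

if __name__ == "__main__":
    # illustrative options only
    opciones = ["Alta de producto", "Consulta de stock"]
    eleccion = mostrar_menu_generico(opciones, "Cancelar")
    if eleccion == -1:
        print("Operación cancelada.")
    else:
        print("Opción elegida:", opciones[eleccion])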

amwelch/a10sdk-python | a10sdk/core/acos/acos_scaleout_cluster_config_device_groups_device_group.py | Python | apache-2.0 | size 1,370 | score 0.010949

from a10sdk.common.A10BaseClass import A10BaseClass
class DeviceGroup(A10BaseClass):
"""Class Description::
configure scaleout device groups.
Class device-group supports CRUD Operations and inherits from `common/A10BaseClass`.
    This class is the `"PARENT"` class for this module.
    :param device_group: {"description": "scaleout device group", "format": "number", "type": "number", "maximum": 16, "minimum": 1, "optional": false}
:param device_id_start: {"optional": true, "type": "number", "format": "number"}
:param device_id_end: {"optional": true, "type": "number", "format": "number"}
:param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
URL for this object::
`https://<Hostname|Ip address>//axapi/v3/acos-scaleout/cluster-config/device-groups/device-group/{device_group}`.
"""
def __init__(self, **kwargs):
self.ERROR_MSG = ""
self.required = [ "device_group"]
self.b_key = "device-group"
self.a10_url="/axapi/v3/acos-scaleout/cluster-config/device
|
-groups/device-group/{device_group}"
self.DeviceProxy = ""
self.device_group = ""
self.device_id_start = ""
self.device_id_end = ""
        for keys, value in kwargs.items():
setattr(self,keys, value)
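A hedged sketch of how this class might be instantiated; the numeric values are placeholders chosen to satisfy the documented 1-16 range, and no live REST session or DeviceProxy is set up:

# placeholder values; a real call would also attach a DeviceProxy session
dg = DeviceGroup(device_group=1, device_id_start=1, device_id_end=4)
print(dg.a10_url.format(device_group=dg.device_group))
# -> /axapi/v3/acos-scaleout/cluster-config/device-groups/device-group/1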

eicher31/compassion-switzerland | partner_communication_switzerland/models/partner_communication_config.py | Python | agpl-3.0 | size 4,913 | score 0.000204

##############################################################################
#
# Copyright (C) 2021 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
import random
import itertools
from odoo import api, models
class PartnerCommunication(models.Model):
_inherit = "partner.communication.config"
@api.multi
def generate_test_cases_by_language_family_case(self, lang="de_DE", family_case="single", send_mode="digital"):
"""
Generates example communications for our multiple cases in CH
depending on the language and the family case
Outputs the texts in a file
:param lang:
:return: True
"""
self.ensure_one()
comm_obj = self.env["partner.communication.job"].with_context(
must_skip_send_to_printer=True)
res = []
for number_sponsorship in [1, 3, 4]:
partner = self._find_partner(number_sponsorship, lang, family_case)
if partner is None:
continue
object_ids = self._get_test_objects(partner)
object_ids = ",".join([str(id)
for id in object_ids[0:number_sponsorship]])
temp_comm = comm_obj.create({
"partner_id": partner.id,
"config_id": self.id,
"object_ids": object_ids,
"auto_send": False,
"send_mode": send_mode,
})
res.append({
"case": f"{family_case}_{number_sponsorship}_child",
"subject": temp_comm.subject,
"body_html": temp_comm.body_html
})
temp_comm.unlink()
return res
@api.multi
def generate_test_case_by_partner(self, partner=None, send_mode="digital"):
"""
Generates example communications for our multiple cases in CH
        depending on partner
Outputs the texts in a file
:param partner:
:return: True
"""
self.ensure_one()
comm_obj = self.env["partner.communication.job"].with_context(
must_skip_send_to_printer=True)
res = []
        object_ids = self._get_test_objects(partner)
object_ids = ",".join([str(id) for id in object_ids])
temp_comm = comm_obj.create({
"partner_id": partner.id,
"config_id": self.id,
"object_ids": object_ids,
"auto_send": False,
"send_mode": send_mode,
})
res = {
"case": "partner",
"subject": temp_comm.subject,
"body_html": temp_comm.body_html
}
temp_comm.unlink()
return res
def open_test_case_wizard(self):
return {
"name": "Test communication cases",
"type": "ir.actions.act_window",
"view_type": "form",
"view_mode": "form",
"context": self.env.context,
"res_model": "partner.communication.test.cases.wizard",
'target': 'current',
}
def _get_test_objects(self, partner):
if self.model == "res.partner":
object_ids = partner.ids
elif self.model == "recurring.contract":
object_ids = partner.sponsorship_ids.ids
elif self.model == "correspondence":
object_ids = partner.mapped("sponsorship_ids.child_letter_ids").ids
elif self.model == "compassion.child":
object_ids = partner.sponsored_child_ids.ids
elif self.model == "account.invoice.line":
object_ids = self.env["account.invoice.line"].search([
("partner_id", "=", partner.id),
("invoice_id.invoice_category", "=", "fund")
], limit=4).ids
elif self.model == "account.invoice":
object_ids = self.env["account.invoice"].search([
("partner_id", "=", partner.id)
], limit=4).ids
return object_ids
def _find_partner(self, number_sponsorships, lang, family_case):
family = self.env.ref("partner_compassion.res_partner_title_family")
query = [
("number_sponsorships", "=", number_sponsorships),
("lang", "=", lang),
]
if family_case == "single":
query += [("title", "!=", family.id), ("title.plural", "=", False)]
else:
query += [("title", "=", family.id)]
answers = self.env["res.partner"].search(query, limit=50)
# check that the query returned a result
if len(answers) <= 0:
return None
# randomly select one
answer = random.choice(answers)
return answer
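A hedged sketch of how the generator above might be exercised from an Odoo shell; the XML-id, language, and family case are placeholders, not taken from this module:

# `env` is the Odoo environment available in an odoo shell session
config = env.ref("partner_communication_switzerland.placeholder_config_xmlid")  # placeholder xml-id
cases = config.generate_test_cases_by_language_family_case(
    lang="fr_CH", family_case="family", send_mode="digital")
for case in cases:
    print(case["case"], case["subject"])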

adoublebarrel/geo-tweet-exercise | server/assesment_site/geo_tweets/apps.py | Python | mit | size 94 | score 0

from django.apps import AppConfig

class GeoTweetsConfig(AppConfig):
name = 'geo_tweets'
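For context, a minimal sketch of how an app config like this is usually registered in the project settings; the surrounding settings module is not part of this snippet, so the entries shown are assumptions:

# settings.py (sketch)
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    # ...
    'geo_tweets.apps.GeoTweetsConfig',  # or simply 'geo_tweets'
]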

jgmanzanas/CMNT_004_15 | project-addons/stock_deposit/stock_deposit.py | Python | agpl-3.0 | size 10,814 | score 0.001017

# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Santi Argüeso
# Copyright 2014 Pexego Sistemas Informáticos S.L.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields, api
from datetime import datetime
class stock_deposit(models.Model):
_name = 'stock.deposit'
_description = "Deposits"
_inherit = ['mail.thread', 'ir.needaction_mixin']
product_id = fields.Many2one(string='Product',
related='move_id.product_id',
store=True, readonly=True)
product_uom_qty = fields.Float('Product qty',
related='move_id.product_uom_qty',
store=True, readonly=True)
product_uom = fields.Many2one(related='move_id.product_uom',
string='Uom',
store=True,
readonly=True)
invoice_id = fields.Many2one('account.invoice', 'Invoice')
move_id = fields.Many2one('stock.move', 'Deposit Move', required=True,
readonly=True, ondelete='cascade', select=1)
picking_id = fields.Many2one(related='move_id.picking_id',
string='Picking',
store=True,
readonly=True)
partner_id = fields.Many2one(related='move_id.partner_id',
string='Destination Address',
store=True,
readonly=True)
sale_id = fields.Many2one(related='move_id.procurement_id.sale_line_id.order_id',
string='Sale',
store=True,
readonly=True)
delivery_date = fields.Datetime('Date of Transfer')
return_date = fields.Date('Return date')
company_id = fields.Many2one(related='move_id.company_id',
                                 string='Company',
store=True,
readonly=True)
    state = fields.Selection([('draft', 'Draft'), ('sale', 'Sale'),
('returned', 'Returned'),
('invoiced', 'Invoiced'),
('loss', 'Loss')], 'State',
readonly=True, required=True)
sale_move_id = fields.Many2one('stock.move', 'Sale Move', required=False,
readonly=True, ondelete='cascade', select=1)
sale_picking_id = fields.Many2one(related='sale_move_id.picking_id',
string='Sale picking',
readonly=True)
return_picking_id = fields.Many2one('stock.picking', 'Return Picking',
required=False, readonly=True,
ondelete='cascade', select=1)
loss_move_id = fields.Many2one('stock.move', 'Loss Move', required=False,
readonly=True, ondelete='cascade', select=1)
loss_picking_id = fields.Many2one(related='loss_move_id.picking_id',
string='Loss picking',
readonly=True)
user_id = fields.Many2one('res.users', 'Comercial', required=False,
readonly=False, ondelete='cascade', select=1)
cost_subtotal = fields.Float('Cost', related='move_id.cost_subtotal',
store=True, readonly=True)
@api.multi
def sale(self):
move_obj = self.env['stock.move']
picking_type_id = self.env.ref('stock.picking_type_out')
for deposit in self:
procurement_id = deposit.sale_id.procurement_group_id
picking = self.env['stock.picking'].create(
{'picking_type_id': picking_type_id.id,
'partner_id': deposit.partner_id.id,
'origin': deposit.sale_id.name,
'date_done': datetime.now(),
'invoice_state': '2binvoiced',
'commercial': deposit.user_id.id,
'group_id': procurement_id.id})
values = {
'product_id': deposit.product_id.id,
'product_uom_qty': deposit.product_uom_qty,
'product_uom': deposit.product_uom.id,
'partner_id': deposit.partner_id.id,
'name': 'Sale Deposit: ' + deposit.move_id.name,
'location_id': deposit.move_id.location_dest_id.id,
'location_dest_id': deposit.partner_id.property_stock_customer.id,
'invoice_state': '2binvoiced',
'picking_id': picking.id,
'procurement_id': deposit.move_id.procurement_id.id,
'commercial': deposit.user_id.id,
'group_id': procurement_id.id
}
move = move_obj.create(values)
move.action_confirm()
move.force_assign()
move.action_done()
deposit.write({'state': 'sale', 'sale_move_id': move.id})
@api.one
def _prepare_deposit_move(self, picking, group):
deposit_id = self.env.ref('stock_deposit.stock_location_deposit')
move_template = {
'name': 'RET' or '',
'product_id': self.product_id.id,
'product_uom': self.product_uom.id,
'product_uom_qty': self.product_uom_qty,
'product_uos': self.product_uom.id,
'location_id': deposit_id.id,
'location_dest_id':
picking.picking_type_id.default_location_dest_id.id,
'picking_id': picking.id,
'partner_id': self.partner_id.id,
'move_dest_id': False,
'state': 'draft',
'company_id': self.company_id.id,
'group_id': group.id,
'procurement_id': False,
'origin': False,
'route_ids':
picking.picking_type_id.warehouse_id and
[(6, 0,
[x.id for x in
picking.picking_type_id.warehouse_id.route_ids])] or [],
'warehouse_id': picking.picking_type_id.warehouse_id.id,
'invoice_state': 'none'
}
return move_template
@api.one
def _create_stock_moves(self, picking=False):
stock_move = self.env['stock.move']
todo_moves = self.env['stock.move']
new_group = self.env['procurement.group'].create(
{'name': 'deposit RET', 'partner_id': self.partner_id.id})
for vals in self._prepare_deposit_move(picking, new_group):
todo_moves += stock_move.create(vals)
todo_moves.action_confirm()
todo_moves.force_assign()
@api.multi
def return_deposit(self):
picking_type_id = self.env.ref('stock.picking_type_in')
for deposit in self:
picking = self.env['stock.picking'].create(
{'picking_type_id': picking_type_id.id,
'partner_id': deposit.partner_id.id})
deposit._create_stock_moves(picking)
deposit.write({'state': 'returned',
'return_picking_id': picking.id})
@api.model
def send_advise_email(self):
deposits = self

MeerkatLabs/sleekpromises | test/promises/test_2_3_4.py | Python | bsd-3-clause | size 7,323 | score 0.006691

"""
2.3.4: If `x` is not an object or function, fulfill `promise` with `x`
https://github.com/promises-aplus/promises-tests/blob/2.1.1/lib/tests/2.3.4.js
"""
from test.promises.helpers import generate_fulfilled_test_case, generate_rejected_test_case
dummy = {'dummy': 'dummy'}
sentinel = {'sentinel': 'sentinel'}
def primitive_fulfilled_wrapper(primitive_value):
def test_method(test_case, promise, done):
def return_primitive(value):
return primitive_value
def retrieve_primitive(value):
test_case.assertEqual(value, primitive_value)
done()
promise.then(return_primitive).then(retrieve_primitive)
return test_method
def primitive_rejected_wrapper(primitive_value):
def test_method(test_case, promise, done):
        def return_primitive(value):
return primitive_value
        def retrieve_primitive(value):
test_case.assertEqual(value, primitive_value)
done()
promise.then(None, return_primitive).then(retrieve_primitive)
return test_method
None_FulfilledTestCase = generate_fulfilled_test_case(primitive_fulfilled_wrapper(None), dummy,
module=__name__,
name='None_FulfilledTestCase')
Zero_FulfilledTestCase = generate_fulfilled_test_case(primitive_fulfilled_wrapper(0), dummy,
module=__name__,
name='Zero_FulfilledTestCase')
One_FulfilledTestCase = generate_fulfilled_test_case(primitive_fulfilled_wrapper(1), dummy,
module=__name__,
name='One_FulfilledTestCase')
String_FulfilledTestCase = generate_fulfilled_test_case(primitive_fulfilled_wrapper('asdf'), dummy,
module=__name__,
name='String_FulfilledTestCase')
EmptyString_FulfilledTestCase = generate_fulfilled_test_case(primitive_fulfilled_wrapper(''), dummy,
module=__name__,
name='EmptyString_FulfilledTestCase')
List_FulfilledTestCase = generate_fulfilled_test_case(primitive_fulfilled_wrapper(['asdf', 'value1']), dummy,
module=__name__,
name='List_FulfilledTestCase')
EmptyList_FulfilledTestCase = generate_fulfilled_test_case(primitive_fulfilled_wrapper([]), dummy,
module=__name__,
name='EmptyList_FulfilledTestCase')
Dict_FulfilledTestCase = generate_fulfilled_test_case(primitive_fulfilled_wrapper(dict(key='value')), dummy,
module=__name__,
name='Dict_FulfilledTestCase')
EmptyDict_FulfilledTestCase = generate_fulfilled_test_case(primitive_fulfilled_wrapper(dict()), dummy,
module=__name__,
name='EmptyDict_FulfilledTestCase')
Tuple_FulfilledTestCase = generate_fulfilled_test_case(primitive_fulfilled_wrapper(('asdf', 'value1', )), dummy,
module=__name__,
name='Tuple_FulfilledTestCase')
EmptyTuple_FulfilledTestCase = generate_fulfilled_test_case(primitive_fulfilled_wrapper(()), dummy,
module=__name__,
name='EmptyTuple_FulfilledTestCase')
Object_FulfilledTestCase = generate_fulfilled_test_case(primitive_fulfilled_wrapper(object()), dummy,
module=__name__,
name='Object_FulfilledTestCase')
None_RejectedTestCase = generate_rejected_test_case(primitive_rejected_wrapper(None), dummy,
module=__name__,
name='None_RejectedTestCase')
Zero_RejectedTestCase = generate_rejected_test_case(primitive_rejected_wrapper(0), dummy,
module=__name__,
name='Zero_RejectedTestCase')
One_RejectedTestCase = generate_rejected_test_case(primitive_rejected_wrapper(1), dummy,
module=__name__,
name='One_RejectedTestCase')
String_RejectedTestCase = generate_rejected_test_case(primitive_rejected_wrapper('asdf'), dummy,
module=__name__,
name='String_RejectedTestCase')
EmptyString_RejectedTestCase = generate_rejected_test_case(primitive_rejected_wrapper(''), dummy,
module=__name__,
name='EmptyString_RejectedTestCase')
List_RejectedTestCase = generate_rejected_test_case(primitive_rejected_wrapper(['asdf', 'value1']), dummy,
module=__name__,
name='List_RejectedTestCase')
EmptyList_RejectedTestCase = generate_rejected_test_case(primitive_rejected_wrapper([]), dummy,
module=__name__,
name='EmptyList_RejectedTestCase')
Dict_RejectedTestCase = generate_rejected_test_case(primitive_rejected_wrapper(dict(key='value')), dummy,
module=__name__,
name='Dict_RejectedTestCase')
EmptyDict_RejectedTestCase = generate_rejected_test_case(primitive_rejected_wrapper(dict()), dummy,
module=__name__,
name='EmptyDict_RejectedTestCase')
Tuple_RejectedTestCase = generate_rejected_test_case(primitive_rejected_wrapper(('asdf', 'value1', )), dummy,
module=__name__,
name='Tuple_RejectedTestCase')
EmptyTuple_RejectedTestCase = generate_rejected_test_case(primitive_rejected_wrapper(()), dummy,
module=__name__,
name='EmptyTuple_RejectedTestCase')
Object_RejectedTestCase = generate_rejected_test_case(primitive_rejected_wrapper(object()), dummy,
module=__name__,
name='Object_RejectedTestCase')

Teekuningas/mne-python | examples/datasets/plot_limo_data.py | Python | bsd-3-clause | size 13,302 | score 0

"""
.. _ex-limo-data:
=============================================================
Single trial linear regression analysis with the LIMO dataset
=============================================================
Here we explore the structure of the data contained in the
`LIMO dataset`_.
This example replicates and extends some of the main analysis
and tools integrated in `LIMO MEEG`_, a MATLAB toolbox originally designed
to interface with EEGLAB_.
In summary, the example:
- Fetches epoched data files for a single subject of the LIMO dataset [1]_.
If the LIMO files are not found on disk, the
fetcher :func:`mne.datasets.limo.load_data()` will automatically download
the files from a remote repository.
- During import, information about the data (i.e., sampling rate, number of
epochs per condition, number and name of EEG channels per subject, etc.) is
extracted from the LIMO :file:`.mat` files stored on disk and added to the
epochs structure as metadata.
- Fits linear models on the single subject's data and visualizes inferential
measures to evaluate the significance of the estimated effects.
References
----------
.. [1] Guillaume, Rousselet. (2016). LIMO EEG Dataset, [dataset].
University of Edinburgh, Centre for Clinical Brain Sciences.
https://doi.org/10.7488/ds/1556.
.. [2] Rousselet, G. A., Gaspar, C. M., Pernet, C. R., Husk, J. S.,
Bennett, P. J., & Sekuler, A. B. (2010). Healthy aging delays scalp EEG
sensitivity to noise in a face discrimination task.
Frontiers in psychology, 1, 19. https://doi.org/10.3389/fpsyg.2010.00019
.. [3] Rousselet, G. A., Pernet, C. R., Bennett, P. J., & Sekuler, A. B.
(2008). Parametric study of EEG sensitivity to phase noise during face
processing. BMC neuroscience, 9(1), 98.
https://doi.org/10.1186/1471-2202-9-98
.. _LIMO dataset: https://datashare.is.ed.ac.uk/handle/10283/2189?show=full
.. _LIMO MEEG: https://github.com/LIMO-EEG-Toolbox
.. _EEGLAB: https://sccn.ucsd.edu/eeglab/index.php
.. _Fig 1: https://bmcneurosci.biomedcentral.com/articles/10.1186/1471-2202-9-98/figures/1
.. _least squares: https://docs.scipy.org/doc/scipy/reference/generated/scipy.linalg.lstsq.html
""" # noqa: E501
# Authors: Jose C. Garcia Alanis <alanis.jcg@gmail.com>
#
# License: BSD (3-clause)
import numpy as np
import matplotlib.pyplot as plt
from mne.datasets.limo import load_data
from mne.stats import linear_regression
from mne.viz import plot_events, plot_compare_evokeds
from mne import combine_evoked
print(__doc__)
# subject to use
subj = 1
###############################################################################
# About the data
# --------------
#
# In the original LIMO experiment (see [2]_), participants performed a
# two-alternative forced choice task, discriminating between two face stimuli.
# The same two faces were used during the whole experiment,
# with varying levels of noise added, making the faces more or less
# discernible to the observer (see `Fig 1`_ in [3]_ for a similar approach).
#
# The presented faces varied across a noise-signal (or phase-coherence)
# continuum spanning from 0 to 85% in increasing steps of 5%.
# In other words, faces with high phase-coherence (e.g., 85%) were easy to
# identify, while faces with low phase-coherence (e.g., 5%) were hard to
# identify and by extension very hard to discriminate.
#
#
# Load the data
# -------------
#
# We'll begin by loading the data from subject 1 of the LIMO dataset.
# This step can take a little while if you're loading the data for the
# first time.
limo_epochs = load_data(subject=subj)
###############################################################################
# Note that the result of the loading process is an
# :class:`mne.EpochsArray` containing the data ready to interface
# with MNE-Python.
print(limo_epochs)
###############################################################################
# Visualize events
# ----------------
#
# We can visualise the distribution of the face events contained in the
# ``limo_epochs`` structure. Events should appear clearly grouped, as the
# epochs are ordered by condition.
fig = plot_events(limo_epochs.events, event_id=limo_epochs.event_id)
fig.suptitle("Distribution of events in LIMO epochs")
###############################################################################
# As can be seen above, conditions are coded as ``Face/A`` and ``Face/B``.
# Information about the phase-coherence of the presented faces is stored in the
# epochs metadata. This information can be easily accessed by calling
# ``limo_epochs.metadata``. As shown below, the epochs metadata also contains
# information about the presented faces for convenience.
print(limo_epochs.metadata.head())
###############################################################################
# Now let's take a closer look at the information in the epochs
# metadata.
# We want to include all columns in the summary table
epochs_summary = limo_epochs.metadata.describe(include='all').round(3)
print(epochs_summary)
###############################################################################
# The first column of the summary table above provides more or less the same
# information as the ``print(limo_epochs)`` command we ran before. There are
# 1055 faces (i.e., epochs), subdivided in 2 conditions (i.e., Face A and
# Face B) and, for this particular subject, there are more epochs for the
# condition Face B.
#
# In addition, we can see in the second column that the values for the
# phase-coherence variable range from -1.619 to 1.642. This is because the
# phase-coherence values are provided as a z-scored variable in the LIMO
# dataset. Note that they have a mean of zero and a standard deviation of 1.
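# A quick, hedged way to verify the z-scoring noted above; the metadata column
# name "phase-coherence" is assumed here rather than stated in this text.
print(limo_epochs.metadata["phase-coherence"].agg(["mean", "std"]).round(3))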
#
#
# Visualize condition ERPs
# ------------------------
#
# Let's plot the ERPs evoked by Face A and Face B, to see how similar they are.
# only show -250 to 500 ms
ts_args = dict(xlim=(-0.25, 0.5))
# plot evoked response for face A
limo_epochs['Face/A'].average().plot_joint(times=[0.15],
title='Evoked response: Face A',
ts_args=ts_args)
# and face B
limo_epochs['Face/B'].average().plot_joint(times=[0.15],
title='Evoked response: Face B',
ts_args=ts_args)
###############################################################################
# We can also compute the difference wave contrasting Face A and Face B.
# Although, looking at the evoked responses above, we shouldn't expect great
# differences among these face-stimuli.
# Face A minus Face B
difference_wave = combine_evoked([limo_epochs['Face/A'].average(),
limo_epochs['Face/B'].average()],
weights=[1, -1])
# plot difference wave
difference_wave.plot_joint(times=[0.15], title='Difference Face A - Face B')
###############################################################################
# As expected, no clear pattern appears when contrasting
# Face A and Face B. However, we could narrow our search a little bit more.
# Since this is a "visual paradigm" it might be best to look at electrodes
# located over the occipital lobe, as differences between stimuli (if any)
# might be easier to spot over visual areas.
# Create a dictionary containing the evoked responses
conditions = ["Face/A", "Face/B"]
evokeds = {condition: limo_epochs[condition].average()
for condition in conditions}
# concentrate analysis on occipital electrodes (e.g. B11)
pick = evokeds["Face/A"].ch_names.index('B11')
# compare evoked responses
plot_compare_evokeds(evokeds, picks=pick, ylim=dict(eeg=(-15, 7.5)))
###############################################################################
# We do see a difference between Face A and B, but it is pretty small.
#
#
# Visualize effect of stimulus phase-coherence
# --------------------------------------------
#
# Since phase-coherence
# determined whether a face stimulus could be easily identified,
# one could expect that faces with high phase-coherence shoul

IronLanguages/ironpython3 | Tests/test_surrogatepass.py | Python | apache-2.0 | size 4,002 | score 0.010245

# Licensed to the .NET Foundation under one or more agreements.
# The .NET Foundation licenses this file to you under the Apache 2.0 License.
# See the LICENSE file in the project root for more information.
##
## Test surrogatepass encoding error handler
##
import unittest
import codecs
from iptest import run_test
class SurrogatePassTest(unittest.TestCase):
def test_ascii(self):
self.assertEqual("abc".encode("ascii", errors="surrogatepass"
|
), b"abc")
self.assertEqual(b"abc".decode("ascii", errors="sur
|
rogatepass"), "abc")
def test_utf_7(self):
self.assertEqual("abc\ud810xyz".encode("utf_7", errors="surrogatepass"), b"abc+2BA-xyz")
self.assertEqual(b"abc+2BA-xyz".decode("utf_7", errors="surrogatepass"), "abc\ud810xyz")
def test_utf_8(self):
self.assertEqual("abc\ud810xyz".encode("utf_8", errors="surrogatepass"), b"abc\xed\xa0\x90xyz")
self.assertEqual(b"abc\xed\xa0\x90xyz".decode("utf_8", errors="surrogatepass"), "abc\ud810xyz")
def test_utf_16_le(self):
# lone high surrogate
self.assertEqual("\ud810".encode("utf_16_le", errors="surrogatepass"), b"\x10\xd8")
self.assertEqual(b"\x10\xd8".decode("utf_16_le", errors="surrogatepass"), "\ud810")
#lone low surrogate
self.assertEqual("\udc0a".encode("utf_16_le", errors="surrogatepass"), b"\n\xdc")
self.assertEqual(b"\n\xdc".decode("utf_16_le", errors="surrogatepass"), "\udc0a")
# invalid surrogate pair (low, high)
self.assertEqual("\ude51\uda2f".encode("utf_16_le", errors="surrogatepass"), b"Q\xde/\xda")
self.assertEqual(b"Q\xde/\xda".decode("utf_16_le", errors="surrogatepass"), "\ude51\uda2f")
def test_utf_16_be(self):
# lone high surrogate
self.assertEqual("\ud810".encode("utf_16_be", errors="surrogatepass"), b"\xd8\x10")
self.assertEqual(b"\xd8\x10".decode("utf_16_be", errors="surrogatepass"), "\ud810")
#lone low surrogate
self.assertEqual("\udc0a".encode("utf_16_be", errors="surrogatepass"), b"\xdc\n")
self.assertEqual(b"\xdc\n".decode("utf_16_be", errors="surrogatepass"), "\udc0a")
# invalid surrogate pair (low, high)
self.assertEqual("\ude51\uda2f".encode("utf_16_be", errors="surrogatepass"), b"\xdeQ\xda/")
self.assertEqual(b"\xdeQ\xda/".decode("utf_16_be", errors="surrogatepass"), "\ude51\uda2f")
def test_utf_32_le(self):
# lone high surrogate
self.assertEqual("\ud810".encode("utf_32_le", errors="surrogatepass"), b"\x10\xd8\x00\x00")
self.assertEqual(b"\x10\xd8\x00\x00".decode("utf_32_le", errors="surrogatepass"), "\ud810")
#lone low surrogate
self.assertEqual("\udc0a".encode("utf_32_le", errors="surrogatepass"), b"\n\xdc\x00\x00")
self.assertEqual(b"\n\xdc\x00\x00".decode("utf_32_le", errors="surrogatepass"), "\udc0a")
# invalid surrogate pair (low, high)
self.assertEqual("\ude51\uda2f".encode("utf_32_le", errors="surrogatepass"), b"Q\xde\x00\x00/\xda\x00\x00")
self.assertEqual(b"Q\xde\x00\x00/\xda\x00\x00".decode("utf_32_le", errors="surrogatepass"), "\ude51\uda2f")
def test_utf_32_be(self):
# lone high surrogate
self.assertEqual("\ud810".encode("utf_32_be", errors="surrogatepass"), b"\x00\x00\xd8\x10")
self.assertEqual(b"\x00\x00\xd8\x10".decode("utf_32_be", errors="surrogatepass"), "\ud810")
#lone low surrogate
self.assertEqual("\udc0a".encode("utf_32_be", errors="surrogatepass"), b"\x00\x00\xdc\n")
self.assertEqual(b"\x00\x00\xdc\n".decode("utf_32_be", errors="surrogatepass"), "\udc0a")
# invalid surrogate pair (low, high)
self.assertEqual("\ude51\uda2f".encode("utf_32_be", errors="surrogatepass"), b"\x00\x00\xdeQ\x00\x00\xda/")
self.assertEqual(b"\x00\x00\xdeQ\x00\x00\xda/".decode("utf_32_be", errors="surrogatepass"), "\ude51\uda2f")
run_test(__name__)
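A small standalone illustration of the behaviour these tests exercise, relying only on standard CPython codec semantics: the default strict handler rejects a lone surrogate, while surrogatepass lets it round-trip through UTF-8.

lone = "\ud810"  # lone high surrogate, as in the tests above
try:
    lone.encode("utf_8")  # default errors="strict"
except UnicodeEncodeError:
    print("strict refuses lone surrogates")
encoded = lone.encode("utf_8", errors="surrogatepass")  # b'\xed\xa0\x90'
assert encoded.decode("utf_8", errors="surrogatepass") == lone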

indigo-dc/im | test/unit/connectors/OpenNebula.py | Python | gpl-3.0 | size 17,429 | score 0.003844

#! /usr/bin/env python
#
# IM - Infrastructure Manager
# Copyright (C) 2011 - GRyCAP - Universitat Politecnica de Valencia
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys
import unittest
sys.path.append(".")
sys.path.append("..")
from .CloudConn import TestCloudConnectorBase
from IM.CloudInfo import CloudInfo
from IM.auth import Authentication
from radl import radl_parse
from IM.VirtualMachine import VirtualMachine
from IM.InfrastructureInfo import InfrastructureInfo
from IM.connectors.OpenNebula import OpenNebulaCloudConnector
from mock import patch, MagicMock, call
class TestONEConnector(TestCloudConnectorBase):
"""
Class to test the IM connectors
"""
@staticmethod
def get_one_cloud():
cloud_info = CloudInfo()
cloud_info.type = "OpenNebula"
cloud_info.server = "server.com"
cloud_info.port = 2633
inf = MagicMock()
inf.id = "1"
one_cloud = OpenNebulaCloudConnector(cloud_info, inf)
return one_cloud
@patch('IM.connectors.OpenNebula.ServerProxy')
def test_05_getONEVersion(self, server_proxy):
one_server = MagicMock()
one_server.system.listMethods.return_value = ["one.system.version"]
one_server.one.system.version.return_value = (True, "5.2.1", "")
server_proxy.return_value = one_server
auth = Authentication([{'id': 'one', 'type': 'OpenNebula', 'username': 'user',
'password': 'pass', 'host': 'server.com:2633'}])
one_cloud = self.get_one_cloud()
one_cloud.getONEVersion(auth)
def test_10_concrete(self):
radl_data = """
network net ()
system test (
cpu.arch='x86_64' and
cpu.count>=1 and
memory.size>=512m and
net_interface.0.connection = 'net' and
net_interface.0.dns_name = 'test' and
disk.0.os.name = 'linux' and
disk.0.image.url = 'one://server.com/1' and
disk.0.os.credentials.username = 'user' and
disk.0.os.credentials.password = 'pass'
)"""
radl = radl_parse.parse_radl(radl_data)
radl_system = radl.systems[0]
auth = Authentication([{'id': 'one', 'type': 'OpenNebula', 'username': 'user',
'password': 'pass', 'host': 'server.com:2633'}])
one_cloud = self.get_one_cloud()
concrete = one_cloud.concreteSystem(radl_system, auth)
self.assertEqual(len(concrete), 1)
self.assertNotIn("ERROR", self.log.getvalue(), msg="ERROR found in log: %s" % self.log.getvalue())
@patch('IM.connectors.OpenNebula.ServerProxy')
@patch('IM.connectors.OpenNebula.OpenNebulaCloudConnector.getONEVersion')
@patch('IM.InfrastructureList.InfrastructureList.save_data')
def test_20_launch(self, save_data, getONEVersion, server_proxy):
radl_data = """
network net1 (provider_id = 'publica' and outbound = 'yes' and
outports = '8080,9000:9100' and sg_name= 'test')
network net2 ()
system test (
cpu.arch='x86_64' and
cpu.count=1 and
memory.size=512m and
availability_zone='0' and
net_interface.0.connection = 'net1' and
net_interface.0.dns_name = 'test' and
net_interface.1.connection = 'net2' and
instance_tags = 'key=value,key1=value2' and
disk.0.os.name = 'linux' and
disk.0.image.url = 'one://server.com/1' and
disk.0.os.credentials.username = 'user' and
disk.1.size=1GB and
disk.1.device='hdb' and
disk.1.mount_path='/mnt/path'
)"""
radl = radl_parse.parse_radl(radl_data)
radl.check()
auth = Authentication([{'id': 'one', 'type': 'OpenNebula', 'username': 'user',
'password': 'pass', 'host': 'server.com:2633'},
{'type': 'InfrastructureManager', 'username': 'user',
'password': 'pass'}])
one_cloud = self.get_one_cloud()
getONEVersion.return_value = "4.14.0"
one_server = MagicMock()
one_server.one.vm.allocate.return_value = (True, "1", 0)
one_server.one.vnpool.info.return_value = (True, self.read_file_as_string("files/nets.xml"), 0)
        one_server.one.secgrouppool.info.return_value = (True, self.read_file_as_string("files/sgs.xml"), 0)
one_server.one.secgroup.allocate.return_value = (True, 1, 0)
server_proxy.return_value = one_server
inf = InfrastructureInfo()
inf.auth = auth
res = one_cloud.launch(inf, radl, radl, 1, auth)
success, _ = res[0]
self.assertTrue(success, msg="ERROR: launching a VM.")
        sg_template = ('NAME = test\nRULE = [ PROTOCOL = TCP, RULE_TYPE = inbound, RANGE = 22:22 ]\n'
'RULE = [ PROTOCOL = TCP, RULE_TYPE = inbound, RANGE = 8080:8080 ]\n'
'RULE = [ PROTOCOL = TCP, RULE_TYPE = inbound, RANGE = 9000:9100 ]\n')
self.assertEqual(one_server.one.secgroup.allocate.call_args_list, [call('user:pass', sg_template)])
vm_template = """
NAME = userimage
CPU = 1
VCPU = 1
MEMORY = 512
OS = [ ARCH = "x86_64" ]
DISK = [ IMAGE_ID = "1" ]
DISK = [ SAVE = no, TYPE = fs , FORMAT = ext3, SIZE = 1024, TARGET = hdb ]
SCHED_REQUIREMENTS = "CLUSTER_ID=\\"0\\""\n"""
self.assertIn(vm_template, one_server.one.vm.allocate.call_args_list[0][0][1])
self.assertNotIn("ERROR", self.log.getvalue(), msg="ERROR found in log: %s" % self.log.getvalue())
# Now test an error in allocate
one_server.one.vm.allocate.return_value = (False, "Error msg", 0)
res = one_cloud.launch(inf, radl, radl, 1, auth)
success, msg = res[0]
self.assertFalse(success)
self.assertEqual(msg, "ERROR: Error msg")
@patch('IM.connectors.OpenNebula.ServerProxy')
def test_30_updateVMInfo(self, server_proxy):
radl_data = """
network net (outbound = 'yes' and provider_id = 'publica')
network net1 (provider_id = 'privada')
system test (
cpu.arch='x86_64' and
cpu.count=1 and
memory.size=512m and
net_interface.0.connection = 'net' and
net_interface.0.dns_name = 'test' and
net_interface.1.connection = 'net1' and
disk.0.os.name = 'linux' and
disk.0.image.url = 'one://server.com/1' and
disk.0.os.credentials.username = 'user' and
disk.0.os.credentials.password = 'pass'
)"""
radl = radl_parse.parse_radl(radl_data)
radl.check()
auth = Authentication([{'id': 'one', 'type': 'OpenNebula', 'username': 'user',
'password': 'pass', 'host': 'server.com:2633'}])
one_cloud = self.get_one_cloud()
inf = MagicMock()
vm = VirtualMachine(inf, "1", one_cloud.cloud, radl, radl, one_cloud, 1)
one_server = MagicMock()
one_server.one.vm.info.return_value = (True, self.read_file_as_string("files/vm_info.xml"), 0)
server_proxy.return_value = one_server
success, vm = one_cloud.updateVMInfo(vm, auth)
self.assertEquals(vm.info.systems[0].getValue("net_interface.1.ip"), "10.0.0.01")
self.assertEquals(vm.info.systems[0].getValue("net_interface.0.i

uw-it-aca/spacescout_web | spacescout_web/views/share.py | Python | apache-2.0 | size 8,038 | score 0.000498

""" Copyright 2012, 2013 UW Information Technology, University of Washington
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.contrib.auth.decorators import login_required
from django.http import HttpResponseRedirect, Http404
from spacescout_web.forms.share import ShareForm
from django.conf import settings
from django.utils.http import urlquote
from spacescout_web.spot import Spot, SpotException
from spacescout_web.views.contact import validate_back_link
import oauth2
import socket
import simplejson as json
import logging
logger = logging.getLogger(__name__)
@login_required(login_url='/login')
def share(request, spot_id=None):
if request.method == 'POST':
        form = ShareForm(request.POST)
try:
back = request.POST['back']
validate_back_link(back)
except:
back = '/'
if form.is_valid():
spot_id = form.cleaned_data['spot_id']
back = form.cleaned_data['back']
sender = form.cleaned_data['sender']
recipient = form.cleaned_data['recipient']
subject = form.cleaned_data['subject']
message = form.cleaned_data['message']
bot_test = form.cleaned_data['email_confirmation']
url = "{0}/api/v1/spot/{1}/share".format(
settings.SS_WEB_SERVER_HOST,
spot_id
)
body = json.dumps({
'to': recipient,
'from': sender,
'comment': message,
'subject': subject
})
headers = {
"X-OAuth-User": "%s" % request.user.username,
'Content-Type': 'application/json',
'Accept': 'application/json'
}
consumer = oauth2.Consumer(
key=settings.SS_WEB_OAUTH_KEY,
secret=settings.SS_WEB_OAUTH_SECRET
)
client = oauth2.Client(consumer)
resp, content = client.request(url,
method='PUT',
body=body,
headers=headers)
if not (resp.status == 200 or resp.status == 201):
logger.error('Share service failure %s: %s' % (
resp.status,
url
))
return HttpResponseRedirect('/share/sorry/')
return HttpResponseRedirect(
'/share/thankyou/?back=' +
urlquote(back)
)
else:
# mask user from silliness
try:
back = request.GET['back']
validate_back_link(back)
except:
back = '/'
if request.user and request.user.is_authenticated():
consumer = oauth2.Consumer(
key=settings.SS_WEB_OAUTH_KEY,
secret=settings.SS_WEB_OAUTH_SECRET
)
client = oauth2.Client(consumer)
url = "{0}/api/v1/user/me".format(settings.SS_WEB_SERVER_HOST)
headers = {
"X-OAuth-User": "%s" % request.user.username,
'Content-Type': 'application/json',
'Accept': 'application/json'
}
resp, content = client.request(url,
method='GET',
headers=headers)
sender = "%s@%s" % (
request.user.username,
getattr(settings, 'SS_MAIL_DOMAIN', 'uw.edu')
)
if resp.status == 200:
me = content = json.loads(content)
if 'email' in me and len(me['email']):
sender = me['email']
else:
sender = ''
form = ShareForm(initial={
'spot_id': spot_id,
'back': back,
'sender': sender,
'subject': 'Check out this space I found on SpaceScout',
})
try:
spot = Spot(spot_id).get()
share_text = [spot["name"], spot["type"]]
if ('extended_info' in spot and
'location_description' in spot['extended_info']):
share_text.append(spot['extended_info']['location_description'])
except SpotException as e:
logger.error('Share failure for spot %s: %s' % (spot_id, e))
return render_to_response('spacescout_web/share-sorry.html', {
'problem': 'Sorry, but the space you wish '
'to share does not exist.',
'back': back,
}, context_instance=RequestContext(request))
share_url = 'http://%s/space/%s/%s' % (getattr(
settings, 'SS_APP_SERVER',
socket.gethostname()),
spot_id, urlquote(spot["name"])
)
return render_to_response('spacescout_web/share-form.html', {
'form': form,
'back': back,
'spot_id': spot_id,
'share_text': share_text,
'share_url': share_url,
'hidden': ["spot_id", "back"],
'is_mobile': (request.MOBILE == 1),
}, context_instance=RequestContext(request))
def thank_you(request, spot_id=None):
share_variables = _share_variables(request, spot_id)
try:
back = request.GET['back']
validate_back_link(back)
except:
back = share_variables['back']
return render_to_response('spacescout_web/share-thankyou.html', {
'spot_id': spot_id,
'back': back,
}, context_instance=RequestContext(request))
def sorry(request, spot_id=None):
share_variables = _share_variables(request, spot_id)
try:
back = request.GET['back']
validate_back_link(back)
except:
back = share_variables['back']
return render_to_response('spacescout_web/share-sorry.html', {
'problem': None,
'back': back
}, context_instance=RequestContext(request))
def _share_variables(request, spot_id):
spot_name = 'Unknown'
spot_description = ''
if spot_id is not None:
try:
spot = Spot(spot_id).get()
except SpotException as ex:
raise Http404
spot_name = spot["name"]
if ('extended_info' in spot and
'location_description' in spot['extended_info']):
spot_description = spot['extended_info']['location_description']
if request.MOBILE == 1:
is_mobile = True
else:
is_mobile = False
if request.GET['back']:
back = request.GET['back']
else:
back = '/'
return {
'spot_name': spot_name,
'spot_description': spot_description,
'is_mobile': is_mobile,
'back': back
}

MCGallaspy/kolibri | kolibri/core/webpack/utils.py | Python | mit | size 1,718 | score 0.004075

"""
This module manages the interface between webpack and Django.
It loads webpack bundle tracker stats files, and catalogues the different files
that need to be served in order to inject that frontend code into a Django template.
Originally, it was a monkeypatch of django-webpack-loader - but as our needs are somewhat
different, much of the code has simply been rewritten, and will continue to be, to better match our use case.
"""
from __future__ import absolute_import, print_function, unicode_literals
from django.conf import settings
from django.utils.safestring import mark_safe
def render_as_url(chunk):
"""
This function returns the URL for a particular chunk (JS or CSS file), by
appending the url or public path for the file to the current STATIC_URL set
in settings.
:param chunk: A dictionary with a url or publicPath attribute -
this is generated by Webpack.
:returns: The URL to the file for the client.
"""
static = getattr(settings, 'STATIC_URL')
url = chunk.get('publicPath') or chunk['url']
return "{static}{url}".format(static=static, url=url)
def webpack_asset_render(HookClass, async=False):
"""
    This produces content for a script tag for a WebpackInclusionHook subclass that implements
    different render-to-html methods, either sync or async.
:param HookClass: a subclass of WebpackInclusionHook
    :param async: Render sync or async.
:return: HTML of script tags to insert
"""
tags = []
for hook in HookClass().registered_hooks:
tags.append(
hook.render_to_page_load_sync_html() if not async else hook.render_to_page_load_async_html()
)
return mark_safe('\n'.join(tags))
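A short usage sketch for `render_as_url`; the chunk dictionary and the `STATIC_URL` value are invented for illustration:

# assuming settings.STATIC_URL == '/static/'
chunk = {'name': 'app.js', 'publicPath': 'bundles/app.js'}  # placeholder chunk
print(render_as_url(chunk))  # -> /static/bundles/app.js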

UdK-VPT/Open_eQuarter | crow_archive/crow_django/ates/models.py | Python | gpl-2.0 | size 33,811 | score 0.00627

# This is an auto-generated Django model module.
# You'll have to do the following manually to clean this up:
# * Rearrange models' order
# * Make sure each model has one field with primary_key=True
# * Remove `managed = False` lines if you wish to allow Django to create, modify, and delete the table
# Feel free to rename the models, but don't rename db_table values or field names.
#
# Also note: You'll have to insert the output of 'django-admin sqlcustom [app_label]'
# into your database.
from __future__ import unicode_literals
from django.db import models
class Buildings(models.Model):
buildingid = models.IntegerField(db_column='buildingID', primary_key=True) # Field name made lowercase.
buildingtoken = models.CharField(db_column='buildingToken', max_length=200, primary_key=True) # Field name made lowercase.
version = models.IntegerField(primary_key=True)
datasource = models.CharField(max_length=200, primary_key=True)
buildingname = models.CharField(db_column='buildingName', max_length=200, blank=True, null=True) # Field name made lowercase.
comment = models.CharField(max_length=2000, blank=True, null=True)
associatedproject = models.CharField(db_column='associatedProject', max_length=200, blank=True, null=True) # Field name made lowercase.
quarter = models.CharField(max_length=200, blank=True, null=True)
street = models.CharField(max_length=200, blank=True, null=True)
housenumber = models.CharField(db_column='houseNumber', max_length=200, blank=True, null=True) # Field name made lowercase.
zipcode = models.IntegerField(db_column='ZIPcode', blank=True, null=True) # Field name made lowercase.
city = models.CharField(max_length=200, blank=True, null=True)
state = models.CharField(max_length=200, blank=True, null=True)
longitude = models.FloatField(blank=True, null=True)
latitude = models.FloatField(blank=True, null=True)
altitude = models.FloatField(blank=True, null=True)
climate = models.CharField(max_length=200, blank=True, null=True)
weatherexposure = models.CharField(db_column='weatherExposure', max_length=200, blank=True, null=True) # Field name made lowercase.
yearofcompletion = models.IntegerField(db_column='yearOfCompletion', blank=True, null=True) # Field name made lowercase.
monumentalprotection = models.CharField(db_column='monumentalProtection', max_length=200, blank=True, null=True) # Field name made lowercase.
buildingtype = models.CharField(db_column='buildingType', max_length=200, blank=True, null=True) # Field name made lowercase.
architecture = models.CharField(max_length=200, blank=True, null=True)
cubatur = models.CharField(max_length=200, blank=True, null=True)
owner = models.CharField(max_length=200, blank=True, null=True)
user = models.CharField(max_length=200, blank=True, null=True)
utilization = models.CharField(max_length=200, blank=True, null=True)
structuralstate = models.CharField(db_column='structuralState', max_length=200, blank=True, null=True) # Field name made lowercase.
technicalstate = models.CharField(db_column='technicalState', max_length=200, blank=True, null=True) # Field name made lowercase.
developmentpotential = models.CharField(db_column='developmentPotential', max_length=200, blank=True, null=True) # Field name made lowercase.
buildingcategorybwz = models.IntegerField(db_column='buildingCategoryBWZ', blank=True, null=True) # Field name made lowercase.
buildingageclass = models.CharField(db_column='buildingAgeClass', max_length=8, blank=True, null=True) # Field name made lowercase.
usablefloorspacenf = models.FloatField(db_column='usableFloorSpaceNF', blank=True, null=True) # Field name made lowercase.
heatedfloorspacengf = models.FloatField(db_column='heatedFloorSpaceNGF', blank=True, null=True) # Field name made lowercase.
grossfloorareabgf = models.FloatField(db_column='grossFloorAreaBGF', blank=True, null=True) # Field name made lowercase.
buildinggroundarea = models.FloatField(db_column='buildingGroundArea', blank=True, null=True) # Field name made lowercase.
avgbuildingheight = models.FloatField(db_column='avgBuildingHeight', blank=True, null=True) # Field name made lowercase.
numberoffloors = models.IntegerField(db_column='numberOfFloors', blank=True, null=True) # Field name made lowercase.
enclosedvolume = models.FloatField(db_column='enclosedVolume', blank=True, null=True) # Field name made lowercase.
wall1area = models.FloatField(db_column='wall1Area', blank=True, null=True) # Field name made lowercase.
wall1azimuth = models.FloatField(db_column='wall1Azimuth', blank=True, null=True) # Field name made lowercase.
    wall2area = models.FloatField(db_column='wall2Area', blank=True, null=True)  # Field name made lowercase.
    wall2azimuth = models.FloatField(db_column='wall2Azimuth', blank=True, null=True)  # Field name made lowercase.
wall3area = models.FloatField(db_column='wall3Area', blank=True, null=True) # Field name made lowercase.
wall3azimuth = models.FloatField(db_column='wall3Azimuth', blank=True, null=True) # Field name made lowercase.
wall4area = models.FloatField(db_column='wall4Area', blank=True, null=True) # Field name made lowercase.
wall4azimuth = models.FloatField(db_column='wall4Azimuth', blank=True, null=True) # Field name made lowercase.
envelopingsurface = models.FloatField(db_column='envelopingSurface', blank=True, null=True) # Field name made lowercase.
windowarea = models.FloatField(db_column='windowArea', blank=True, null=True) # Field name made lowercase.
windowwallratio = models.FloatField(db_column='windowWallRatio', blank=True, null=True) # Field name made lowercase.
avgthicknessouterwall = models.FloatField(db_column='avgThicknessOuterWall', blank=True, null=True) # Field name made lowercase.
avgthicknessinnerwall = models.FloatField(db_column='avgThicknessInnerWall', blank=True, null=True) # Field name made lowercase.
avgthicknessinsulation = models.FloatField(db_column='avgThicknessInsulation', blank=True, null=True) # Field name made lowercase.
materialouterwall = models.CharField(db_column='materialOuterWall', max_length=200, blank=True, null=True) # Field name made lowercase.
materialinnerwall = models.CharField(db_column='materialInnerWall', max_length=200, blank=True, null=True) # Field name made lowercase.
materialinsulation = models.CharField(db_column='materialInsulation', max_length=200, blank=True, null=True) # Field name made lowercase.
materialwindowframe = models.CharField(db_column='materialWindowFrame', max_length=200, blank=True, null=True) # Field name made lowercase.
materialwindowglazing = models.CharField(db_column='materialWindowGlazing', max_length=200, blank=True, null=True) # Field name made lowercase.
basematerial = models.CharField(db_column='baseMaterial', max_length=200, blank=True, null=True) # Field name made lowercase.
avguvalue = models.FloatField(db_column='avgUvalue', blank=True, null=True) # Field name made lowercase.
cellar = models.CharField(max_length=2, blank=True, null=True)
externalshading = models.CharField(db_column='externalShading', max_length=200, blank=True, null=True) # Field name made lowercase.
heating = models.CharField(max_length=2, blank=True, null=True)
cooling = models.CharField(max_length=2, blank=True, null=True)
centralizedcoolingsystem = models.CharField(db_column='centralizedCoolingSystem', max_length=2, blank=True, null=True) # Field name made lowercase.
ahu = models.CharField(db_column='AHU', max_length=2, blank=True, null=True) # Field name made lowercase.
absorptionchiller = models.CharField(db_column='absorptionChiller', max_length=2, blank=True, null=True) # Field name made lowercase.
computercenter = models.CharField(db_column='computerCenter', max_length=2, blank=True, null=True) # Field name made lowercase.
laboratory = models.CharField(max_length=2, blank=True, null=True)
serverroomwiringcenter = models.CharField(db_column='serverRoomWiringCenter', max_le

guohongze/adminset | elfinder/utils/archivers.py | Python | gpl-2.0 | size 1,102 | score 0.006352

from zipfile import ZipFile
class ZipFileArchiver(object):
"""
An archiver used to generate .zip files.
This wraps Python's built in :class:`zipfile.ZipFile`
methods to operate exactly like :class:`tarfile.TarFile` does.
"""
def __init__(self, *args, **kwargs):
"""
Create a :class:`.ZipFileArchiver` instance. We create a new
        :class:`zipfile.ZipFile` and store it to the ``zipfile`` member.
"""
self.zipfile = ZipFile(*args, **kwargs)
@classmethod
    def open(cls, *args, **kwargs):
"""
Open the archive. This must be a classmethod.
"""
        return ZipFileArchiver(*args, **kwargs)
def add(self, *args, **kwargs):
"""
Add file to the archive.
"""
self.zipfile.write(*args, **kwargs)
def extractall(self, *args, **kwargs):
"""
Extract all files from the archive.
"""
self.zipfile.extractall(*args, **kwargs)
def close(self):
"""
Close the archive.
"""
self.zipfile.close()
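A short usage sketch for the archiver above; the file names are placeholders:

# placeholder file names
archive = ZipFileArchiver.open('backup.zip', 'w')
archive.add('notes.txt')        # delegates to ZipFile.write()
archive.close()

reader = ZipFileArchiver.open('backup.zip')
reader.extractall('restored/')  # delegates to ZipFile.extractall()
reader.close()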

benfinke/ns_python | nssrc/com/citrix/netscaler/nitro/resource/config/authentication/authenticationradiuspolicy_authenticationvserver_binding.py | Python | apache-2.0 | size 5,569 | score 0.035374

#
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class authenticationradiuspolicy_authenticationvserver_binding(base_resource) :
""" Binding class showing the authenticationvserver that can be bound to authenticationradiuspolicy.
"""
def __init__(self) :
self._boundto = ""
self._priority = 0
self._activepolicy = 0
self._name = ""
self.___count = 0
@property
def name(self) :
ur"""Name of the RADIUS authentication policy.<br/>Minimum length = 1.
"""
try :
return self._name
except Exception as e:
raise e
@name.setter
def name(self, name) :
ur"""Name of the RADIUS authentication policy.<br/>Minimum length = 1
"""
try :
self._name = name
except Exception as e:
raise e
@property
def boundto(self) :
ur"""The entity name to which policy is bound.
"""
try :
return self._boundto
except Exception as e:
raise e
@boundto.setter
def boundto(self, boundto) :
ur"""The entity name to which policy is bound.
"""
try :
self._boundto = boundto
except Exception as e:
raise e
@property
def priority(self) :
try :
return self._priority
except Exception as e:
raise e
@property
def activepolicy(self) :
try :
return self._activepolicy
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
ur""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(authenticationradiuspolicy_authenticationvserver_binding_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.authenticationradiuspolicy_authenticationvserver_binding
except Exception as e :
raise e
def _get_object_name(self) :
ur""" Returns the value of object identifier argument
"""
try :
if self.name is not None :
return str(self.name)
return None
except Exception as e :
raise e
@classmethod
def get(cls, service, name) :
ur""" Use this API to fetch authenticationradiuspolicy_authenticationvserver_binding resources.
"""
try :
obj = authenticationradiuspolicy_authenticationvserver_binding()
obj.name = name
response = obj.get_resources(service)
return response
except Exception as e:
raise e
@classmethod
def get_filtered(cls, service, name, filter_) :
ur""" Use this API to fetch filtered set of authenticationradiuspolicy_authenticationvserver_binding resources.
Filter string should be in JSON format, e.g. "port:80,servicetype:HTTP".
"""
try :
obj = authenticationradiuspolicy_authenticationvserver_binding()
obj.name = name
option_ = options()
option_.filter = filter_
response = obj.getfiltered(service, option_)
return response
except Exception as e:
raise e
@classmethod
def count(cls, service, name) :
ur""" Use this API to count authenticationradiuspolicy_authenticationvserver_binding resources configued on NetScaler.
"""
try :
obj = authenticationradiuspolicy_authenticationvserver_binding()
obj.name = name
option_ = options()
option_.count = True
response = obj.get_resources(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
@classmethod
def count_filtered(cls, service, name, filter_) :
ur""" Use this API to count the filtered set of authenticationradiuspolicy_authenticationvserver_binding resources.
Filter string should be in JSON format, e.g. "port:80,servicetype:HTTP".
"""
try :
obj = authenticationradiuspolicy_authenticationvserver_binding()
obj.name = name
option_ = options()
option_.count = True
option_.filter = filter_
response = obj.getfiltered(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
class authenticationradiuspolicy_authenticationvserver_binding_response(base_response) :
def __init__(self, length=1) :
self.authenticationradiuspolicy_authenticationvserver_binding = []
self.errorcode = 0
self.message = ""
self.severity = ""
self.sessionid = ""
self.authenticationradiuspolicy_authenticationvserver_binding = [authenticationradiuspolicy_authenticationvserver_binding() for _ in range(length)]
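# --- Added usage sketch (not from the original NITRO source) ---
# Assumes an already-authenticated nitro_service session object named
# `client` and a policy named "radius_pol1"; both names are hypothetical,
# as is the filter value.
bindings = authenticationradiuspolicy_authenticationvserver_binding.get(client, "radius_pol1")
filtered = authenticationradiuspolicy_authenticationvserver_binding.get_filtered(client, "radius_pol1", "priority:100")
total = authenticationradiuspolicy_authenticationvserver_binding.count(client, "radius_pol1")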
|
ivandgreat/netmiko
|
tests/test_utils.py
|
Python
|
mit
| 567
| 0.001764
|
#!/usr/bin/env python
"""
Implement common functions for tests
"""
from __future__ import print_function
from __future__ import unicode_literals
import io
import sys
def parse_yaml(yaml_file):
"""
Parses a yaml file, returning its contents as a dict.
"""
try:
import yaml
except ImportError:
sys.exit("Unable to import yaml module.")
try:
with io.open(yaml_file, encoding='utf-8') as fname:
return yaml.load(fname)
except IOError:
sys.exit("Unable to open YAML file: {0}".format(yaml_file))
|
Ophiuchus1312/enigma2-master
|
lib/python/Screens/ParentalControlSetup.py
|
Python
|
gpl-2.0
| 14,944
| 0.02844
|
from Screens.Screen import Screen
from Components.ConfigList import ConfigListScreen
from Components.ActionMap import NumberActionMap
from Components.config import config, getConfigListEntry, ConfigNothing, NoSave, ConfigPIN
from Components.ParentalControlList import ParentalControlEntryComponent, ParentalControlList
from Components.Sources.StaticText import StaticText
from Screens.ChoiceBox import ChoiceBox
from Screens.MessageBox import MessageBox
from Screens.InputBox import PinInput
from Screens.ChannelSelection import service_types_tv
from Tools.BoundFunction import boundFunction
from enigma import eServiceCenter, eTimer, eServiceReference
from operator import itemgetter
class ProtectedScreen:
def __init__(self):
if self.isProtected():
self.onFirstExecBegin.append(boundFunction(self.session.openWithCallback, self.pinEntered, PinInput, pinList = [self.protectedWithPin()], triesEntry = self.getTriesEntry(), title = self.getPinText(), windowTitle = _("Enter pin code")))
def getTriesEntry(self):
return config.ParentalControl.retries.setuppin
def getPinText(self):
return _("Please enter the correct pin code")
def isProtected(self):
return True
def protectedWithPin(self):
return config.ParentalControl.setuppin.getValue()
def pinEntered(self, result):
if result is None:
self.close()
elif not result:
self.session.openWithCallback(self.close, MessageBox, _("The pin code you entered is wrong."), MessageBox.TYPE_ERROR)
class ParentalControlSetup(Screen, ConfigListScreen, ProtectedScreen):
def __init__(self, session):
Screen.__init__(self, session)
ProtectedScreen.__init__(self)
# for the skin: first try ParentalControlSetup, then Setup, this allows individual skinning
self.skinName = ["ParentalControlSetup", "Setup" ]
self.setup_title = _("Parental control setup")
self.onChangedEntry = [ ]
self.list = []
ConfigListScreen.__init__(self, self.list, session = self.session, on_change = self.changedEntry)
self.createSetup()
self["actions"] = NumberActionMap(["SetupActions", "MenuActions"],
{
"cancel": self.keyCancel,
"save": self.keyCancel,
"menu": self.closeRecursive,
}, -2)
self["key_red"] = StaticText(_("Cancel"))
self["key_
|
green"] = StaticText(_("OK"))
self.onLayoutFinish.append(sel
|
f.layoutFinished)
def layoutFinished(self):
self.setTitle(self.setup_title)
def isProtected(self):
return config.ParentalControl.setuppinactive.getValue() and config.ParentalControl.configured.getValue()
def createSetup(self):
self.editListEntry = None
self.changePin = None
self.changeSetupPin = None
self.list = []
self.list.append(getConfigListEntry(_("Enable parental control"), config.ParentalControl.configured))
print "config.ParentalControl.configured.getValue()", config.ParentalControl.configured.getValue()
self.editBouquetListEntry = -1
self.reloadLists = -1
if config.ParentalControl.configured.getValue():
#self.list.append(getConfigListEntry(_("Configuration mode"), config.ParentalControl.mode))
self.list.append(getConfigListEntry(_("Protect setup"), config.ParentalControl.setuppinactive))
if config.ParentalControl.setuppinactive.getValue():
self.changeSetupPin = getConfigListEntry(_("Change setup PIN"), NoSave(ConfigNothing()))
self.list.append(self.changeSetupPin)
self.list.append(getConfigListEntry(_("Protect services"), config.ParentalControl.servicepinactive))
if config.ParentalControl.servicepinactive.getValue():
self.list.append(getConfigListEntry(_("Parental control type"), config.ParentalControl.type))
if config.ParentalControl.mode.getValue() == "complex":
self.changePin = getConfigListEntry(_("Change service PINs"), NoSave(ConfigNothing()))
self.list.append(self.changePin)
elif config.ParentalControl.mode.getValue() == "simple":
self.changePin = getConfigListEntry(_("Change service PIN"), NoSave(ConfigNothing()))
self.list.append(self.changePin)
#Added Option to remember the service pin
self.list.append(getConfigListEntry(_("Remember service PIN"), config.ParentalControl.storeservicepin))
self.editListEntry = getConfigListEntry(_("Edit services list"), NoSave(ConfigNothing()))
self.list.append(self.editListEntry)
#New function: Possibility to add bouquets to whitelist / blacklist
self.editBouquetListEntry = getConfigListEntry(_("Edit bouquets list"), NoSave(ConfigNothing()))
self.list.append(self.editBouquetListEntry)
#New option to reload service lists (for example if bouquets have changed)
self.reloadLists = getConfigListEntry(_("Reload black-/white lists"), NoSave(ConfigNothing()))
self.list.append(self.reloadLists)
self["config"].list = self.list
self["config"].setList(self.list)
def keyOK(self):
print "self[\"config\"].l.getCurrentSelection()", self["config"].l.getCurrentSelection()
if self["config"].l.getCurrentSelection() == self.editListEntry:
self.session.open(ParentalControlEditor)
elif self["config"].l.getCurrentSelection() == self.editBouquetListEntry:
self.session.open(ParentalControlBouquetEditor)
elif self["config"].l.getCurrentSelection() == self.changePin:
if config.ParentalControl.mode.getValue() == "complex":
pass
else:
self.session.open(ParentalControlChangePin, config.ParentalControl.servicepin[0], _("service PIN"))
elif self["config"].l.getCurrentSelection() == self.changeSetupPin:
self.session.open(ParentalControlChangePin, config.ParentalControl.setuppin, _("setup PIN"))
elif self["config"].l.getCurrentSelection() == self.reloadLists:
from Components.ParentalControl import parentalControl
parentalControl.open()
else:
ConfigListScreen.keyRight(self)
print "current selection:", self["config"].l.getCurrentSelection()
self.createSetup()
def keyLeft(self):
ConfigListScreen.keyLeft(self)
print "current selection:", self["config"].l.getCurrentSelection()
self.createSetup()
def keyRight(self):
ConfigListScreen.keyRight(self)
print "current selection:", self["config"].l.getCurrentSelection()
self.createSetup()
def SetupPinMessageCallback(self, value):
if value:
self.session.openWithCallback(self.cancelCB, ParentalControlChangePin, config.ParentalControl.setuppin, _("setup PIN"))
else:
config.ParentalControl.setuppinactive.setValue(False)
self.keyCancel()
def ServicePinMessageCallback(self, value):
if value:
self.session.openWithCallback(self.cancelCB, ParentalControlChangePin, config.ParentalControl.servicepin[0], _("service PIN"))
else:
config.ParentalControl.servicepinactive.setValue(False)
self.keyCancel()
def cancelCB(self,value):
self.keyCancel()
def keyCancel(self):
if config.ParentalControl.setuppinactive.getValue() and config.ParentalControl.setuppin.getValue() == 'aaaa':
self.session.openWithCallback(self.SetupPinMessageCallback, MessageBox, _("No valid setup PIN found!\nDo you like to change the setup PIN now?\nWhen you say 'No' here the setup protection stay disabled!"), MessageBox.TYPE_YESNO)
elif config.ParentalControl.servicepinactive.getValue() and config.ParentalControl.servicepin[0].getValue() == 'aaaa':
self.session.openWithCallback(self.ServicePinMessageCallback, MessageBox, _("No valid service PIN found!\nDo you like to change the service PIN now?\nWhen you say 'No' here the service protection stay disabled!"), MessageBox.TYPE_YESNO)
else:
for x in self["config"].list:
x[1].save()
self.close()
def keyNumberGlobal(self, number):
pass
# for summary:
def changedEntry(self):
for x in self.onChangedEntry:
x()
def getCurrentEntry(self):
return self["config"].getCurrent()[0]
def getCurrentValue(self):
return str(self["config"].getCurrent()[1].getText())
def createSummary(self):
from Screens.Setup import SetupSummary
return SetupSummary
SPECIAL_CHAR = 96
class ParentalControlEditor(Screen):
def __init__(self, session):
Screen.__init__(self, session)
Screen.setTitle(self, _("Parental control editor"))
self.list = []
self.servicelist = ParentalControlList(self.list)
self["servicelist"] = self.servicelist
#self.onShown.append(self.chooseLette
|
FNST-OpenStack/cloudkitty-dashboard
|
cloudkittydashboard/dashboards/project/billing_overview/views.py
|
Python
|
apache-2.0
| 14,849
| 0.027079
|
# coding=utf-8
import json
from django.utils.translation import ugettext_lazy as _
from django.http import HttpResponse
import django.views
from django.template import defaultfilters as template_filters
from horizon import tables
from horizon import exceptions
from cloudkittydashboard.api import cloudkitty as api
from openstack_dashboard.api import keystone
from cloudkittydashboard.dashboards.project.billing_overview import tables as project_tables
import time
from datetime import date, timedelta, datetime
import calendar
from django.http import JsonResponse
import xlsxwriter
import StringIO
import logging
LOG = logging.getLogger(__name__)
def detail(request, org_id):
if org_id == None:
org_id = get_tenant_id(request)
try:
details = api.cloudkittyclient(request).billings.list_services_cost(get_month(request), org_id)
except Exception:
details = []
exceptions.handle(request, _('Unable to retrieve billing list.'))
return HttpResponse(json.dumps(details),content_type="application/json")
class IndexView(tables.DataTableView):
# A very simple class-based view...
template_name = "project/billing_overview/index.html"
table_class = project_tables.BillingOverviewTable
page_title = _("Billing Overview")
def get_context_data(self, **kwargs):
context = super(IndexView, self).get_context_data(**kwargs)
context["tenant_id"] = get_tenant_id(self.request)
context["selected_month"] = get_month(self.request)
context["organizations"] = get_tenant_list(self.request)
year = time.strftime("%Y",time.localtime())
month = time.strftime("%m",time.localtime())
if int(month) == 1:
last_month = 12
last_year = int(year) - 1
else:
last_month = int(month) - 1
last_year = year
try:
context["year_begin"] = str((int(year)-1)) + "/" + str((int(month)))
context["year_end"] = str(last_year) + "/" + str(last_month)
# get last 12 months total cost
total_year = api.cloudkittyclient(self.request).billings.get_consumer_trends("month",
12,
get_tenant_id(self.request))
year_sum = 0
for billing_month in total_year["consumerTrends"]:
year_sum += billing_month["cost"]
context["billing_year"] = year_sum
#get current month cost
context["time_current_month"] = year+"/"+month
services_rate_list = api.cloudkittyclient(self.request).billings.list_services_cost(year+"-"+month,
get_tenant_id(self.request))
current_sum = 0
for rate in services_rate_list["servicesRate"]:
current_sum += rate["rate"]
context["billing_current_month"] = current_sum
#get last month cost
context["time_last_month"] = str(last_year)+"/"+str(last_month)
context["billing_last_month"] = api.cloudkittyclient(self.request).billings.get_consumer_trends("month",
1,
get_tenant_id(self.request))["consumerTrends"][0]["cost"]
except Exception:
exceptions.handle(self.request,_("Unable to retrieve month cost"))
today = date.today()
context["last_12_months"] = last_12_months()
return context
def get_data(self):
try:
billings = api.cloudkittyclient(self.request).billings.get_total_cost(get_month(self.request), get_tenant_id(self.request))["totals"]
except Exception:
billings = []
exceptions.handle(self.request, _('Unable to retrieve billing list.'))
return billings
class ReportView(django.views.generic.TemplateView):
def get(self, request, *args, **kwargs):
tenant_id = get_tenant_id(self.request)
billing_month = get_month(self.request)
tenants = get_tenant_list(self.request)
for tenant in tenants:
if tenant.id == tenant_id:
tenant_name = tenant.name
break
reports = api.cloudkittyclient(self.request).billings.list_month_report(tenant_id,billing_month)
output = StringIO.StringIO()
workbook = xlsxwriter.Workbook(output)
month_sheet = workbook.add_worksheet(tenant_name)
# set column widths
month_sheet.set_column('A:Z',9)
# table header row
head = (u'部门',u'资源',
u'1月',u'2月',u'3月', u'1Q合计',
u'4月',u'5月',u'6月', u'2Q合计', u'上半年计',
u'7月',u'8月',u'9月', u'3Q合计',
u'10月',u'11月',u'12月',u'4Q合计',u'下半年计',u'全年合计'
)
# set the header title string and its format
head_format = workbook.add_format({
'bold':True,
'font_size':20,
'font_name':'Microsoft YaHei'
})
row = 1
col = 0
head_str = billing_month.split('-')[0] + u'年度月别计费一览表'
head_str1 = u'资源及使用费用情况'
month_sheet.write(row,col,head_str,head_format)
row += 1
month_sheet.write(row,col,u'如需查看季、年度合计,请在月份对应位置取消隐藏')
row += 2
month_sheet.write(row,col,head_str1,head_format)
explain_format = workbook.add_format({'align':'right'})
year_month = billing_month.split('-')
if billing_month == template_filters.date(date.today(), "Y-m"):
tab_date = u'制表日期:%d月%d日' %(int(year_month[1]),date.today().day-1)
else:
tab_date = u'制表日期:%d月%d日' %(int(year_month[1]),calendar.monthrange(int(year_month[0]),int(year_month[1]))[1])
month_sheet.write(row,len(head)-1,u'单位:元 ' + tab_date, explain_format)
row += 1
col = 0
head2_format = workbook.add_format({
'bold':True,
'align':'center',
'valign':'vcenter',
'bg_color':'#D8E4BC',
'left':1,
'font_name':'Microsoft YaHei'
})
# set row height
month_sheet.set_row(row,30)
for index_str in head:
month_sheet.write(row,col,index_str,head2_format)
col += 1
row += 1
month_sheet.set_column('A:A',15)
# number of rows used by the resources and the total line
names = ['Compute','Volume',u'合计']
even_format = workbook.add_format({
'border':1,
'font_name':'Microsoft YaHei',
'num_format': '#,##0.00'
})
odd_format=workbook.add_format({
'border':1,
'font_name':'Microsoft YaHei',
'bg_color':'#D9D9D9',
'num_format': '#,##0.00'
})
resource_total_rows = 3
# process each department
merge_format = workbook.add_format({
'bold':True,
'font_name':'Microsoft YaHei',
'font_size':14,
'align':'center',
'valign':'vcenter',
'border':1
})
for depart in reports['departs']:
col = 1
for index,name in enumerate(names):
if index % 2 != 0:
month_sheet.set_row(row+index,None,odd_format)
else:
month_sheet.set_row(row+index,None,even_format)
month_sheet.write(row+index,col,name)
month_sheet.merge_range(row,0,row+resource_total_rows-1,0,depart['tenant_name'],merge_format)
tmp_row = row
write_col = col + 1
for month_report in depart['month_reports']:
for res_tpye in month_report['res_types']:
if res_tpye['res_type'] == "compute":
write_row = tmp_row
elif res_tpye['res_type'] == "volume":
write_row = tmp_row + 1
month_sheet.write(write_row,write_col,res_tpye['rate'])
write_col += 1
month = int(month_report["month"].split('-')[1])
if month == 3:
for index in range(resource_total_rows-1):
index_row = tmp_
|
fomars/yandex-tank
|
setup.py
|
Python
|
lgpl-2.1
| 3,327
| 0.000902
|
from setuptools import setup, find_packages
setup(
name='yandextank',
version='1.12.7',
description='a performance measurement tool',
long_description='''
Yandex.Tank is a performance measurement and load testing automatization tool.
It uses other load generators such as JMeter, ab or phantom inside of it for
load generation and provides a common configuration system for them and
analytic tools for the results they produce.
''',
maintainer='Yandex Load Team',
maintainer_email='load@yandex-team.ru',
url='http://yandex.github.io/yandex-tank/',
namespace_packages=["yandextank", "yandextank.plugins"],
packages=find_packages(exclude=["tests", "tmp", "docs", "data"]),
install_requires=[
'cryptography>=2.2.1', 'pyopenssl==18.0.0',
'psutil>=1.2.1', 'requests>=2.5.1', 'paramiko>=1.16.0',
'pandas==0.24.2', 'numpy==1.15.4', 'future>=0.16.0',
'pip>=8.1.2',
'pyyaml>=4.2b1', 'cerberus==1.3.1', 'influxdb>=5.0.0', 'netort>=0.7.6',
'retrying>=1.3.3', 'pytest-runner', 'typing'
],
setup_requires=[
],
tests_require=[
'pytest==4.6.3', 'flake8', 'pytest-benchmark'
],
license='LGPLv2',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: GNU Lesser General Public License v2 or later (LGPLv2+)',
'Operating System :: POSIX',
'Topic :: Software Development :: Quality Assurance',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Testing :: Traffic Generation',
'Programming Language :: Python :: 2',
],
entry_points={
'console_scripts': [
'yandex-tank = yandextank.core.cli:main',
'yandex-tank-check-ssh = yandextank.common.util:check_ssh_connection',
'tank-postloader = yandextank.plugins.DataUploader.cli:post_loader',
'tank-docs-gen = yandextank.validator.docs_gen:main'
],
},
package_data={
'yandextank.api': ['config/*'],
'yandextank.core': ['config/*'],
'yandextank.aggregator': ['config/*'],
'yandextank.plugins.Android': ['binary/*', 'config/*'],
'yandextank.plugins.Autostop': ['config/*'],
'yandextank.plugins.Bfg': ['config/*'],
'yandextank.plugins.Console': ['config/*'],
'yandextank.plugins.DataUploader': ['config/*'],
'yandextank.plugins.InfluxUploader': ['config/*'],
'yandextank.plugins.OpenTSDBUploader': ['config/*'],
'yandextank.plugins.JMeter': ['config/*'],
'yandextank.plugins.JsonReport': ['config/*'],
'yandextank.plugins.Pandora': ['config/*'],
'yandextank.plugins.Phantom': ['config/*'],
'yandextank.plugins.RCAssert': ['config/*'],
'yandextank.plugins.ResourceCheck': ['config/*'],
'yandextank.plugins.ShellExec': ['config/*'],
'yandextank.plugins.ShootExec': ['config/*'],
'yandextank.plugins.Telegraf': ['config/*'],
'yandextank.plugins.NeUploader': ['config/*']
},
use_2to3=False, )
|
Crowdlink/lever
|
lever/tests/unit_tests.py
|
Python
|
bsd-2-clause
| 14,490
| 0.000897
|
import unittest
import types
import datetime
from flask import Flask
from pprint import pprint
from sqlalchemy import (Column, create_engine, DateTime, Date, Float,
ForeignKey, Integer, Boolean, Unicode, create_engine)
from lever import API, preprocess, postprocess, ModelBasedACL, ImpersonateMixin
from lever.tests.model_helpers import FlaskTestBase, TestUserACL
class ProcessTests(unittest.TestCase):
""" Ensures our metaclasses and decorators operate as we want for assigning
preprocessors and postprocessors """
def test_basic_preprocess(self):
class APIAwesome(API):
@preprocess(method='post')
def preprocess_those(self):
pass
@preprocess(action='something')
def preprocess_that(self):
pass
assert isinstance(APIAwesome._pre_method['post'][0],
types.FunctionType)
assert isinstance(APIAwesome._pre_action['something'][0],
types.FunctionType)
def test_inheritence_mixins(self):
class APIParent(object):
@preprocess(method='post')
def preprocess_those(self):
pass
class APIAwesome(API, APIParent):
pass
assert isinstance(APIAwesome._pre_method['post'][0],
types.FunctionType)
def test_inheritence(self):
class APIParent(API):
@preprocess(method='post')
def preprocess_those(self):
pass
class APIAwesome(APIParent):
pass
assert isinstance(APIAwesome._pre_method['post'][0],
types.FunctionType)
def test_inheritence_reversal(self):
class APIParent(API):
pass
class APIAwesome(APIParent):
@preprocess(method='post')
def preprocess_those(self):
pass
assert isinstance(APIAwesome._pre_method['post'][0],
types.FunctionType)
def test_multi_preprocess(self):
class APIAwesome(API):
@preprocess(method=['post', 'get'])
def preprocess_those(self):
pass
@preprocess(action=['create', 'other'])
def preprocess_that(self):
pass
assert isinstance(APIAwesome._pre_method['post'][0],
types.FunctionType)
assert isinstance(APIAwesome._pre_method['get'][0],
types.FunctionType)
assert isinstance(APIAwesome._pre_action['other'][0],
types.FunctionType)
assert isinstance(APIAwesome._pre_action['create'][0],
types.FunctionType)
def test_basic_postprocess(self):
class APIAwesome(API):
@postprocess(method='post')
def preprocess_those(self):
pass
@postprocess(action='something')
def preprocess_that(self):
pass
assert isinstance(APIAwesome._post_method['post'][0],
types.FunctionType)
assert isinstance(APIAwesome._post_action['something'][0],
types.FunctionType)
def test_multi_postprocess(self):
class APIAwesome(API):
@postprocess(method=['post', 'get'])
def preprocess_those(self):
pass
@postprocess(action=['create', 'other'])
def preprocess_that(self):
pass
assert isinstance(APIAwesome._post_method['post'][0],
types.FunctionType)
assert isinstance(APIAwesome._post_method['get'][0],
types.FunctionType)
assert isinstance(APIAwesome._post_action['other'][0],
types.FunctionType)
assert isinstance(APIAwesome._post_action['create'][0],
types.FunctionType)
def test_preprocess_priority(self):
class APIAwesome(API):
@postprocess(method='post', pri=0)
def preprocess_those(self):
|
pass
@postprocess(method='post')
def preprocess_that(self):
pass
self.assertEqual(
APIAwesome._post_method['post'][0].__name__, 'preprocess_those')
def test_none(self):
class APIAwesome(API):
pass
assert APIAwesome._pre_method == {}
assert APIAwesome._pre_action == {}
class TestProcessorUsage(FlaskTestBase):
""" These tests ensure that preprocessors and postprocessors are getting
called when they should be """
def test_methods_preprocess(self):
for meth in ['post', 'get', 'delete', 'put']:
class APIAwesome(API):
@preprocess(method=meth)
def preprocessor_one(self):
raise SyntaxError # pick an obscure one to catch..
inst = APIAwesome()
self.assertRaises(SyntaxError, getattr(inst, meth))
def test_methods_postprocess(self):
obj = self.provision_single_asset()
data = [('post', {'name': 'test'}),
('get', {}),
('put', {'id': obj.id, 'name': 'test2'}),
('delete', {'id': obj.id})]
for meth, vals in data:
class APIAwesome(self.widget_api):
@postprocess(method=meth)
def postprocess_one(self, retval):
raise SyntaxError # pick an obscure one to catch..
self.app.add_url_rule('/' + meth, view_func=APIAwesome.as_view(meth))
for meth, vals in data:
self.assertRaises(SyntaxError, getattr(self, meth), meth, 500, params=vals)
class TestAPICreation(FlaskTestBase):
def test_create_bad_pkey(self):
""" ensure that exception is thrown for invalid primary_key """
class Testing(self.base):
__tablename__ = "testing_table"
bad_id = Column(Integer, primary_key=True)
class UserAPI(API):
model = Testing
session = self.session
t = UserAPI()
self.assertRaises(AttributeError, lambda: t.pkey)
class TestGet(FlaskTestBase):
""" Test facets of our get method """
def test_get_pkey(self):
obj = self.provision_single_asset()
d = self.get('widget', 200, {'id': obj.id})
assert len(d['objects']) > 0
assert d['objects'][0]['id'] == obj.id
def test_many_query(self):
self.provision_many_asset()
d = self.get('widget', 200)
assert len(d['objects']) >= 4
class TestPut(FlaskTestBase):
""" Test facets of our get method """
def test_update(self):
""" can we change an object """
obj = self.provision_single_asset()
test_string = "testing this thing"
p = {'id': obj.id, 'description': test_string}
self.put('widget', 200, params=p)
self.session.refresh(obj)
assert obj.description == test_string
def test_cant_find(self):
self.basic_api()
self.base.metadata.create_all(self.engine)
ret = self.put('widget', 404, params={'id': 123})
assert 'not be found' in ret['message']
def test_cant_find_invalid_key(self):
self.basic_api()
self.base.metadata.create_all(self.engine)
ret = self.put('widget', 404, params={'tid': 123})
assert 'any object to update' in ret['message']
class TestDelete(FlaskTestBase):
def test_delete(self):
""" can we delete an object """
obj = self.provision_single_asset()
obj_id = obj.id
self.delete('widget', 200, params={'id': obj_id})
obj = self.session.query(self.widget_model).filter_by(id=obj_id).first()
assert obj is None
def test_cant_find_put_delete(self):
self.basic_api()
self.base.metadata.create_all(self.engine)
ret = self.delete('widget', 404, params={'id': 123})
assert 'Object could not be found' in ret['message']
def test_cant_find(self):
self.basic_api()
self.base.metadata.c
|
|
thefab/tornadis
|
tests/test_pubsub.py
|
Python
|
mit
| 3,713
| 0
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import tornado.testing
import tornado.ioloop
import tornado.gen
from tornadis.pubsub import PubSubClient
from tornadis.client import Client
from support import test_redis_or_raise_skiptest, mock
class PubSubClientTestCase(tornado.testing.AsyncTestCase):
def setUp(self):
test_redis_or_raise_skiptest()
super(PubSubClientTestCase, self).setUp()
def get_new_ioloop(self):
return tornado.ioloop.IOLoop.instance()
@tornado.gen.coroutine
def publish(self, c2):
yield tornado.gen.sleep(1)
yield c2.call("PUBLISH", "null", "value0")
yield c2.call("PUBLISH", "foo1", "value1")
yield c2.call("PUBLISH", "foo2", "value2")
yield c2.call("PUBLISH", "bar111", "value3")
yield c2.call("PUBLISH", "bar222", "value4")
@tornado.testing.gen_test
def test_pubsub(self):
c = PubSubClient()
c2 = Client()
yield c.connect()
yield c2.connect()
try:
yield c.pubsub_pop_message()
raise Exception("exception not raised")
except Exception:
pass
res = yield c.pubsub_subscribe("foo1", "foo2")
self.assertTrue(res)
self.assertTrue(c.subscribed)
self.assertFalse(c2.subscribed)
try:
yield c.call("PING")
raise Exception("exception not raised")
except Exception:
pass
res = yield c.pubsub_psubscribe("bar1*", "bar2*")
self.assertTrue(res)
tornado.ioloop.IOLoop.instance().add_future(self.publish(c2), None)
msg = yield c.pubsub_pop_message()
self.assertEqual(msg[2], b"value1")
msg = yield c.pubsub_pop_message()
self.assertEqual(msg[2], b"value2")
msg = yield c.pubsub_pop_message()
self.assertEqual(msg[3], b"value3")
msg = yield c.pubsub_pop_message()
self.assertEqual(msg[3], b"value4")
msg = yield c.pubsub_pop_message(deadline=1)
self.assertEqual(msg, None)
yield c.pubsub_unsubscribe("foo1")
yield c2.call("PUBLISH", "foo1", "value1")
c2.disconnect()
msg = yield c.pubsub_pop_message(deadline=1)
self.assertEqual(msg, None)
yield c.pubsub_unsubscribe("foo2")
yield c.pubsub_unsubscribe("foobar")
yield c.pubsub_punsubscribe("foobar*")
yield c.pubsub_punsubscribe("bar1*")
yield c.pubsub_punsubscribe("bar2*")
self.assertFalse(c.subscribed)
c.disconnect()
@tornado.testing.gen_test
def test_issue17(self):
c = PubSubClient()
yield c.connect()
res = yield c.pubsub_subscribe("foo")
self.assertTrue(res)
self.assertTrue(c.subscribed)
res = yield c.pubsub_unsubscribe()
self.assertTrue(res)
self.assertFalse(c.subscribed)
c.disconnect()
@tornado.testing.gen_test
def test_empty_subscribe(self):
c = PubSubClient()
|
yield c.connect()
res = yield c.pubsub_subscribe()
self.assertFalse(res)
c.disconnect()
@tornado.testing.gen_test
def test_subscribe_no_redis(self):
c = PubSubClient()
with mock.patch.object(c, "is_connected", return_value=False):
|
res = yield c.pubsub_subscribe("foo")
self.assertFalse(res)
self.assertFalse(c.subscribed)
@tornado.testing.gen_test
def test_unsubscribe_no_redis(self):
c = PubSubClient()
yield c.pubsub_subscribe("foo")
with mock.patch.object(c, "is_connected", return_value=False):
res = yield c.pubsub_unsubscribe("foo")
self.assertFalse(res)
|
oblique-labs/pyVM
|
rpython/translator/goal/targetsegfault.py
|
Python
|
mit
| 291
| 0.013746
|
def getitem(list, index):
return list[index]
def entry_point(i):
return getitem([i, 2, 3, 4], 2) + getitem(None, i)
def target(*args):
return entry_point, [int]
def get_llinterp_args():
return [1]
# _____ Run translated _____
def run(c_entry_point):
c_entry_point(0)
|
buguelos/odoo
|
yowsup/Interfaces/DBus/__init__.py
|
Python
|
agpl-3.0
| 22
| 0.045455
|
#import DBusInterface
| ||
telefonicaid/fiware-IoTAgent-Cplusplus
|
third_party/mosquitto-1.4.4/test/broker/04-retain-qos1-qos0.py
|
Python
|
agpl-3.0
| 1,749
| 0.005718
|
#!/usr/bin/env python
# Test whether a retained PUBLISH to a topic with QoS 1 is retained.
# Subscription is made with QoS 0 so the retained message should also have QoS
# 0.
import subprocess
import socket
import time
import inspect, os, sys
# From http://stackoverflow.com/questions/279237/python-import-a-module-from-a-folder
cmd_subfolder = os.path.realpath(os.path.abspath(os.path.join(os.path.split(inspect.getfile( inspect.currentframe() ))[0],"..")))
if cmd_subfolder not in sys.path:
sys.path.insert(0, cmd_subfolder)
import mosq_test
rc = 1
keepalive = 60
connect_packet = mosq_test.gen_connect("retain-qos1-test", keepalive=keepalive)
connack_packet = mosq_test.gen_connack(rc=0)
mid = 6
publish_packet = mosq_test.gen_publish("retain/qos1/test", qos=1, mid=mid, payload="retained message", retain=True)
puback_packet = mosq_test.gen_puback(mid)
mid = 18
subscribe_packet = mosq_test.gen_subscribe(mid, "retain/qos1/test", 0)
suback_packet = mosq_test.gen_suback(mid, 0)
publish0_packet = mosq_test.gen_publish("retain/qos1/test", qos=0, payload="retained message", retain=True)
cmd = ['../../src/mosquitto', '-p', '1888']
broker = mosq_test.start_broker(filename=os.path.basename(__file__), cmd=cmd)
try:
sock = mosq_test.do_client_connect(connect_packet, connack_packet)
sock.send(publish_packet)
if mosq_test.expect_packet(sock, "puback", puback_packet):
sock.send(subscribe_packet)
if mosq_test.expect_packet(sock, "suback", suback_packet):
if mosq_test.expect_packet(sock, "publish0", publish0_packet):
rc = 0
sock.close()
finally:
broker.terminate()
broker.wait()
if rc:
(stdo, stde) = broker.communicate()
print(stde)
exit(rc)
|
pyhmsa/pyhmsa
|
pyhmsa/fileformat/exporter/raw.py
|
Python
|
mit
| 3,504
| 0.001427
|
"""
Export to RAW/RPL file format
Based on:
http://www.nist.gov/lispix/doc/image-file-formats/raw-file-format.htm
"""
# Standard library modules.
import os
# Third party modules.
# Local modules.
from pyhmsa.fileformat.exporter.exporter import _Exporter, _ExporterThread
from pyhmsa.spec.datum.analysislist import AnalysisList2D
from pyhmsa.spec.datum.imageraster import ImageRaster2D, ImageRaster2DSpectral
# Globals and constants variables.
class _ExporterRAWThread(_ExporterThread):
def _run(self, datafile, dirpath, *args, **kwargs):
basefilename = datafile.header.title or 'Untitled'
keys = set(datafile.data.findkeys(AnalysisList2D)) | \
set(datafile.data.findkeys(ImageRaster2D)) | \
set(datafile.data.findkeys(ImageRaster2DSpectral))
length = len(keys)
filepaths = []
for i, identifier in enumerate(keys):
datum = datafile.data[identifier]
self._update_status(i / length, 'Exporting %s' % identifier)
filename = basefilename + '_' + identifier
lines = self._create_rpl_lines(identifier, datum)
rpl_filepath = os.path.join(dirpath, filename + '.rpl')
with open(rpl_filepath, 'w') as fp:
fp.write('\n'.join(lines))
raw_filepath = os.path.join(dirpath, filename + '.raw')
with open(raw_filepath, 'wb') as fp:
datum = datum.copy()
datum.dtype.newbyteorder('<')
fp.write(datum.tobytes())
filepaths.append(raw_filepath)
return filepaths
def _create_rpl_lines(self, identifier, datum):
lines = []
lines.append('key\t%s' % identifier)
lines.append('offset\t0')
if isinstance(datum, ImageRaster2D):
width, height = datum.shape
depth = 1
record_by = 'dont-care'
elif isinstance(datum, ImageRaster2DSpectral):
width, height, depth = datum.shape
record_by = 'vector'
elif isinstance(datum, AnalysisList2D):
depth, width, height = datum.shape
record_by = 'image'
else:
raise IOError('Unknown datum type')
lines.append('width\t%i' % width)
lines.append('height\t%i' % height)
lines.append('depth\t%i' % depth)
lines.append('record-by\t%s' % record_by)
dtype = datum.dtype
lines.append('data-length\t%i' % dtype.itemsize)
byteorder = 'little-endian' if dtype.itemsize > 1 else 'dont-care'
lines.append('byte-order\t%s' % byteorder)
if dtype.kind == 'f':
data_type = 'float'
elif dtype.kind == 'u':
data_type = 'unsigned'
else:
data_type = 'signed'
lines.append('data-type\t%s' % data_type)
return lines
class ExporterRAW(_Exporter):
def _create_thread(self, datafile, dirpath, *args, **kwargs):
return _ExporterRAWThread(datafile, dirpath)
def validate(self, datafile):
super().validate(datafile)
identifiers = set(datafile.data.findkeys(AnalysisList2D)) | \
set(datafile.data.findkeys(ImageRaster2D)) | \
set(datafile.data.findkeys(ImageRaster2DSpectral))
if not identifiers:
raise ValueError('Datafile must contain at least one ' + \
'AnalysisList2D, ImageRaster2D or ' + \
'ImageRaster2DSpectral datum')
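# --- Added illustration (not part of the original exporter) ---
# For a hypothetical 64 x 48 x 1024 ImageRaster2DSpectral datum of uint16
# values stored under the identifier "Spectrum0", _create_rpl_lines() would
# emit tab-separated key/value lines along these lines:
# key Spectrum0
# offset 0
# width 64
# height 48
# depth 1024
# record-by vector
# data-length 2
# byte-order little-endian
# data-type unsigned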
|
att-comdev/deckhand
|
deckhand/tests/unit/db/test_revision_diffing.py
|
Python
|
apache-2.0
| 14,036
| 0
|
# Copyright 2017 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
from deckhand.db.sqlalchemy import api as db_api
from deckhand.tests import test_utils
from deckhand.tests.unit.db import base
class TestRevisionDiffing(base.TestDbBase):
def _verify_buckets_status(self, revision_id, comparison_revision_id,
expected):
# Verify that actual and expected results match, despite the order of
# `comparison_revision_id` and `revision_id` args.
revision_ids = [revision_id, comparison_revision_id]
for rev_ids in (revision_ids, reversed(revision_ids)):
actual = db_api.revision_diff(*rev_ids)
self.assertEqual(expected, actual)
def test_revision_diff_null(self):
self._verify_buckets_status(0, 0, {})
def test_revision_diff_created(self):
payload = base.DocumentFixture.get_minimal_multi_fixture(count=3)
bucket_name = test_utils.rand_name('bucket')
documents = self.create_documents(bucket_name, payload)
revision_id = documents[0]['revision_id']
self._verify_buckets_status(
0, revision_id, {bucket_name: 'created'})
def test_revision_diff_multi_bucket_created(self):
revision_ids = []
bucket_names = []
for _ in range(3):
payload = base.DocumentFixture.get_minimal_multi_fixture(count=3)
bucket_name = test_utils.rand_name('bucket')
bucket_names.append(bucket_name)
documents = self.create_documents(bucket_name, payload)
revision_id = documents[0]['revision_id']
revision_ids.append(revision_id)
# Between revision 1 and 0, 1 bucket is created.
self._verify_buckets_status(
0, revision_ids[0], {b: 'created' for b in bucket_names[:1]})
# Between revision 2 and 0, 2 buckets are created.
self._verify_buckets_status(
0, revision_ids[1], {b: 'created' for b in bucket_names[:2]})
# Between revision 3 and 0, 3 buckets are created.
self._verify_buckets_status(
0, revision_ids[2], {b: 'created' for b in bucket_names})
def test_revision_diff_self(self):
payload = base.DocumentFixture.get_minimal_multi_fixture(count=3)
bucket_name = test_utils.rand_name('bucket')
documents = self.create_documents(bucket_name, payload)
revision_id = documents[0]['revision_id']
self._verify_buckets_status(
revision_id, revision_id, {bucket_name: 'unmodified'})
def test_revision_diff_multi_bucket_self(self):
bucket_names = []
revision_ids = []
for _ in range(3):
payload = base.DocumentFixture.get_minimal_multi_fixture(count=3)
bucket_name = test_utils.rand_name('bucket')
# Store each bucket that was created.
bucket_names.append(bucket_name)
documents = self.create_documents(bucket_name, payload)
# Store each revision that was created.
revision_id = documents[0]['revision_id']
revision_ids.append(revision_id)
# The last revision should contain history for the previous 2 revisions
# such that its diff history will show history for 3 buckets. Similarly
# the 2nd revision will have history for 2 buckets and the 1st revision
# for 1 bucket.
# 1st revision has revision history for 1 bucket.
self._verify_buckets_status(
revision_ids[0], revision_ids[0], {bucket_names[0]: 'unmodified'})
# 2nd revision has revision history for 2 buckets.
self._verify_buckets_status(
revision_ids[1], revision_ids[1],
{b: 'unmodified' for b in bucket_names[:2]})
# 3rd revision has revision history for 3 buckets.
self._verify_buckets_status(
revision_ids[2], revision_ids[2],
{b: 'unmodified' for b in bucket_names})
def test_revision_diff_modified(self):
payload = base.DocumentFixture.get_minimal_multi_fixture(count=3)
bucket_name = test_utils.rand_name('bucket')
documents = self.create_documents(bucket_name, payload)
revision_id = documents[0]['revision_id']
payload[0]['data'] = {'modified': 'modified'}
comparison_documents = self.create_documents(bucket_name, payload)
comparison_revision_id = comparison_documents[0]['revision_id']
self._verify_buckets_status(
revision_id, comparison_revision_id, {bucket_name: 'modified'})
def test_revision_diff_multi_revision_modified(self):
payload = base.DocumentFixture.get_minimal_multi_fixture(count=3)
bucket_name = test_utils.rand_name('bucket')
revision_ids = []
for _ in range(3):
payload[0]['data'] = {'modified': test_utils.rand_name('modified')}
documents = self.create_documents(bucket_name, payload)
revision_id = documents[0]['revision_id']
revision_ids.append(revision_id)
for pair in [(0, 1), (0, 2), (1, 2)]:
self._verify_buckets_status(
revision_ids[pair[0]], revision_ids[pair[1]],
{bucket_name: 'modified'})
def test_revision_diff_multi_revision_multi_bucket_modified(self):
revision_ids = []
bucket_name = test_utils.rand_name('bucket')
alt_bucket_name = test_utils.rand_name('bucket')
bucket_names = [bucket_name, alt_bucket_name] * 2
# Create revisions by modifying documents in `bucket_name` and
# `alt_bucket_name`.
for bucket_idx in range(4):
payload = base.DocumentFixture.get_minimal_multi_fixture(count=3)
documents = self.create_documents(
bucket_names[bucket_idx], payload)
revision_id = documents[0]['revision_id']
revision_ids.append(revision_id)
# Between revision_ids[0] and [1], bucket_name is unmodified and
# alt_bucket_name is created.
self._verify_buckets_status(
revision_ids[0], revision_ids[1],
{bucket_name: 'unmodified', alt_bucket_name: 'created'})
|
# Between revision_ids[0] and [2], bucket_name is modified (by 2) and
# alt_bucket_name is created (by 1).
self._verify_buckets_status(
revision_ids[0], revision_ids[2],
{bucket_name: 'modified', alt_bucket_name: 'created'})
# Between revision_ids[0] and [3], bucket_name is modified (by [2]) and
# alt_bucket_name is created (by [1]) (as well as modified by [3]).
self._verify_buckets_status(
revision_ids[0], revision_ids[3],
{bucket_name: 'modified', alt_bucket_name: 'created'})
# Between revision_ids[1] and [2], bucket_name is modified but
# alt_bucket_name remains unmodified.
self._verify_buckets_status(
revision_ids[1], revision_ids[2],
{bucket_name: 'modified', alt_bucket_name: 'unmodified'})
# Between revision_ids[1] and [3], bucket_name is modified (by [2]) and
# alt_bucket_name is modified by [3].
self._verify_buckets_status(
revision_ids[1], revision_ids[3],
{bucket_name: 'modified', alt_bucket_name: 'modified'})
# Between revision_ids[2] and [3], alt_bucket_name is modified but
# bucket_name remains unmodified.
self._verify_buckets_status(
revision_ids[2], revision_ids[3],
{bucket_name: 'unmodified', alt_bucket_name: 'modified'})
def test_revis
|
megasan/210-CT
|
coursework 6.py
|
Python
|
mit
| 903
| 0.006645
|
""" Write the pseudocode and code for a function that reverses the words in a sentence. Input: "This is awesome" Output: "awesome is This". Give the Big O notation. """
def reverse(sentence):
""" split original sentence into a list, then append elements of the old list to the new list starting from last to first. the
|
n join the list back toghether. """
original = sentence.split()
reverse = []
count = len(original) - 1
while count >= 0:
reverse.append(original[count])
count = count - 1
result = " ".join(reverse)
return result
""" sentence <- input sentence
result <- empty list
split_sentence <- sentence split into array
index <- length of split_sentence - 1
while index >= 0
result append split_sentence[index]
index <- index - 1
end while
return result
O(N)
"""
|
michaelkirk/QGIS
|
python/plugins/processing/algs/lidar/fusion/FirstLastReturn.py
|
Python
|
gpl-2.0
| 2,653
| 0.001131
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
FirstLastReturn.py
---------------------
Date : May 2014
Copyright : (C) 2014 by Niccolo' Marchi
Email : sciurusurbanus at hotmail dot it
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = "Niccolo' Marchi"
__date__ = 'May 2014'
__copyright__ = "(C) 2014 by Niccolo' Marchi"
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from processing.core.parameters import ParameterFile
from processing.core.parameters import ParameterBoolean
from processing.core.outputs import OutputFile
from FusionAlgorithm import FusionAlgorithm
from FusionUtils import FusionUtils
class FirstLastReturn(FusionAlgorithm):
INPUT = 'INPUT'
OUTPUT = 'OUTPUT'
SWITCH = 'SWITCH'
def defineCharacteristics(self):
self.name, self.i18n_name = self.trAlgorithm('First&Last Return')
self.group, self.i18n_group = self.trAlgorithm('Points')
self.addParameter(ParameterFile(self.INPUT, self.tr('Input LAS layer')))
self.addParameter(ParameterBoolean(
self.SWITCH, self.tr('Use LAS info'), True))
self.addOutput(OutputFile(self.OUTPUT, self.tr('Output layers')))
self.addAdvancedModifiers()
def processAlgorithm(self, progress):
commands = [os.path.join(FusionUtils.FusionPath(), 'FirstLastReturn.exe')]
commands.append('/verbose')
if self.getParameterValue(self.SWITCH):
commands.append('/uselas')
self.addAdvancedModifiersToCommand(commands)
outFile = self.getOutputValue(self.OUTPUT)
commands.append(outFile)
files = self.getParameterValue(self.INPUT).split(';')
if len(files) == 1:
commands.append(self.getParameterValue(self.INPUT))
else:
FusionUtils.createFileList(files)
commands.append(FusionUtils.tempFileListFilepath())
FusionUtils.runFusion(commands, progress)
|
jejung/godot
|
doc/tools/doc_status.py
|
Python
|
mit
| 15,221
| 0.003679
|
#!/usr/bin/env python3
import os
import sys
import re
import math
import platform
import xml.etree.ElementTree as ET
################################################################################
# Config #
################################################################################
flags = {
'c': platform.platform() != 'Windows', # Disable by default on windows, since we use ANSI escape codes
'b': False,
'g': False,
's': False,
'u': False,
'h': False,
'p': False,
'o': True,
'i': False,
'a': True,
}
flag_descriptions = {
'c': 'Toggle colors when outputting.',
'b': 'Toggle showing only not fully described classes.',
'g': 'Toggle showing only completed classes.',
's': 'Toggle showing comments about the status.',
'u': 'Toggle URLs to docs.',
'h': 'Show help and exit.',
'p': 'Toggle showing percentage as well as counts.',
'o': 'Toggle overall column.',
'i': 'Toggle collapse of class items columns.',
'a': 'Toggle showing all items.',
}
long_flags = {
'colors': 'c',
'use-colors': 'c',
'bad': 'b',
'only-bad': 'b',
'good': 'g',
'only-good': 'g',
'comments': 's',
'status': 's',
'urls': 'u',
'gen-url': 'u',
'help': 'h',
'percent': 'p',
'use-percentages': 'p',
'overall': 'o',
'use-overall': 'o',
'items': 'i',
'collapse': 'i',
'all': 'a',
}
table_columns = ['name', 'brief_description', 'description', 'methods', 'constants', 'members', 'signals']
table_column_names = ['Name', 'Brief Desc.', 'Desc.', 'Methods', 'Constants', 'Members', 'Signals']
colors = {
'name': [36], # cyan
'part_big_problem': [4, 31], # underline, red
'part_problem': [31], # red
'part_mostly_good': [33], # yellow
'part_good': [32], # green
'url': [4, 34], # underline, blue
'section': [1, 4], # bold, underline
'state_off': [36], # cyan
'state_on': [1, 35], # bold, magenta/plum
}
overall_progress_description_weigth = 10
################################################################################
# Utils #
################################################################################
def validate_tag(elem, tag):
if elem.tag != tag:
print('Tag mismatch, expected "' + tag + '", got ' + elem.tag)
sys.exit(255)
def color(color, string):
if flags['c']:
color_format = ''
for code in colors[color]:
color_format += '\033[' + str(code) + 'm'
return color_format + string + '\033[0m'
else:
return string
ansi_escape = re.compile(r'\x1b[^m]*m')
def nonescape_len(s):
return len(ansi_escape.sub('', s))
################################################################################
# Classes #
################################################################################
class ClassStatusProgress:
def __init__(self, described=0, total=0):
self.described = described
self.total = total
def __add__(self, other):
return ClassStatusProgress(self.described + other.described, self.total + other.total)
def increment(self, described):
if described:
self.described += 1
self.total += 1
def is_ok(self):
return self.described >= self.total
def to_configured_colored_string(self):
if flags['p']:
return self.to_colored_string('{percent}% ({has}/{total})', '{pad_percent}{pad_described}{s}{pad_total}')
else:
return self.to_colored_string()
def to_colored_string(self, format='{has}/{total}', pad_format='{pad_described}{s}{pad_total}'):
ratio = self.described / self.total if self.total != 0 else 1
percent = round(100 * ratio)
s = format.format(has=str(self.described), total=str(self.total), percent=str(percent))
if self.described >= self.total:
s = color('part_good', s)
elif self.described >= self.total / 4 * 3:
s = color('part_mostly_good', s)
elif self.described > 0:
s = color('part_problem', s)
else:
s = color('part_big_problem', s)
pad_size = max(len(str(self.described)), len(str(self.total)))
pad_described = ''.ljust(pad_size - len(str(self.described)))
pad_percent = ''.ljust(3 - len(str(percent)))
pad_total = ''.ljust(pad_size - len(str(self.total)))
return pad_format.format(pad_described=pad_described, pad_total=pad_total, pad_percent=pad_percent, s=s)
class ClassStatus:
|
def __init__(self, name=''):
self.name = name
self.has_brief_description = True
self.has_description = True
self.progresses = {
'methods': ClassStatusProgress(),
'constants': ClassStatusProgress(),
'members': ClassStatusProgress(),
'signals': ClassStatusProgress()
}
def __add__(self, other):
new_status = ClassStatus()
new_status.name = self.name
new_status.has_brief_description = self.has_brief_description and other.has_brief_description
new_status.has_description = self.has_description and other.has_description
for k in self.progresses:
new_status.progresses[k] = self.progresses[k] + other.progresses[k]
return new_status
def is_ok(self):
ok = True
ok = ok and self.has_brief_description
ok = ok and self.has_description
for k in self.progresses:
ok = ok and self.progresses[k].is_ok()
return ok
def make_output(self):
output = {}
output['name'] = color('name', self.name)
ok_string = color('part_good', 'OK')
missing_string = color('part_big_problem', 'MISSING')
output['brief_description'] = ok_string if self.has_brief_description else missing_string
output['description'] = ok_string if self.has_description else missing_string
description_progress = ClassStatusProgress(
(self.has_brief_description + self.has_description) * overall_progress_description_weigth,
2 * overall_progress_description_weigth
)
items_progress = ClassStatusProgress()
for k in ['methods', 'constants', 'members', 'signals']:
items_progress += self.progresses[k]
output[k] = self.progresses[k].to_configured_colored_string()
output['items'] = items_progress.to_configured_colored_string()
output['overall'] = (description_progress + items_progress).to_colored_string('{percent}%', '{pad_percent}{s}')
if self.name.startswith('Total'):
output['url'] = color('url', 'http://docs.godotengine.org/en/latest/classes/')
if flags['s']:
output['comment'] = color('part_good', 'ALL OK')
else:
output['url'] = color('url', 'http://docs.godotengine.org/en/latest/classes/class_{name}.html'.format(name=self.name.lower()))
if flags['s'] and not flags['g'] and self.is_ok():
output['comment'] = color('part_good', 'ALL OK')
return output
def generate_for_class(c):
status = ClassStatus()
status.name = c.attrib['name']
# setgets do not count
methods = []
for tag in list(c):
if tag.tag in ['methods']:
for sub_tag in list(tag):
methods.append(sub_tag.find('name'))
if tag.tag in ['members']:
for sub_tag in list(tag):
try:
methods.remove(sub_tag.find('setter'))
methods.remove(sub_tag.find('getter'))
except:
pass
for tag in list(c):
if tag.tag == 'brief_description':
status.has_brief_description = len(tag.text.strip()) > 0
elif tag.tag == 'description':
st
|
vDial-up/client
|
libs/vDialing.py
|
Python
|
gpl-3.0
| 5,879
| 0.004252
|
# vDial-up client
# Copyright (C) 2015 - 2017 Nathaniel Olsen
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from time import sleep
import socket
import libs.vDialupcore as core
from multiprocessing import Process
import sys
import struct
def MD5SUM_mismatch(vNumber_to_connect, sock):
print("*Warning: The server's MD5SUM does not match with the one listed on file, Do you wish to continue? (Y/N)")
if vNumber_to_connect == core.RegServ_vNumber:
MD5SUM_on_file = core.RegServ_MD5SUM
else:
pass # Right now, there is no way to retrieve a server's md5sum until md5sum retrieving is implemented in RegServ.
print("MD5SUM on file: %s" % (MD5SUM_on_file))
print("MD5SUM according to server: %s" % (received.split()[1]))
print("")
choice = input("Enter choice (Y/N): ")
if choice == 'Y' or choice == 'y':
init(sock, vNumber_to_connect)
if choice == 'N' or choice == 'n':
sys.exit() # Exit for now.
class main():
def send_msg(sock, msg):
# Prefix each message with a 4-byte length (network byte order)
msg = struct.pack('>I', len(msg)) + str.encode(msg)
sock.sendall(msg)
def recv_msg(sock):
# Read message length and unpack it into an integer
raw_msglen = main.recvall(sock, 4)
if not raw_msglen:
return None
msglen = struct.unpack('>I', str.encode(raw_msglen))[0]
return main.recvall(sock, msglen)
def recvall(sock, n):
# Helper function to recv n bytes or return None if EOF is hit
data = ''
while len(data) < n:
packet = (sock.recv(n - len(data)).decode('utf-8'))
if not packet:
return None
data += packet
return data
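# Added illustration (not in the original source): with this length-prefixed
# framing, send_msg(sock, "PONG") writes b'\x00\x00\x00\x04' followed by
# b'PONG' -- a big-endian 4-byte length and then the UTF-8 payload -- and
# recv_msg() first reads those 4 length bytes, then exactly that many
# payload bytes before returning them.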
def servping(sock):
while 1:
sleep(20)
sock.sendall(bytes("SERVPING
|
" + "\n", "utf-8"))
if main.listen_for_data(sock) == "PONG":
|
break
else:
print("Disconnected: Connection timeout.")
def vdialing(vNumber_to_connect, vNumber_IP):
if core.config['use_ipv6_when_possible']:
sock = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
else:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
print("vDialing %s..." % (vNumber_to_connect))
if core.config['vDial-up Settings']['vNumber'] == "000000000":
core.dialupnoise()
try:
sock.connect((vNumber_IP, 5000))
except ConnectionRefusedError:
print("Error: Connection Refused.")
sys.exit()
main.send_msg(sock, "INITPING")
if main.recv_msg(sock) == "PONG":
print("Connected.")
#Process(target=main.servping, args=[sock]).start() # The ability to check if a server connection is still alive is coming soon.
main.send_msg(sock, "MD5SUMCHECK")
if main.recv_msg(sock).split()[0] == "MD5SUM:":
if main.recv_msg(sock).split()[1] == core.RegServ_MD5SUM:
print("MD5SUM verification was succeeded.")
else:
MD5SUM_mismatch(vNumber_to_connect, sock)
else:
print("Error: Unable to retrieve MD5SUM.")
main.init(sock, vNumber_to_connect)
else:
print("Error: Server did not properly respond to INITPING, disconnecting.")
else:
Process(target=core.dialupnoise()).start()
sock.connect((vNumber_IP, 5000))
main.send_msg(sock, "INITPING")
if main.recv_msg(sock) == "PONG":
print("Connected to Registation Server!")
main.send_msg(sock, "MD5SUMCHECK")
if main.recv_msg(sock).split()[0] == "MD5SUM:":
if main.recv_msg(sock).split()[1] == core.RegServ_MD5SUM:
print("MD5SUM verification was succeeded.")
else:
MD5SUM_mismatch(vNumber_to_connect, sock)
else:
print("Error: Unable to retrieve MD5SUM.")
else:
print("Error: Server did not properly respond to INITPING, disconnecting.")
def init(sock, vNumber_to_connect):
main.send_msg(sock, "VNUMBER: {}".format(core.config['vDial-up Settings']['vNumber']))
if core.config['vDial-up Settings']['vNumber'] == "000000000":
main.send_msg(sock, "CLIENTREGISTER")
if main.recv_msg(sock).split()[0] == "CONFIG:":
if main.recv_msg(sock).split()[1] == "vNumber":
core.config['vDial-up Settings']['vNumber'] = main.recv_msg(sock).split()[2]
core.saveconfig()
if main.recv_msg(sock).split()[1] == "Key":
core.config['vDial-up Settings']['Key'] = main.recv_msg(sock).split()[2]
core.saveconfig()
if main.recv_msg(sock).split()[0] == "TOCLIENT:":
print(" ".join(main.recv_msg(sock).split()[2:]))
else:
main.send_msg(sock, "KEY: {}".format(core.config['vDial-up Settings']['Key']))
main.send_msg(sock, "INIT")
|
PLyczkowski/Sticky-Keymap
|
2.74/scripts/addons_contrib/cursor_control/history.py
|
Python
|
gpl-2.0
| 9,295
| 0.006885
|
# -*- coding: utf-8 -*-
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
"""
TODO:
IDEAS:
LATER:
ISSUES:
Bugs:
Seg-faults when unregistering addon...
Mites:
* History back button does not light up on first cursor move.
It does light up on the second, or when mouse enters the tool-area
* Switching between local and global view triggers new cursor position in history trace.
* Each consecutive click on the linex operator triggers new cursor position in history trace.
(2011-01-16) Was not able to fix this because of some strange script behaviour
while trying to clear linexChoice from addHistoryLocation
QUESTIONS:
"""
import bpy
import bgl
import math
from mathutils import Vector, Matrix
from mathutils import geometry
from misc_utils import *
from constants_utils import *
from cursor_utils import *
from ui_utils import *
class CursorHistoryData(bpy.types.PropertyGroup):
# History tracker
historyDraw = bpy.props.BoolProperty(description="Draw history trace in 3D view",default=1)
historyDepth = 144
historyWindow = 12
historyPosition = [-1] # Integer must be in a list or else it can not be written to
historyLocation = []
#historySuppression = [False] # Boolean must be in a list or else it can not be written to
def addHistoryLocation(self, l):
if(self.historyPosition[0]==-1):
self.historyLocation.append(l.copy())
self.historyPosition[0]=0
return
if(l==self.historyLocation[self.historyPosition[0]]):
return
#if self.historySuppression[0]:
#self.historyPosition[0] = self.historyPosition[0] - 1
#else:
#self.hideLinexChoice()
while(len(self.historyLocation)>self.historyPosition[0]+1):
self.historyLocation.pop(self.historyPosition[0]+1)
#self.historySuppression[0] = False
self.historyLocation.append(l.copy())
if(len(self.historyLocation)>self.historyDepth):
self.historyLocation.pop(0)
self.historyPosition[0] = len(self.historyLocation)-1
#print (self.historyLocation)
#def enableHistorySuppression(self):
#self.historySuppression[0] = True
def previousLocation(self):
if(self.historyPosition[0]<=0):
return
self.historyPosition[0] = self.historyPosition[0] - 1
CursorAccess.setCursor(self.historyLocation[self.historyPosition[0]].copy())
def nextLocation(self):
if(self.historyPosition[0]<0):
return
if(self.historyPosition[0]+1==len(self.historyLocation)):
return
self.historyPosition[0] = self.historyPosition[0] + 1
CursorAccess.setCursor(self.historyLocation[self.historyPosition[0]].copy())
class VIEW3D_OT_cursor_previous(bpy.types.Operator):
"""Previous cursor location"""
bl_idname = "view3d.cursor_previous"
bl_label = "Previous cursor location"
bl_options = {'REGISTER'}
def modal(self, context, event):
return {'FINISHED'}
def execute(self, context):
cc = context.scene.cursor_history
cc.previousLocation()
return {'FINISHED'}
class VIEW3D_OT_cursor_next(bpy.types.Operator):
"""Next cursor location"""
bl_idname = "view3d.cursor_next"
bl_label = "Next cursor location"
bl_options = {'REGISTER'}
def modal(self, context, event):
return {'FINISHED'}
def execute(self, context):
cc = context.scene.cursor_history
cc.nextLocation()
return {'FINISHED'}
class VIEW3D_OT_cursor_history_show(bpy.types.Operator):
"""Show cursor trace"""
bl_idname = "view3d.cursor_history_show"
bl_label = "Show cursor trace"
bl_options = {'REGISTER'}
def modal(self, context, event):
return {'FINISHED'}
def execute(self, context):
cc = context.scene.cursor_history
cc.historyDraw = True
BlenderFake.forceRedraw()
return {'FINISHED'}
class VIEW3D_OT_cursor_history_hide(bpy.types.Operator):
"""Hide cursor trace"""
bl_idname = "view3d.cursor_history_hide"
bl_label = "Hide cursor trace"
bl_options = {'REGISTER'}
def modal(self, context, event):
return {'FINISHED'}
def execute(self, context):
cc = context.scene.cursor_history
cc.historyDraw = False
BlenderFake.forceRedraw()
return {'FINISHED'}
class VIEW3D_PT_cursor_history(bpy.types.Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'UI'
bl_label = "Cursor History"
bl_options = {'DEFAULT_CLOSED'}
@classmethod
def poll(self, context):
# Display in object or edit mode.
cc = context.scene.cursor_history
cc.addHistoryLocation(CursorAccess.getCursor())
if (context.area.type == 'VIEW_3D' and
(context.mode == 'EDIT_MESH'
or context.mode == 'OBJECT')):
return 1
return 0
def draw_header(self, context):
layout = self.layout
cc = context.scene.cursor_history
if cc.historyDraw:
GUI.drawIconButton(True, layout, 'RESTRICT_VIEW_OFF', "view3d.cursor_history_hide", False)
else:
GUI.drawIconButton(True, layout, 'RESTRICT_VIEW_ON' , "view3d.cursor_history_show", False)
def draw(self, context):
layout = self.layout
sce = context.scene
cc = context.scene.cursor_history
row = layout.row()
row.label("Navigation: ")
GUI.drawIconButton(cc.historyPosition[0]>0, row, 'PLAY_REVERSE', "view3d.cursor_previous")
#if(cc.historyPosition[0]<0):
#row.label(" -- ")
#else:
#row.label(" "+str(cc.historyPosition[0])+" ")
GUI.drawIconButton(cc.historyPosition[0]<len(cc.historyLocation)-1, row, 'PLAY', "view3d.cursor_next")
row = layout.row()
col = row.column()
col.prop(CursorAccess.findSpace(), "cursor_location")
class VIEW3D_PT_cursor_history_init(bpy.types.Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'UI'
bl_label = "Register callback"
bl_options = {'DEFAULT_CLOSED'}
initDone = False
_handle = None
@staticmethod
def handle_add(self, context):
VIEW3D_PT_cursor_history_init._handle = bpy.types.SpaceView3D.draw_handler_add(
cursor_history_draw, (self, context), 'WINDOW', 'POST_PIXEL')
@staticmethod
def handle_remove():
if VIEW3D_PT_cursor_history_init._handle is not None:
bpy.types.SpaceView3D.draw_handler_remove(VIEW3D_PT_cursor_history_init._handle, 'WINDOW')
VIEW3D_PT_cursor_history_init._handle = None
@classmethod
def poll(cls, context):
if VIEW3D_PT_cursor_history_init.initDone:
return False
print ("Cursor History draw-callback registration...")
sce = context.scene
if context.area.type == 'VIEW_3D':
VIEW3D_PT_cursor_history_init.handle_add(cls, context)
VIEW3D_PT_cursor_history_init.initDone = True
print ("Cursor History draw-callback registered")
# Unregister to prevent double registration...
# Started to fail after v2.57
# bpy.types.unregister(VIEW3D_PT_cursor_history_init)
|
bvancsics/TCMC
|
maximum_matching.py
|
Python
|
gpl-3.0
| 35,707
| 0.002745
|
"""Weighted maximum matching in general graphs.
The algorithm is taken from "Efficient Algorithms for Finding Maximum
Matching in Graphs" by Zvi Galil, ACM Computing Surveys, 1986.
It is based on the "blossom" method for finding augmenting paths and
the "primal-dual" method for finding a matching of maximum weight, both
due to Jack Edmonds.
Some ideas came from "Implementation of algorithms for maximum matching
on non-bipartite graphs" by H.J. Gabow, Standford Ph.D. thesis, 1973.
A C program for maximum weight matching by Ed Rothberg was used extensively
to validate this new code.
http://jorisvr.nl/article/maximum-matching#ref:4
"""
#
# Changes:
#
# 2013-04-07
# * Added Python 3 compatibility with contributions from Daniel Saunders.
#
# 2008-06-08
# * First release.
#
from __future__ import print_function
# If assigned, DEBUG(str) is called with lots of debug messages.
DEBUG = None
"""def DEBUG(s):
from sys import stderr
print('DEBUG:', s, file=stderr)
"""
# Check delta2/delta3 computation after every substage;
# only works on integer weights, slows down the algorithm to O(n^4).
CHECK_DELTA = False
# Check optimality of solution before returning; only works on integer weights.
CHECK_OPTIMUM = True
def maxWeightMatching(edges, maxcardinality=False):
"""Compute a maximum-weighted matching in the general undirected
weighted graph given by "edges". If "maxcardinality" is true,
only maximum-cardinality matchings are considered as solutions.
Edges is a sequence of tuples (i, j, wt) describing an undirected
edge between vertex i and vertex j with weight wt. There is at most
one edge between any two vertices; no vertex has an edge to itself.
Vertices are identified by consecutive, non-negative integers.
Return a list "mate", such that mate[i] == j if vertex i is
matched to vertex j, and mate[i] == -1 if vertex i is not matched.
This function takes time O(n ** 3)."""
#
# Vertices are numbered 0 .. (nvertex-1).
# Non-trivial blossoms are numbered nvertex .. (2*nvertex-1)
#
# Edges are numbered 0 .. (nedge-1).
# Edge endpoints are numbered 0 .. (2*nedge-1), such that endpoints
# (2*k) and (2*k+1) both belong to edge k.
#
# Many terms used in the comments (sub-blossom, T-vertex) come from
# the paper by Galil; read the paper before reading this code.
#
# Python 2/3 compatibility.
from sys import version as sys_version
if sys_version < '3':
integer_types = (int, long)
else:
integer_types = (int,)
# Deal swiftly with empty graphs.
if not edges:
return [ ]
# Count vertices.
nedge = len(edges)
nvertex = 0
for (i, j, w) in edges:
assert i >= 0 and j >= 0 and i != j
if i >= nvertex:
nvertex = i + 1
if j >= nvertex:
nvertex = j + 1
# Find the maximum edge weight.
maxweight = max(0, max([ wt for (i, j, wt) in edges ]))
# If p is an edge endpoint,
# endpoint[p] is the vertex to which endpoint p is attached.
# Not modified by the algorithm.
endpoint = [ edges[p//2][p%2] for p in range(2*nedge) ]
# If v is a vertex,
# neighbend[v] is the list of remote endpoints of the edges attached to v.
# Not modified by the algorithm.
neighbend = [ [ ] for i in range(nvertex) ]
for k in range(len(edges)):
(i, j, w) = edges[k]
neighbend[i].append(2*k+1)
neighbend[j].append(2*k)
# If v is a vertex,
# mate[v] is the remote endpoint of its matched edge, or -1 if it is single
# (i.e. endpoint[mate[v]] is v's partner vertex).
# Initially all vertices are single; updated during augmentation.
mate = nvertex * [ -1 ]
# If b is a top-level blossom,
# label[b] is 0 if b is unlabeled (free);
# 1 if b is an S-vertex/blossom;
# 2 if b is a T-vertex/blossom.
# The label of a vertex is found by looking at the label of its
# top-level containing blossom.
# If v is a vertex inside a T-blossom,
# label[v] is 2 iff v is reachable from an S-vertex outside the blossom.
# Labels are assigned during a stage and reset after each augmentation.
label = (2 * nvertex) * [ 0 ]
# If b is a labeled top-level blossom,
# labelend[b] is the remote endpoint of the edge through which b obtained
# its label, or -1 if b's base vertex is single.
# If v is a vertex inside a T-blossom and label[v] == 2,
# labelend[v] is the remote endpoint of the edge through which v is
# reachable from outside the blossom.
labelend = (2 * nvertex) * [ -1 ]
# If v is a vertex,
# inblossom[v] is the top-level blossom to which v belongs.
# If v is a top-level vertex, v is itself a blossom (a trivial blossom)
# and inblossom[v] == v.
# Initially all vertices are top-level trivial blossoms.
inblossom = list(range(nvertex))
# If b is a sub-blossom,
# blossomparent[b] is its immediate parent (sub-)blossom.
# If b is a top-level blossom, blossomparent[b] is -1.
blossomparent = (2 * nvertex) * [ -1 ]
# If b is a non-trivial (sub-)blossom,
# blossomchilds[b] is an ordered list of its sub-blossoms, starting with
# the base and going round the blossom.
blossomchilds = (2 * nvertex) * [ None ]
# If b is a (sub-)blossom,
# blossombase[b] is its base VERTEX (i.e. recursive sub-blossom).
blossombase = list(range(nvertex)) + nvertex * [ -1 ]
# If b is a non-trivial (sub-)blossom,
# blossomendps[b] is a list of endpoints on its connecting edges,
# such that blossomendps[b][i] is the local endpoint of blossomchilds[b][i]
# on the edge that connects it to blossomchilds[b][wrap(i+1)].
blossomendps = (2 * nvertex) * [ None ]
# If v is a free vertex (or an unreached vertex inside a T-blossom),
# bestedge[v] is the edge to an S-vertex with least slack,
# or -1 if there is no such edge.
# If b is a (possibly trivial) top-level S-blossom,
# bestedge[b] is the least-slack edge to a different S-blossom,
# or -1 if there is no such edge.
# This is used for efficient computation of delta2 and delta3.
bestedge = (2 * nvertex) * [ -1 ]
# If b is a non-trivial top-level S-blossom,
# blossombestedges[b] is a list of least-slack edges to neighbouring
# S-blossoms, or None if no such list has been computed yet.
# This is used for efficient computation of delta3.
blossombestedges = (2 * nvertex) * [ None ]
# List of currently unused blossom numbers.
unusedblossoms = list(range(nvertex, 2*nvertex))
# If v is a vertex,
# dualvar[v] = 2 * u(v) where u(v) is the v's variable in the dual
# optimization problem (multiplication by two ensures integer values
# throughout the algorithm if all edge weights are integers).
# If b is a non-trivial blossom,
# dualvar[b] = z(b) where z(b) is b's variable in the dual optimization
# problem.
dualvar = nvertex * [ maxweight ] + nvertex * [ 0 ]
# If allowedge[k] is true, edge k has zero slack in the optimization
# problem; if allowedge[k] is false, the edge's slack may or may not
# be zero.
allowedge = nedge * [ False ]
# Queue of newly discovered S-vertices.
queue = [ ]
# Return 2 * slack of edge k (does not work inside blossoms).
def slack(k):
(i, j, wt) = edges[k]
return dualvar[i] + dualvar[j] - 2 * wt
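    # Illustrative (added note): with edges [(0, 1, 5), (1, 2, 6), (0, 2, 4)]
    # maxweight is 6 and every dualvar starts at 6, so slack of edge (1, 2, 6)
    # is 6 + 6 - 2*6 == 0 while slack of edge (0, 2, 4) is 6 + 6 - 2*4 == 4.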
# Generate the leaf vertices of a blossom.
def blossomLeaves(b):
if b < nvertex:
yield b
else:
for t in blossomchilds[b]:
if t < nvertex:
yield t
else:
for v in blossomLeaves(t):
yield v
# Assign label t to the top-level blossom containing vertex w
# and record the fact that w was reached through the edge with
# remote endpoint p.
def assignLabel(w, t, p):
if DEBUG: DEBUG('assignLabel(%d,%d,%d)' % (w, t, p))
b = inblossom[w]
assert label[w] == 0 and label[b] == 0
label[w] = label[b] = t
|
quittle/bazel_toolbox
|
actions/scripts/jinja_helper.py
|
Python
|
apache-2.0
| 2,197
| 0.005007
|
# Copyright (c) 2016 Dustin Doloff
# Licensed under Apache License v2.0
import jinja2
import os
MESSAGE_FILL = '`'
AUTO_GEN_MESSAGE = """
``````````````````````````````````````````````````````
``````````````````````````````````````````````````````
````````______________________________________ ``````
```````/ /\ `````
``````/ /..\ ````
`````/ AUTO-GENERATED FILE. DO NOT EDIT /....\ ```
````/ /______\ ``
```/_____________________________________/````````````
``````````````````````````````````````````````````````
``````````````````````````````````````````````````````
"""
def reverse(v):
"""
Reverses any iterable value
"""
return v[::-1]
def auto_gen_message(open, fill, close):
"""
Produces the auto-generated warning header with language-specific syntax
open - str - The language-specific opening of the comment
fill - str - The values to fill the background with
close - str - The language-specific closing of the comment
"""
assert open or fill or close
message = AUTO_GEN_MESSAGE.strip()
if open:
message = message.replace(MESSAGE_FILL * len(open), open, 1)
if close:
message = reverse(reverse(message).replace(MESSAGE_FILL * len(close), close[::-1], 1))
if fill:
message = message.replace(MESSAGE_FILL * len(fill), fill)
return message
def generate(template, config, out_file, pretty=False):
path, ext = os.path.splitext(out_file.name)
ext = ext[1:]
if pretty:
if ext == 'py':
out_file.write(auto_gen_message('#', '#', ''))
elif ext == 'html':
out_file.write(auto_gen_message('<!--', '-', '-->'))
template_path, template_filename = os.path.split(template)
env = jinja2.Environment(loader = jinja2.FileSystemLoader([template_path]))
template = env.get_template(template_filename)
template.stream(config).dump(out_file)
# There needs to be an extra line at the end to make it a valid text file. Jinja strips trailing
# whitespace
if pretty:
out_file.write(os.linesep)
|
cityscapesc/specobs
|
main/tools/USRP_N200_UBX40_Filter_Testing/GRC/FFTshift.py
|
Python
|
apache-2.0
| 837
| 0.04779
|
#!/usr/bin/env python
from gnuradio import gr
from gnuradio import blocks
from gnuradio import digital
import numpy
# Applies fftshift to a vector.
#
class FFTshift(gr.basic_block):
#constructor
def __init__(self,size,drop_when_overloaded):
gr.basic_block.__init__(self, name="FFT_Shift",
in_sig=[(numpy.float32,size)],
out_sig=[(numpy.float32,size)])
self.drop_true = drop_when_overloaded
#run
def general_work(self, input_items, output_items):
in0 = input_items[0]
out = output_items[0]
if len(out) >= len(in0):
ps_len = len(in0)
consume_len = ps_len
elif self.drop_true:
ps_len = len(out)
consume_len = len(in0)
else:
ps_len = len(out)
consume_len = ps_len
for cnt in range(0,ps_len):
out[cnt] = numpy.fft.fftshift(in0[cnt])
self.consume_each(consume_len)
return ps_len
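# Illustrative wiring (added note, an assumption rather than part of this repo):
# in a flowgraph the block would be created as
#   fft_shift = FFTshift(size=1024, drop_when_overloaded=True)
# and connected between a source of float vectors of length 1024 and a sink of
# the same vector size; each vector is passed through numpy.fft.fftshift.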
|
bartoldeman/easybuild-easyblocks
|
easybuild/easyblocks/r/rosetta.py
|
Python
|
gpl-2.0
| 12,847
| 0.003269
|
##
# Copyright 2009-2018 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/easybuilders/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for building and installing Rosetta, implemented as an easyblock
@author: Stijn De Weirdt (Ghent University)
@author: Dries Verdegem (Ghent University)
@author: Kenneth Hoste (Ghent University)
@author: Pieter De Baets (Ghent University)
@author: Jens Timmerman (Ghent University)
"""
import fileinput
import os
import re
import shutil
import sys
import easybuild.tools.toolchain as toolchain
from easybuild.easyblocks.icc import get_icc_version
from easybuild.framework.easyblock import EasyBlock
from easybuild.framework.easyconfig import CUSTOM
from easybuild.tools.build_log import EasyBuildError
from easybuild.tools.filetools import extract_file
from easybuild.tools.modules import get_software_root, get_software_version
from easybuild.tools.run import run_cmd
from easybuild.tools.systemtools import get_shared_lib_ext
class EB_Rosetta(EasyBlock):
"""Support for building/installing Rosetta."""
def __init__(self, *args, **kwargs):
"""Add extra config options specific to Rosetta."""
super(EB_Rosetta, self).__init__(*args, **kwargs)
self.srcdir = None
self.cxx = None
def extract_step(self):
"""Extract sources, if they haven't been already."""
super(EB_Rosetta, self).extract_step()
# locate sources, and unpack if necessary
# old 'bundles' tarballs contain a gzipped tarball for source, recent ones contain unpacked source
try:
subdirs = os.listdir(self.builddir)
if len(subdirs) == 1:
prefix = os.path.join(self.builddir, subdirs[0])
else:
raise EasyBuildError("Found no or multiple subdirectories, expected exactly one: %s", subdirs)
self.srcdir = os.path.join(prefix, 'rosetta_source')
if not os.path.exists(self.srcdir):
self.srcdir = os.path.join(prefix, 'main', 'source')
if not os.path.exists(self.srcdir):
src_tarball = os.path.join(prefix, 'rosetta%s_source.tgz' % self.version)
if os.path.isfile(src_tarball):
self.srcdir = extract_file(src_tarball, prefix)
else:
raise EasyBuildError("Neither source directory '%s', nor source tarball '%s' found.",
self.srcdir, src_tarball)
except OSError, err:
raise EasyBuildError("Getting Rosetta sources dir ready failed: %s", err)
def detect_cxx(self):
"""Detect compiler name"""
# 'cxx' configure option expects a compiler name like 'gcc', 'icc', 'clang'; i.e. actually the C compiler command
# see also main/source/tools/build/basic.settings in Rosetta sources
self.cxx = os.getenv('CC_SEQ')
if self.cxx is None:
self.cxx = os.getenv('CC')
def configure_step(self):
"""
Configure build by creating tools/build/user.settings from configure options.
"""
# construct build options
defines = ['NDEBUG']
self.cfg.update('buildopts', "mode=release")
self.detect_cxx()
cxx_ver = None
if self.toolchain.comp_family() in [toolchain.GCC]: #@UndefinedVariable
cxx_ver = '.'.join(get_software_version('GCC').split('.')[:2])
elif self.toolchain.comp_family() in [toolchain.INTELCOMP]: #@UndefinedVariable
cxx_ver = '.'.join(get_icc_version().split('.')[:2])
else:
raise EasyBuildError("Don't know how to determine C++ compiler version.")
self.cfg.update('buildopts', "cxx=%s cxx_ver=%s" % (self.cxx, cxx_ve
|
r))
if self.toolchain.options.get('usempi', None):
self.cfg.update('buildopts', 'extras=mpi')
defines.extend(['USEMPI', 'MPICH_IGNORE_CXX_SEEK'])
# make sure important environment variables are passed down
# e.g., compiler env vars for MPI wrappers
env_vars = {}
for (key, val) in os.environ.items():
if key in ['I_MPI_CC', 'I_MPI_CXX', 'MPICH_CC', 'MPICH_CXX', 'OMPI_CC', 'OMPI_CXX']:
env_vars.update({key: val})
self.log.debug("List of extra environment variables to pass down: %s" % str(env_vars))
# create user.settings file
paths = os.getenv('PATH').split(':')
ld_library_paths = os.getenv('LD_LIBRARY_PATH').split(':')
cpaths = os.getenv('CPATH').split(':')
flags = [str(f).strip('-') for f in self.toolchain.variables['CXXFLAGS'].copy()]
txt = '\n'.join([
"settings = {",
" 'user': {",
" 'prepends': {",
" 'library_path': %s," % str(ld_library_paths),
" 'include_path': %s," % str(cpaths),
" },",
" 'appends': {",
" 'program_path': %s," % str(paths),
" 'flags': {",
" 'compile': %s," % str(flags),
#" 'mode': %s," % str(o_flags),
" },",
" 'defines': %s," % str(defines),
" },",
" 'overrides': {",
" 'cc': '%s'," % os.getenv('CC'),
" 'cxx': '%s'," % os.getenv('CXX'),
" 'ENV': {",
" 'INTEL_LICENSE_FILE': '%s'," % os.getenv('INTEL_LICENSE_FILE'), # Intel license file
" 'PATH': %s," % str(paths),
" 'LD_LIBRARY_PATH': %s," % str(ld_library_paths),
])
txt += '\n'
for (key, val) in env_vars.items():
txt += " '%s': '%s',\n" % (key, val)
txt += '\n'.join([
" },",
" },",
" 'removes': {",
" },",
" },",
"}",
])
us_fp = os.path.join(self.srcdir, "tools/build/user.settings")
try:
self.log.debug("Creating '%s' with: %s" % (us_fp, txt))
f = file(us_fp, 'w')
f.write(txt)
f.close()
except IOError, err:
raise EasyBuildError("Failed to write settings file %s: %s", us_fp, err)
# make sure specified compiler version is accepted by patching it in
os_fp = os.path.join(self.srcdir, "tools/build/options.settings")
cxxver_re = re.compile('(.*"%s".*)(,\s*"\*"\s*],.*)' % self.cxx, re.M)
for line in fileinput.input(os_fp, inplace=1, backup='.orig.eb'):
line = cxxver_re.sub(r'\1, "%s"\2' % cxx_ver, line)
sys.stdout.write(line)
def build_step(self):
"""
Build Rosetta using 'python ./scons.py bin <opts> -j <N>'
"""
try:
os.chdir(self.srcdir)
except OSError, err:
raise EasyBuildError("Failed to change to %s: %s", self.srcdir, err)
par = ''
if self.cfg['parallel']:
par = "-j %s" % self.cfg['parallel']
cmd = "python ./scons.py %s %s bin" % (self.cfg['buildopts'], par)
run_cmd(cmd, l
|
lemonad/jaikuengine
|
common/tests.py
|
Python
|
apache-2.0
| 2,923
| 0.008211
|
# Copyright 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from django import test
from common import api
from common import util
from common import validate
from common.test import base
class CommonViewTest(base.ViewTestCase):
def test_redirect_slash(self):
r = self.login_and_get('popular', '/user/popular/overview/')
redirected = self.assertRedirectsPrefix(r, '/user/popular/overview')
self.assertTemplateUsed(redirected, 'actor/templates/overview.html')
def test_confirm(self):
nonce = util.create_nonce('popular', 'entry_remove')
entry = 'stream/popular%40example.com/presence/12345'
path = '/user/popular/overview'
r = self.login_and_get('popular', path, {'entry_remove': entry,
'_nonce': nonce})
r = self.assertRedirectsPrefix(r, '/confirm')
self.assertContains(r, nonce)
self.assertContains(r, entry)
self.assertContains(r, path)
class UtilTestCase(test.TestCase):
def test_get_user_from_topic(self):
topics = [('root@example.com', 'inbox/root@example.com/presence'),
('root@example.com', 'inbox/root@example.com/overview'),
('root@example.com', 'stream/root@example.com/presence/12345'),
(None, 'stream//presence'),
(None, 'stream/something/else'),
('duuom+aasdd@gmail.com', 'crazy/duuom+aasdd@gmail.com/dddfff$$%%///'),
('asdad@asdasd@asdasd', 'multi/asdad@asdasd@asdasd/cllad/asdff')]
for t in topics:
self.assertEqual(util.get_user_from_topic(t[1]), t[0], t[1])
# We're going to import the rest of the test cases into the local
# namespace so that we can run them as
# python manage.py test common.WhateverTest
from common.test.api import *
from common.test.clean import *
from common.test.db import *
from common.test.domain import *
from common.test.monitor import *
from common.test.notification import *
from common.test.patterns import *
from common.test.queue import *
from common.test.sms import *
from common.test.throttle import *
from common.test.validate import *
from common.templatetags.test.avatar import *
from common.templatetags.test.format import *
from common.templatetags.test.presence import *
# This is for legacy compat with older tests
# TODO(termie): remove me when no longer needed
from common.test.base import *
from common.test.util import *
|
rkibria/yapyg
|
yapyg_widgets/screen_widget.py
|
Python
|
mit
| 18,072
| 0.012561
|
# Copyright (c) 2015 Raihan Kibria
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from kivy.uix.floatlayout import FloatLayout
from kivy.core.window import Window
from kivy.clock import Clock
from kivy.uix.button import Button
from kivy.uix.image import Image
from kivy.uix.label import Label
from kivy.core.window import Keyboard
from kivy import platform
from yapyg import texture_db
from yapyg import controls
from yapyg import debug
from yapyg_widgets.display_widget import DisplayWidget
from yapyg_widgets.joystick_widget import JoystickWidget
class ScreenWidget(FloatLayout):
KEYCODE_SPACE = Keyboard.keycodes['spacebar']
KEYCODE_R = Keyboard.keycodes['r']
def __init__(self, state, on_exit_function=None, debugging=False, **kwargs):
super(ScreenWidget, self).__init__(**kwargs)
self.state = state
texture_db.insert_color_rect(state, 1.0, 1.0, "tl_null", 0.0, 0.0, 0.0)
self.display_widget = DisplayWidget(state)
self.on_exit_function = on_exit_function
self.add_widget(self.display_widget)
self.joystick = None
joystick_panel_height = 0.25
joystick_x = 0.01
joystick_y = 0.01
if controls.need_joystick(state) or controls.need_buttons(state):
joystick_height = 0.24
joystick_width = (joystick_height * Window.height) / Window.width
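            # Illustrative (added note): size_hint is relative to the window, so
            # multiplying by Window.height / Window.width keeps the joystick square;
            # e.g. a 1280x720 window with joystick_height 0.24 gives a width hint of
            # 0.24 * 720 / 1280 = 0.135.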
self.add_widget(Image(source="assets/img/ui/joy_panel.png",
size_hint=(1.0, joystick_panel_height),
pos_hint = {"x" : 0.0, "y" : 0.0},
allow_stretch = True,
),
)
if controls.need_joystick(state):
self.joystick = JoystickWidget(
size_hint=(joystick_width, joystick_height),
pos_hint = {"x" : joystick_x, "y" : joystick_y},)
self.add_widget(self.joystick)
Clock.schedule_interval(self.on_timer, 0.1)
if controls.need_buttons(state):
button_width = joystick_width / 2.1
button_height = joystick_height / 2.1
button_width_big = 2 * button_width
button_height_big = 2 * button_height
background_file = "assets/img/ui/joy_button.png"
background_down_file = "assets/img/ui/joy_button_down.png"
background_file_big = "assets/img/ui/joy_button_big.png"
background_down_file_big = "assets/img/ui/joy_button_down_big.png"
button_defs = controls.get_buttons(state)
if button_defs:
if button_defs[0][controls.IDX_CONTROL_BUTTON_POS] == "right":
if button_defs[0][controls.IDX_CONTROL_BUTTON_SIZE] == "small":
button_0 = Button(
background_normal=background_file,
background_down=background_down_file,
size_hint=(button_width, button_height),
pos_hint = {"x" : 1.0 - button_width - 0.01, "y" : 0.0 + 0.01},
)
else:
button_0 = Button(
background_normal=background_file_big,
background_down=background_down_file_big,
size_hint=(button_width_big, button_height_big),
pos_hint={"x" : 1.0 - button_width_big - 0.01, "y" : 0.0 + 0.01},
)
elif button_defs[0][controls.IDX_CONTROL_BUTTON_POS] == "left":
if button_defs[0][controls.IDX_CONTROL_BUTTON_SIZE] == "small":
button_0 = Button(
background_normal=background_file,
background_down=background_down_file,
size_hint=(button_width, button_height),
pos_hint = {"x" : joystick_x, "y" : joystick_y},
)
else:
button_0 = Button(
background_normal=background_file_big,
background_down=background_down_file_big,
size_hint=(button_width_big, button_height_big),
pos_hint = {"x" : joystick_x, "y" : joystick_y},
)
self.add_widget(button_0)
if button_defs[0][controls.IDX_CONTROL_BUTTON_POS] == "right":
if button_defs[0][controls.IDX_CONTROL_BUTTON_SIZE] == "small":
pass
else:
self.add_widget(Image(source="assets/img/ui/button_a.png",
allow_stretch = True,
pos_hint = {"x" : 1.0 - button_width_big - 0.01, "y" : 0.0 + 0.01},
size_hint=(button_width_big, button_height_big),
))
else:
if button_defs[0][controls.IDX_CONTROL_BUTTON_SIZE] == "small":
pass
else:
self.add_widget(Image(source="assets/img/ui/button_a.png",
allow_stretch = True,
size_hint=(button_width_big, button_height_big),
pos_hint = {"x" : joystick_x, "y" : joystick_y},
|
frankosan/pypers
|
pypers/steps/picard/reordersam.py
|
Python
|
gpl-3.0
| 1,863
| 0.011809
|
import os
from pypers.core.step import CmdLineStep
class ReorderSam(CmdLineStep):
spec = {
"version": "0.0.1",
"descr": [
"Runs ReorderSam to reorder chromosomes into GATK order"
],
"args":
{
"inputs": [
{
"name" : "input_bam",
"type" : "file",
"iterable" : True,
"descr" : "the input bam file",
},
{
"name" : "reference",
"type" : "ref_genome",
"tool" : "reordersam",
"descr" : "Reference whole genome fasta"
}
],
"outputs": [
{
"name" : "output_bam",
"type" : "file",
"value" : "dummy",
"descr" : "the reordered output bam",
}
],
"params": [
{
"name" : "jvm_args",
"value" : "-Xmx{{jvm_memory}}g -Djava.io.tmpdir={{output_dir}}",
"descr" : "java virtual machine arguments",
"readonly" : True
}
]
},
"cmd": [
"/usr/bin/java {{jvm_args}} -jar /software/pypers/picard-tools/picard-tools-1.119/picard-tools-1.119/ReorderSam.jar",
" I
|
={{input_bam}} O={{output_bam}} CREATE_INDEX=True R={{reference}}"
],
"requirem
|
ents": {
"memory": '8'
}
}
def preprocess(self):
"""
Set output bam name
"""
file_name = os.path.basename(self.input_bam)
self.output_bam = file_name.replace('.bam','.reord.bam')
super(ReorderSam, self).preprocess()
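        # Illustrative (added note): an input_bam of 'sample.bam' yields an
        # output_bam of 'sample.reord.bam' before the templated ReorderSam
        # command above is rendered and executed.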
|
cpennington/edx-platform
|
pavelib/paver_tests/test_servers.py
|
Python
|
agpl-3.0
| 13,743
| 0.002401
|
"""Unit tests for the Paver server tasks."""
import ddt
from paver.easy import call_task
from ..utils.envs import Env
from .utils import PaverTestCase
EXPECTED_SASS_COMMAND = (
u"libsass {sass_directory}"
)
EXPECTED_COMMON_SASS_DIRECTORIES = [
u"common/static/sass",
]
EXPECTED_LMS_SASS_DIRECTORIES = [
u"lms/static/sass",
u"lms/static/certificates/sass",
]
EXPECTED_CMS_SASS_DIRECTORIES = [
u"cms/static/sass",
]
EXPECTED_LMS_SASS_COMMAND = [
u"python manage.py lms --settings={asset_settings} compile_sass lms ",
]
EXPECTED_CMS_SASS_COMMAND = [
u"python manage.py cms --settings={asset_settings} compile_sass cms ",
]
EXPECTED_COLLECT_STATIC_COMMAND = (
u'python manage.py {system} --settings={asset_settings} collectstatic '
u'--ignore "fixtures" --ignore "karma_*.js" --ignore "spec" '
u'--ignore "spec_helpers" --ignore "spec-helpers" --ignore "xmodule_js" '
u'--ignore "geoip" --ignore "sass" '
u'--noinput {log_string}'
)
EXPECTED_CELERY_COMMAND = (
u"DJANGO_SETTINGS_MODULE=lms.envs.{settings} celery worker "
u"--app=lms.celery:APP --beat --loglevel=INFO --pythonpath=."
)
EXPECTED_RUN_SERVER_COMMAND = (
u"python manage.py {system} --settings={settings} runserver --traceback --pythonpath=. 0.0.0.0:{port}"
)
EXPECTED_INDEX_COURSE_COMMAND = (
u"python manage.py {system} --settings={settings} reindex_course --setup"
)
EXPECTED_PRINT_SETTINGS_COMMAND = [
u"python manage.py lms --settings={settings} print_setting STATIC_ROOT 2>{log_file}",
u"python manage.py cms --settings={settings} print_setting STATIC_ROOT 2>{log_file}",
u"python manage.py lms --settings={settings} print_setting LMS_ROOT_URL 2>{log_file}",
u"python manage.py lms --settings={settings} print_setting JWT_AUTH 2>{log_file}",
u"python manage.py lms --settings={settings} print_setting EDXMKTG_USER_INFO_COOKIE_NAME 2>{log_file}",
u"python manage.py lms --settings={settings} print_setting WEBPACK_CONFIG_PATH 2>{log_file}"
]
EXPECTED_WEBPACK_COMMAND = (
u"NODE_ENV={node_env} STATIC_ROOT_LMS={static_root_lms} STATIC_ROOT_CMS={static_root_cms} "
u"LMS_ROOT_URL={lms_root_url} JWT_AUTH_COOKIE_HEADER_PAYLOAD={jwt_auth_cookie_header_payload_name} "
u"EDXMKTG_USER_INFO_
|
COOKIE_NAME={user_info_cookie_name} "
u"$(npm bin)/webpack --config={webpack_config_path}"
)
@ddt.ddt
class TestPaverServerTasks(PaverTestCase):
"""
Test the Paver server tasks.
"""
@ddt.data(
[{}],
[{"settings": "aws"}],
[{"asset-settings": "test_static_optimized"}],
[{"settings": "devstack_optimized", "asset-settings": "test_static_optimized"}],
[{"fast": True}],
[{"port": 8030}],
)
@ddt.unpack
def test_lms(self, options):
"""
Test the "devstack" task.
"""
self.verify_server_task("lms", options)
@ddt.data(
[{}],
[{"settings": "aws"}],
[{"asset-settings": "test_static_optimized"}],
[{"settings": "devstack_optimized", "asset-settings": "test_static_optimized"}],
[{"fast": True}],
[{"port": 8031}],
)
@ddt.unpack
def test_studio(self, options):
"""
Test the "devstack" task.
"""
self.verify_server_task("studio", options)
@ddt.data(
[{}],
[{"settings": "aws"}],
[{"asset-settings": "test_static_optimized"}],
[{"settings": "devstack_optimized", "asset-settings": "test_static_optimized"}],
[{"fast": True}],
[{"optimized": True}],
[{"optimized": True, "fast": True}],
[{"no-contracts": True}],
)
@ddt.unpack
def test_devstack(self, server_options):
"""
Test the "devstack" task.
"""
options = server_options.copy()
is_optimized = options.get("optimized", False)
expected_settings = "devstack_optimized" if is_optimized else options.get("settings", Env.DEVSTACK_SETTINGS)
# First test with LMS
options["system"] = "lms"
options["expected_messages"] = [
EXPECTED_INDEX_COURSE_COMMAND.format(
system="cms",
settings=expected_settings,
)
]
self.verify_server_task("devstack", options, contracts_default=True)
# Then test with Studio
options["system"] = "cms"
options["expected_messages"] = [
EXPECTED_INDEX_COURSE_COMMAND.format(
system="cms",
settings=expected_settings,
)
]
self.verify_server_task("devstack", options, contracts_default=True)
@ddt.data(
[{}],
[{"settings": "aws"}],
[{"asset_settings": "test_static_optimized"}],
[{"settings": "devstack_optimized", "asset-settings": "test_static_optimized"}],
[{"fast": True}],
[{"optimized": True}],
[{"optimized": True, "fast": True}],
)
@ddt.unpack
def test_run_all_servers(self, options):
"""
Test the "run_all_servers" task.
"""
self.verify_run_all_servers_task(options)
@ddt.data(
[{}],
[{"settings": "aws"}],
)
@ddt.unpack
def test_celery(self, options):
"""
Test the "celery" task.
"""
settings = options.get("settings", "devstack_with_worker")
call_task("pavelib.servers.celery", options=options)
self.assertEqual(self.task_messages, [EXPECTED_CELERY_COMMAND.format(settings=settings)])
@ddt.data(
[{}],
[{"settings": "aws"}],
)
@ddt.unpack
def test_update_db(self, options):
"""
Test the "update_db" task.
"""
settings = options.get("settings", Env.DEVSTACK_SETTINGS)
call_task("pavelib.servers.update_db", options=options)
# pylint: disable=line-too-long
db_command = u"NO_EDXAPP_SUDO=1 EDX_PLATFORM_SETTINGS_OVERRIDE={settings} /edx/bin/edxapp-migrate-{server} --traceback --pythonpath=. "
self.assertEqual(
self.task_messages,
[
db_command.format(server="lms", settings=settings),
db_command.format(server="cms", settings=settings),
]
)
@ddt.data(
["lms", {}],
["lms", {"settings": "aws"}],
["cms", {}],
["cms", {"settings": "aws"}],
)
@ddt.unpack
def test_check_settings(self, system, options):
"""
Test the "check_settings" task.
"""
settings = options.get("settings", Env.DEVSTACK_SETTINGS)
call_task("pavelib.servers.check_settings", args=[system, settings])
self.assertEqual(
self.task_messages,
[
u"echo 'import {system}.envs.{settings}' "
u"| python manage.py {system} --settings={settings} shell --plain --pythonpath=.".format(
system=system, settings=settings
),
]
)
def verify_server_task(self, task_name, options, contracts_default=False):
"""
Verify the output of a server task.
"""
log_string = options.get("log_string", "> /dev/null")
settings = options.get("settings", None)
asset_settings = options.get("asset-settings", None)
is_optimized = options.get("optimized", False)
is_fast = options.get("fast", False)
no_contracts = options.get("no-contracts", not contracts_default)
if task_name == "devstack":
system = options.get("system")
elif task_name == "studio":
system = "cms"
else:
system = "lms"
port = options.get("port", "8000" if system == "lms" else "8001")
self.reset_task_messages()
if task_name == "devstack":
args = ["studio" if system == "cms" else system]
if settings:
args.append("--settings={settings}".format(settings=settings))
if asset_settings:
args.append("--asset-settings={asset_settings}".format(asset_settings=asset_settings))
if is_optimized:
args.append("-
|
imvu/bluesteel
|
app/logic/gitrepo/models/GitParentModel.py
|
Python
|
mit
| 713
| 0.007013
|
"""
|
Git Parent model """
from django.db import models
class GitParentEntry(models.Model):
""" Git Parent """
project = models.ForeignKey('gitrepo.GitProjectEntry', related_name='git_parent_project')
parent = models.ForeignKey('gitrepo.GitCommitEntry', related_name='git_parent_commit')
son = models.ForeignKey('gitrepo.GitCommitEntry', related_name='git_son_commit')
order = models.IntegerField(default=0)
created_at = models.DateTimeField(auto_now_add=True, editable=False)
updated_at = models.DateTimeField(auto_now=True, editable=False)
def __unicode__(self):
return u'Parent:{0}, Son:{1}, order:{2}'.format(self.parent.commit_hash, self.son.commit_hash, self.order)
|
mikemoorester/ESM
|
nadirSiteSolution.py
|
Python
|
mit
| 7,907
| 0.020362
|
#!/usr/bin/env python
from __future__ import division, print_function, absolute_import
import matplotlib.pyplot as plt
from matplotlib import cm
import numpy as np
#import calendar
#import datetime as dt
#import pprint
#import pickle
import sys
def plotFontSize(ax,fontsize=8):
for item in ([ax.title, ax.xaxis.label, ax.yaxis.label] +
ax.get_xticklabels() + ax.get_yticklabels()):
item.set_fontsize(8)
return ax
#=====================================
if __name__ == "__main__":
# import warnings
# warnings.filterwarnings("ignore")
import argparse
parser = argparse.ArgumentParser(prog='nadirSiteSolution',description='Plot and analyse the pickle data object obtained from a nadir processing run',
formatter_class=argparse.RawTextHelpFormatter,
epilog='''\
Example:
To create a consolidated phase residual file:
> python nadirSolution.py --model -f ./t/YAR2.2012.CL3
''')
#===================================================================
parser.add_argument('--about','-a',dest='about',default=False,action='store_true',help="Print meta data from solution file then exit")
#===================================================================
parser.add_argument('-f','--f1', dest='solutionFile', default='',help="Pickled solution file")
parser.add_argument('-n', dest='nfile', default='',help="Numpy solution file")
parser.add_argument('--pf',dest='post_fit',default=False,action='store_true',help="Plot post fit residuals")
#===================================================================
# Plot options
#===================================================================
parser.add_argument('--plot',dest='plot', default=False, action='store_true', help="Produce an elevation dependent plot of ESM phase residuals")
parser.add_argument('--SITEPCV',dest='sitePCV', default=False, action='store_true', help="Plot the site PCV estimates")
parser.add_argument('--ps','--plot_save',dest='plot_save',default=False,action='store_true', help="Save the plots in png format")
#===================================================================
# Compare Solutions
#===================================================================
parser.add_argument('--compare',dest='compare',default=False,action='store_true',help="Compare two solutions")
parser.add_argument('--f2', dest='comp2', default='',help="Pickled solution file")
# Debug function, not needed
args = parser.parse_args()
#if len(args.nfile) < 1 :
# args.nfile = args.solutionFile + ".sol.npz"
#args.compare_nfile = args.comp2 + ".sol.npz"
#=======================================================================================================
#
# Parse pickle data structure
#
#=======================================================================================================
# with open(args.solutionFile,'rb') as pklID:
# meta = pickle.load(pklID)
# # Just print the meta data and exit
# if args.about:
# pprint.pprint(meta)
# sys.exit(0)
# if args.post_fit:
# npzfile = np.load(args.nfile)
# prefit = npzfile['prefit']
# prefit_sums = npzfile['prefit_sums']
# prefit_res = npzfile['prefit_res']
# postfit = npzfile['postfit']
# postfit_sums = npzfile['postfit_sums']
# postfit_res = npzfile['postfit_res']
# numObs = npzfile['numObs']
# numObs_sums = npzfile['numObs_sums']
# fig = plt.figure()
# #fig.canvas.set_window_title("All SVNs")
# ax = fig.add_subplot(111)
# ax.plot(nad,np.sqrt(postfit_sums[siz:eiz]/numObs_sums[siz:eiz])/np.sqrt(prefit_sums[siz:eiz]/numObs_sums[siz:eiz]),'r-')
# plt.show()
# sys.exit(0)
npzfile = np.load(args.nfile)
model = npzfile['model']
stdev = npzfile['stdev']
site_freq = npzfile['site_freq']
ele_model = npzfile['ele_model']
ele_stdev = npzfile['ele_model_stdev']
ele_site_freq = npzfile['ele_site_freq']
#if args.compare:
# compare_npzfile = np.load(args.compare_nfile)
# compare_Sol = compare_npzfile['sol']
# compare_Cov = compare_npzfile['cov']
# compare_nadir_freq = compare_npzfile['nadirfreq']
# compare_variances = np.diag(compare_Cov)
#zen = np.linspace(0,90, int(90./meta['zen_grid'])+1 )
#az = np.linspace(0,360. - meta['zen_grid'], int(360./meta['zen_grid']) )
print("Shape of model:",np.shape(model))
zen = np.linspace(0,90, np.shape(model)[1] )
print("zen:",zen,np.shape(model)[1])
az = np.linspace(0,360. - 360./np.shape(model)[0], np.shape(model)[0] )
print("az:",az,np.shape(model)[0])
#============================================
# Plot the Elevation depndent phase residual corrections
#============================================
fig = plt.figure()
#fig.canvas.set_window_title("All SVNs")
ax = fig.add_subplot(111)
ax.errorbar(zen,ele_model[0,:],yerr=ele_stdev[0,:]/2.,linewidth=2)
ax1 = ax.twinx()
ax1.bar(zen,ele_site_freq[0,:],0.1,color='gray',alpha=0.75)
ax1.set_ylabel('Number of observations',fontsize=8)
ax.set_xlabel('Zenith angle (degrees)',fontsize=8)
ax.set_ylabel('Correction to PCV (mm)',fontsize=8)
ax = plotFontSize(ax,8)
ax1 = plotFontSize(ax1,8)
plt.tight_layout()
#============================================
fig = plt.figure()
#fig.canvas.set_window_title("All SVNs")
ax = fig.add_subplot(111)
for i in range(0,np.size(az)):
for j in range(0,np.size(zen)):
ax.errorbar(zen[j],model[i,j],yerr=np.sqrt(stdev[i,j])/2.,linewidth=2)
#ax.plot(zen[j],model[i,j],'b.')
#ax1 = ax.twinx()
#ax1.bar(nad,nadir_freq[ctr,:],0.1,color='gray',alpha=0.75)
#ax1.set_ylabel('Number of observations',fontsize=8)
ax.set_xlabel('Zenith angle (degrees)',fontsize=8)
ax.set_ylabel('Correction to PCV (mm)',fontsize=8)
ax = plotFontSize(ax,8)
plt.tight_layout()
#============================================
# Do a polar plot
#============================================
fig = plt.figure()
#fig.canvas.set_window_title("All SVNs")
ax = fig.add_subplot(111,polar='true')
ax.set_theta_direction(-1)
ax.set_theta_offset(np.radians(90.))
ax.set_ylim([0,1])
ax.set_rgrids((0.00001, np.radians(20.)/np.pi*2, np.radians(40.)/np.pi*2,np.radians(60.)/np.pi*2,np.radians(80.)/np.pi*2),labels=('0', '20', '40', '60', '80'),angle=180)
ma,mz = np.meshgrid(az,zen,indexing='ij')
ma = ma.reshape(ma.size,)
mz = mz.reshape(mz.size,)
polar = ax.scatter(np.radians(ma), np.radians(mz)/np.pi*2., c=model[:,:], s=50, alpha=1., cmap=cm.RdBu,vmin=-15,vmax=15, lw=0)
cbar = plt.colorbar(polar,shrink=0.75,pad=.10)
cbar.ax.tick_params(labelsize=8)
cbar.set_label('ESM (mm)',size=8)
ax = plotFontSize(ax,8)
plt.tight_layout()
fig = plt.figure()
#fig.canvas.set_window_title("All SVNs")
ax = fig.add_subplot(111,polar='true')
ax.set_theta_direction(-1)
ax.set_theta_offset(np.radians(90.))
ax.set_ylim([0,1])
ax.set_rgrids((0.00001, np.radians(20.)/np.pi*2, np.radians(40.)/np.pi*2,np.radians(60.)/np.pi*2,np.radians(80.)/np.pi*2),labels=('0', '20', '40', '60', '80'),angle=180)
polar = ax.scatter(np.radians(ma), np.radians(mz)/np.pi*2., c=stdev[:,:], s=50, alpha=1., cmap=cm.RdBu,vmin=-15,vmax=15, lw=0)
cbar = plt.colorbar(polar,shrink=0.75,pad=.10)
cbar.ax.tick_params(labelsize=8)
cbar.set_label('Standard Deviation (mm)',size=8)
ax = plotFontSize(ax,8)
plt.tight_layout()
plt.show()
print("FINISHED")
|
OCA/account-financial-reporting
|
account_financial_report/tests/test_trial_balance.py
|
Python
|
agpl-3.0
| 33,159
| 0.000513
|
# Author: Julien Coux
# Copyright 2016 Camptocamp SA
# Copyright 2020 ForgeFlow S.L. (https://www.forgeflow.com)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo.tests import common
class TestTrialBalanceReport(common.TransactionCase):
def setUp(self):
super(TestTrialBalanceReport, self).setUp()
group_obj = self.env["account.group"]
acc_obj = self.env["account.account"]
self.group1 = group_obj.create({"code_prefix": "1", "name": "Group 1"})
self.group11 = group_obj.create(
{"code_prefix": "11", "name": "Group 11", "parent_id": self.group1.id}
)
self.group2 = group_obj.create({"code_prefix": "2", "name": "Group 2"})
self.account100 = acc_obj.create(
{
"code": "100",
"name": "Account 100",
"group_id": self.group1.id,
"user_type_id": self.env.ref("account.data_account_type_receivable").id,
"reconcile": True,
}
)
self.account110 = self.env["account.account"].search(
[
(
"user_type_id",
"=",
self.env.ref("account.data_unaffected_earnings").id,
)
],
limit=1,
)
self.account200 = acc_obj.create(
{
"code": "200",
"name": "Account 200",
"group_id": self.group2.id,
"user_type_id": self.env.ref(
"account.data_account_type_other_income"
).id,
}
)
self.account300 = acc_obj.create(
{
"code": "300",
"name": "Account 300",
"user_type_id": self.env.ref(
"account.data_account_type_other_income"
).id,
}
)
self.account301 = acc_obj.create(
{
"code": "301",
"name": "Account 301",
"group_id": self.group2.id,
"user_type_id": self.env.ref(
"account.data_account_type_other_income"
).id,
}
)
self.previous_fy_date_start = "2015-01-01"
self.previous_fy_date_end = "2015-12-31"
self.fy_date_start = "2016-01-01"
self.fy_date_end = "2016-12-31"
self.date_start = "2016-01-01"
self.date_end = "2016-12-31"
self.partner = self.env.ref("base.res_partner_12")
self.unaffected_account = self.env["account.account"].search(
[
(
"user_type_id",
"=",
self.env.ref("account.data_unaffected_earnings").id,
)
],
limit=1,
)
def _add_move(
self,
date,
receivable_debit,
receivable_credit,
income_debit,
income_credit,
unaffected_debit=0,
unaffected_credit=0,
):
journal = self.env["account.journal"].search([], limit=1)
partner = self.env.ref("base.res_partner_12")
move_vals = {
"journal_id": journal.id,
"date": date,
"line_ids": [
(
0,
0,
{
"debit": receivable_debit,
"credit": receivable_credit,
"partner_id": partner.id,
"account_id": self.account100.id,
},
),
(
0,
0,
{
"debit": income_debit,
"credit": income_credit,
"partner_id": partner.id,
"account_id": self.account200.id,
},
),
(
0,
0,
{
"debit": unaffected_debit,
"credit": unaffected_credit,
"partner_id": partner.id,
"account_id": self.account110.id,
},
),
(
0,
0,
{
"debit": receivable_debit,
"credit": receivable_credit,
"partner_id": partner.id,
"account_id": self.account300.id,
},
),
(
0,
0,
{
"debit": receivable_credit,
"credit": receivable_debit,
"partner_id": partner.id,
"account_id": self.account301.id,
},
),
],
}
move = self.env["account.move"].create(move_vals)
move.post()
def _get_report_lines(
self, with_partners=False, account_ids=False, hierarchy_on="computed"
):
company = self.env.ref("base.main_company")
trial_balance = self.env["trial.balance.report.wizard"].create(
{
"date_from": self.date_start,
"date_to": self.date_end,
"target_move": "posted",
"hide_account_at_0": True,
"hierarchy_on": hierarchy_on,
"company_id": company.id,
"account_ids": account_ids,
"fy_start_date": self.fy_date_start,
"show_partner_details": with_partners,
}
)
data = trial_balance._prepare_report_trial_balance()
res_data = self.env[
"report.account_financial_report.trial_balance"
]._get_report_values(trial_balance, data)
return res_data
def check_account_in_report(self, account_id, trial_balance):
account_in_report = False
for account in trial_balance:
if account["id"] == account_id and account["type"] == "account_type":
account_in_report = True
break
return account_in_report
def _get_account_lines(self, account_id, trial_balance):
lines = False
for account in trial_balance:
if account["id"] == account_id and account["type"] == "account_type":
lines = {
"initial_balance": account["initial_balance"],
"debit": account["debit"],
"credit": account["credit"],
"final_balance": account["ending_balance"],
}
return lines
def _get_group_lines(self, group_id, trial_balance):
lines = False
for group in trial_balance:
if group["id"] == group_id and group["type"] == "group_type":
lines = {
"initial_balance": group["initial_balance"],
"debit": group["debit"],
"credit": group["credit"],
"final_balance": group["ending_balance"],
}
return lines
def check_partner_in_report(self, account_id, partner_id, total_amount):
partner_in_report = False
if account_id in total_amount.keys():
if partner_id in total_amount[account_id]:
partner_in_report = True
return partner_in_report
def _get_partner_lines(self, account_id, partner_id, total_amount):
acc_id = account_id
prt_id = partner_id
lines = {
"initial_balance": total_amount[acc_id][prt_id]["initial_balance"],
"debit": total_amount[acc_id][prt_id]["debit"],
"credit": total_amount[acc_id][prt_id]["credit"],
"final_balance": total_amount[acc_id][prt_id]["ending_balance"],
}
return lines
def _sum_all_accounts(self, trial_balance, feature):
total = 0.0
for account in trial_balance:
|
JALusk/SuperBoL
|
tests/test_mag2flux.py
|
Python
|
mit
| 2,006
| 0.011964
|
import unittest
import numpy as np
from astropy import constants as const
from astropy import units as u
from .context import superbol
from superbol.mag2flux import *
from yaml import load
class TestMag2Flux(unittest.TestCase):
def setUp(self):
self.filter_band = "V"
self.magnitude = 8.8
self.uncertainty = 0.02
self.effective_wl = 5450.0 * u.AA
self.flux_at_zero_mag = 3.631E-9 * (u.erg / (u.s * u.cm**2 * u.AA))
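        # Illustrative check (added note): for V = 8.8 mag the expected flux is
        # 3.631e-9 * 10**(-0.4 * 8.8) ~= 1.10e-12 erg / (s * cm**2 * AA).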
def test_mag2flux_converts_mag_to_correct_flux(self):
expected = self.flux_at_zero_mag * 10**(-0.4 * self.magnitude)
result_flux, result_uncertainty = mag2flux(self.magnitude,
self.uncertainty,
self.effective_wl,
self.flux_at_zero_mag)
self.assertEqual(expected.value, result_flux)
def test_mag2flux_converts_mag_to_correct_flux_uncertainty(self):
expected = np.sqrt((self.flux_at_zero_mag * -0.4 * np.log(10) * 10**(-0.4 * self.magnitude) * self.uncertainty)**2)
result_flux, result_uncertainty = mag2flux(self.magnitude,
self.uncertainty,
self.effective_wl,
self.flux_at_zero_mag)
self.assertAlmostEqual(expected.value, result_uncertainty)
def test_flux_at_mag_zero(self):
mag = 0.0
expected = self.flux_at_zero_mag
result_flux, result_uncertainty = mag2flux(0.0,
self.uncertainty,
self.effective_wl,
self.flux_at_zero_mag)
self.assertEqual(expected.value, result_flux)
|
kingvuplus/gui_test4
|
upgrade.py
|
Python
|
gpl-2.0
| 3,532
| 0.003681
|
#Embedded file name: /usr/lib/enigma2/python/upgrade.py
import os
from subprocess import Popen, PIPE
opkgDestinations = ['/']
opkgStatusPath = ''
overwriteSettingsFiles = False
overwriteDriversFiles = True
overwriteEmusFiles = True
overwritePiconsFiles = True
overwriteBootlogoFiles = True
overwriteSpinnerFiles = True
def findMountPoint(path):
path = os.path.abspath(path)
while not os.path.ismount(path):
path = os.path.dirname(path)
return path
def opkgExtraDestinations():
global opkgDestinations
return ''.join([ ' --add-dest %s:%s' % (i, i) for i in opkgDestinations ])
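# Illustrative (added note): with opkgDestinations == ['/', '/media/hdd'] the
# call returns ' --add-dest /:/ --add-dest /media/hdd:/media/hdd'
# ('/media/hdd' here is only an example mount point).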
def opkgAddDestination(mountpoint):
if mountpoint not in opkgDestinations:
opkgDestinations.append(mountpoint)
print '[Ipkg] Added to OPKG destinations:', mountpoint
mounts = os.listdir('/media')
for mount in mounts:
mount = os.path.join('/media', mount)
if mount and not mount.startswith('/media/net'):
if opkgStatusPath == '':
opkgStatusPath = 'var/lib/opkg/status'
if not os.path.exists(os.path.join('/', opkgStatusPath)):
opkgStatusPath = 'usr/lib/opkg/status'
if os.path.exists(os.path.join(mount, opkgStatusPath)):
opkgAddDestination(mount)
def getValue(line):
dummy = line.split('=')
if len(dummy) != 2:
print 'Error: Wrong formatted settings file'
exit
if dummy[1] == 'false':
return False
elif dummy[1] == 'true':
return True
else:
return False
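# Illustrative (added note): a settings line such as
#   config.plugins.softwaremanager.overwriteDriversFiles=true
# is split on '=' by getValue(), which returns True for 'true' and False for
# 'false' or anything else.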
p = Popen('opkg list-upgradable', stdout=PIPE, stderr=PIPE, shell=True)
stdout, stderr = p.communicate()
if stderr != '':
print 'Error occured:', stderr
exit
try:
f = open('/etc/enigma2/settings', 'r')
lines = f.readlines()
f.close()
except:
print 'Error opening /etc/enigma2/settings file'
for line in lines:
if line.startswith('config.plugins.softwaremanager.overwriteSettingsFiles'):
overwriteSettingsFiles = getValue(line)
elif line.startswith('config.plugins.softwaremanager.overwriteDriversFiles'):
overwriteDriversFiles = getValue(line)
elif line.startswith('config.plugins.softwaremanager.overwriteEmusFiles'):
overwriteEmusFiles = getValue(line)
elif line.startswith('config.plugins.softwaremanager.overwritePiconsFiles'):
overwritePiconsFiles = getValue(line)
elif line.startswith('config.plugins.softwaremanager.overwriteBootlogoFiles'):
overwriteBootlogoFiles = getValue(line)
elif line.startswith('config.plugins.softwaremanager.overwriteSpinnerFiles'):
overwriteSpinnerFiles = getValue(line)
packages = stdout.split('\n')
try:
packages.remove('')
except:
pass
upgradePackages = []
for package in packages:
item = package.split(' - ', 2)
if item[0].find('-settings-') > -1 and not overwriteSettingsFiles:
continue
elif item[0].find('kernel-module-') > -1 and not overwriteDriversFiles:
continue
elif item[0].find('-softcams-') > -1 and not overwriteEmusFiles:
continue
elif item[0].find('-picons-') > -1 and not overwritePiconsFiles:
continue
elif item[0].find('-bootlogo') > -1 and not overwriteBootlogoFiles:
continue
elif item[0].find('italysat-spinner') > -1 and not overwriteSpinnerFiles:
continue
else:
upgradePackages.append(item[0])
for p in upgradePackages:
os.system('opkg ' + opkgExtraDestinations() + ' upgrade ' + p + ' 2>&1 | tee /home/root/ipkgupgrade.log')
os.system('reboot')
|
eicher31/compassion-switzerland
|
report_compassion/models/contract_group.py
|
Python
|
agpl-3.0
| 5,996
| 0.001167
|
##############################################################################
#
# Copyright (C) 2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
import logging
from datetime import datetime
from babel.dates import format_date
from odoo import api, models, fields, _
from odoo.exceptions import UserError
logger = logging.getLogger(__name__)
COMPASSION_QRR = "CH2430808007681434347"
class ContractGroup(models.Model):
_inherit = ["recurring.contract.group", "translatable.model"]
_name = "recurring.contract.group"
@api.multi
def get_months(self, months, sponsorships):
"""
        Given the list of months to print,
returns the list of months grouped by the frequency payment
of the contract group and only containing unpaid sponsorships.
:param months: list of dates (date, datetime or string)
:param sponsorships: recordset of included sponsorships
:return: list of dates grouped in string format
"""
self.ensure_one()
freq = self.advance_billing_months
        payment_mode = self.with_context(lang="en_US").payment_mode_id
# Take first open invoice or next_invoice_date
open_invoice = min([i for i in sponsorships.mapped("first_open_invoice") if i])
if open_invoice:
first_invoice_date = open_invoice.replace(day=1)
else:
raise UserError(_("No open invoice found !"))
for i, month in enumerate(months):
if isinstance(month, str):
months[i] = fields.Date.from_string(month)
if isinstance(month, datetime):
months[i] = month.date()
# check if first invoice is after last month
if first_invoice_date > months[-1]:
raise UserError(_(f"First invoice is after Date Stop"))
# Only keep unpaid months
valid_months = [
fields.Date.to_string(month) for month in months
if month >= first_invoice_date
]
if "Permanent" in payment_mode.name:
return valid_months[:1]
if freq == 1:
return valid_months
else:
# Group months
result = list()
count = 1
month_start = ""
for month in valid_months:
if not month_start:
month_start = month
if count < freq:
count += 1
else:
result.append(month_start + " - " + month)
month_start = ""
count = 1
if not result:
result.append(month_start + " - " + month)
return result
@api.multi
def get_communication(self, start, stop, sponsorships):
"""
Get the communication to print on the payment slip for sponsorship
:param start: the month start for which we print the payment slip (string)
:param stop: the month stop for which we print the payment slip (string)
:param sponsorships: recordset of sponsorships for which to print the
payment slips
:return: string of the communication
"""
self.ensure_one()
payment_mode = self.with_context(lang="en_US").payment_mode_id
amount = self.get_amount(start, stop, sponsorships)
valid = sponsorships
number_sponsorship = len(sponsorships)
date_start = fields.Date.to_date(start)
date_stop = fields.Date.to_date(stop)
vals = {
"amount": f"CHF {amount:.0f}",
"subject": _("for") + " ",
"date": "",
}
locale = self.partner_id.lang
context = {"lang": locale}
if start and stop:
start_date = format_date(date_start, format="MMMM yyyy", locale=locale)
stop_date = format_date(date_stop, format="MMMM yyyy", locale=locale)
if start == stop:
vals["date"] = start_date
else:
vals["date"] = f"{start_date} - {stop_date}"
if "Permanent" in payment_mode.name:
vals["payment_type"] = _("ISR for standing order")
vals["date"] = ""
else:
vals["payment_type"] = (
_("ISR") + " " + self.contract_ids[0].with_context(
context).group_freq
)
if number_sponsorship > 1:
vals["subject"] += str(number_sponsorship) + " " + _("sponsorships")
elif number_sponsorship and valid.child_id:
vals["subject"] = valid.child_id.preferred_name + " ({})".format(
valid.child_id.local_id
)
elif number_sponsorship and not valid.child_id and valid.display_name:
product_name = self.env["product.product"].search(
[("id", "in", valid.mapped("contract_line_ids.product_id").ids)]
)
vals["subject"] = ", ".join(product_name.mapped("thanks_name"))
return (
f"{vals['payment_type']} {vals['amount']}"
f"<br/>{vals['subject']}<br/>{vals['date']}"
)
@api.model
def get_company_qrr_account(self):
""" Utility to find the bvr account of the company. """
return self.env["res.partner.bank"].search([
('acc_number', '=', COMPASSION_QRR)])
def get_amount(self, start, stop, sponsorships):
self.ensure_one()
amount = sum(sponsorships.mapped("total_amount"))
months = int(stop.split("-")[1]) - int(start.split("-")[1]) + 1
payment_mode = self.with_context(lang="en_US").payment_mode_id
if "Permanent" in payment_mode.name:
months = self.advance_billing_months
return amount * months
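    # Illustrative note, not part of the original model: with start="2021-01-01"
    # and stop="2021-03-01" the month count above is 3 - 1 + 1 = 3, so a
    # sponsorship total of CHF 50 yields get_amount() == 150. For a "Permanent"
    # payment mode the count is replaced by advance_billing_months instead.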
|
Deepomatic/DIGITS
|
digits/model/images/forms.py
|
Python
|
bsd-3-clause
| 3,851
| 0.001298
|
# Copyright (c) 2014-2017, NVIDIA CORPORATION. All rights reserved.
from __future__ import absolute_import
from wtforms import validators
from ..forms import ModelForm
from digits import utils
class ImageModelForm(ModelForm):
"""
Defines the form used to create a new ImageModelJob
"""
crop_size = utils.forms.IntegerField(
'Crop Size',
validators=[
validators.NumberRange(min=1),
validators.Optional()
],
tooltip=("If specified, during training a random square crop will be "
"taken from the input image before using as input for the network.")
)
use_mean = utils.forms.SelectField(
'Subtract Mean',
choices=[
('none', 'None'),
('image', 'Image'),
('pixel', 'Pixel'),
],
default='image',
tooltip="Subtract the mean file or mean pixel for this dataset from each image."
)
aug_flip = utils.forms.SelectField(
'Flipping',
choices=[
('none', 'None'),
('fliplr', 'Horizontal'),
('flipud', 'Vertical'),
            ('fliplrud', 'Horizontal and/or Vertical'),
],
default='none',
tooltip="Randomly flips each image during batch preprocessing."
)
aug_quad_rot = utils.forms.SelectField(
'Quadrilateral Rotation',
choices=[
('none', 'None'),
('rot90', '0, 90 or 270 degrees'),
('rot180', '0 or 180 degrees'),
('rotall', '0, 90, 180 or 270 degrees.'),
],
default='none',
tooltip="Randomly rotates (90
|
degree steps) each image during batch preprocessing."
)
aug_rot = utils.forms.IntegerField(
'Rotation (+- deg)',
default=0,
validators=[
validators.NumberRange(min=0, max=180)
],
tooltip="The uniform-random rotation angle that will be performed during batch preprocessing."
)
aug_scale = utils.forms.FloatField(
'Rescale (stddev)',
default=0,
validators=[
validators.NumberRange(min=0, max=1)
],
tooltip=("Retaining image size, the image is rescaled with a "
"+-stddev of this parameter. Suggested value is 0.07.")
)
aug_noise = utils.forms.FloatField(
'Noise (stddev)',
default=0,
validators=[
validators.NumberRange(min=0, max=1)
],
tooltip=("Adds AWGN (Additive White Gaussian Noise) during batch "
"preprocessing, assuming [0 1] pixel-value range. Suggested value is 0.03.")
)
aug_hsv_use = utils.forms.BooleanField(
'HSV Shifting',
default=False,
tooltip=("Augmentation by normal-distributed random shifts in HSV "
"color space, assuming [0 1] pixel-value range."),
)
aug_hsv_h = utils.forms.FloatField(
'Hue',
default=0.02,
validators=[
validators.NumberRange(min=0, max=0.5)
],
tooltip=("Standard deviation of a shift that will be performed during "
"preprocessing, assuming [0 1] pixel-value range.")
)
aug_hsv_s = utils.forms.FloatField(
'Saturation',
default=0.04,
validators=[
validators.NumberRange(min=0, max=0.5)
],
tooltip=("Standard deviation of a shift that will be performed during "
"preprocessing, assuming [0 1] pixel-value range.")
)
aug_hsv_v = utils.forms.FloatField(
'Value',
default=0.06,
validators=[
validators.NumberRange(min=0, max=0.5)
],
tooltip=("Standard deviation of a shift that will be performed during "
"preprocessing, assuming [0 1] pixel-value range.")
)
|
laborautonomo/Mailpile
|
mailpile/plugins/setup_magic.py
|
Python
|
apache-2.0
| 40,094
| 0.000499
|
import os
import random
import sys
from datetime import date
from urllib import urlencode
import mailpile.auth
from mailpile.defaults import CONFIG_RULES
from mailpile.i18n import ListTranslations, ActivateTranslation, gettext
from mailpile.i18n import gettext as _
from mailpile.i18n import ngettext as _n
from mailpile.plugins import PluginManager
from mailpile.plugins import PLUGINS
from mailpile.plugins.contacts import AddProfile
from mailpile.plugins.contacts import ListProfiles
from mailpile.plugins.migrate import Migrate
from mailpile.plugins.tags import AddTag
from mailpile.commands import Command
from mailpile.config import SecurePassphraseStorage
from mailpile.crypto.gpgi import GnuPG, SignatureInfo, EncryptionInfo
from mailpile.crypto.gpgi import GnuPGKeyGenerator, GnuPGKeyEditor
from mailpile.httpd import BLOCK_HTTPD_LOCK, Idle_HTTPD
from mailpile.smtp_client import SendMail, SendMailError
from mailpile.urlmap import UrlMap
from mailpile.ui import Session
from mailpile.util import *
_ = lambda s: s
_plugins = PluginManager(builtin=__file__)
##[ Commands ]################################################################
class SetupMagic(Command):
"""Perform initial setup"""
SYNOPSIS = (None, None, None, None)
ORDER = ('Internals', 0)
LOG_PROGRESS = True
TAGS = {
'New': {
'type': 'unread',
'label': False,
'display': 'invisible',
'icon': 'icon-new',
'label_color': '03-gray-dark',
'name': _('New'),
},
'Inbox': {
'type': 'inbox',
'display': 'priority',
'display_order': 2,
'icon': 'icon-inbox',
'label_color': '06-blue',
'name': _('Inbox'),
},
'Blank': {
'type': 'blank',
'flag_editable': True,
'display': 'invisible',
            'name': _('Blank'),
},
'Drafts': {
'type': 'drafts',
'flag_editable': True,
'display': 'priority',
'display_order': 1,
'icon': 'icon-compose',
'label_color': '03-gray-dark',
'name': _('Drafts'),
},
'Outbox': {
'type': 'outbox',
'display': 'priority',
'display_order': 3,
'icon': 'icon-outbox',
'label_color': '06-blue',
            'name': _('Outbox'),
},
'Sent': {
'type': 'sent',
'display': 'priority',
'display_order': 4,
'icon': 'icon-sent',
'label_color': '03-gray-dark',
'name': _('Sent'),
},
'Spam': {
'type': 'spam',
'flag_hides': True,
'display': 'priority',
'display_order': 5,
'icon': 'icon-spam',
'label_color': '10-orange',
'name': _('Spam'),
},
'MaybeSpam': {
'display': 'invisible',
'icon': 'icon-spam',
'label_color': '10-orange',
'name': _('MaybeSpam'),
},
'Ham': {
'type': 'ham',
'display': 'invisible',
'name': _('Ham'),
},
'Trash': {
'type': 'trash',
'flag_hides': True,
'display': 'priority',
'display_order': 6,
'icon': 'icon-trash',
'label_color': '13-brown',
'name': _('Trash'),
},
# These are magical tags that perform searches and show
# messages in contextual views.
'All Mail': {
'type': 'tag',
'icon': 'icon-logo',
'label_color': '06-blue',
'search_terms': 'all:mail',
'name': _('All Mail'),
'display_order': 1000,
},
'Photos': {
'type': 'tag',
'icon': 'icon-photos',
'label_color': '08-green',
'search_terms': 'att:jpg',
'name': _('Photos'),
'template': 'photos',
'display_order': 1001,
},
'Files': {
'type': 'tag',
'icon': 'icon-document',
'label_color': '06-blue',
'search_terms': 'has:attachment',
'name': _('Files'),
'template': 'files',
'display_order': 1002,
},
'Links': {
'type': 'tag',
'icon': 'icon-links',
'label_color': '12-red',
'search_terms': 'http',
'name': _('Links'),
'display_order': 1003,
},
# These are internal tags, used for tracking user actions on
# messages, as input for machine learning algorithms. These get
# automatically added, and may be automatically removed as well
# to keep the working sets reasonably small.
'mp_rpl': {'type': 'replied', 'label': False, 'display': 'invisible'},
'mp_fwd': {'type': 'fwded', 'label': False, 'display': 'invisible'},
'mp_tag': {'type': 'tagged', 'label': False, 'display': 'invisible'},
'mp_read': {'type': 'read', 'label': False, 'display': 'invisible'},
'mp_ham': {'type': 'ham', 'label': False, 'display': 'invisible'},
}
def basic_app_config(self, session,
save_and_update_workers=True,
want_daemons=True):
# Create local mailboxes
session.config.open_local_mailbox(session)
# Create standard tags and filters
created = []
for t in self.TAGS:
if not session.config.get_tag_id(t):
AddTag(session, arg=[t]).run(save=False)
created.append(t)
session.config.get_tag(t).update(self.TAGS[t])
for stype, statuses in (('sig', SignatureInfo.STATUSES),
('enc', EncryptionInfo.STATUSES)):
for status in statuses:
tagname = 'mp_%s-%s' % (stype, status)
if not session.config.get_tag_id(tagname):
AddTag(session, arg=[tagname]).run(save=False)
created.append(tagname)
session.config.get_tag(tagname).update({
'type': 'attribute',
'display': 'invisible',
'label': False,
})
if 'New' in created:
session.ui.notify(_('Created default tags'))
# Import all the basic plugins
reload_config = False
for plugin in PLUGINS:
if plugin not in session.config.sys.plugins:
session.config.sys.plugins.append(plugin)
reload_config = True
for plugin in session.config.plugins.WANTED:
if plugin in session.config.plugins.available():
session.config.sys.plugins.append(plugin)
if reload_config:
with session.config._lock:
session.config.save()
session.config.load(session)
try:
# If spambayes is not installed, this will fail
import mailpile.plugins.autotag_sb
if 'autotag_sb' not in session.config.sys.plugins:
session.config.sys.plugins.append('autotag_sb')
session.ui.notify(_('Enabling spambayes autotagger'))
except ImportError:
session.ui.warning(_('Please install spambayes '
'for super awesome spam filtering'))
vcard_importers = session.config.prefs.vcard.importers
if not vcard_importers.gravatar:
vcard_importers.gravatar.append({'active': True})
session.ui.notify(_('Enabling gravatar image importer'))
gpg_home = os.path.expanduser('~/.gnupg')
if os.path.exists(gpg_home) and not vcard_importers.gpg:
vcard_importers.gpg.append({'active': True,
'gpg_home': gpg_home})
session.ui.notify(_('Importing contacts from GPG keyring'))
if ('autotag_sb' in session.config.sys.plugins and
len(session.config.prefs.autotag
|
thunderhoser/GewitterGefahr
|
gewittergefahr/deep_learning/input_examples.py
|
Python
|
mit
| 103,640
| 0.000251
|
"""Deals with input examples for deep learning.
One "input example" is one storm object.
--- NOTATION ---
The following letters will be used throughout this module.
E = number of examples (storm objects)
M = number of rows in each radar image
N = number of columns in each radar image
H_r = number of radar heights
F_r = number of radar fields (or "variables" or "channels")
H_s = number of sounding heights
F_s = number of sounding fields (or "variables" or "channels")
C = number of radar field/height pairs
"""
import copy
import glob
import os.path
import numpy
import netCDF4
from gewittergefahr.gg_utils import radar_utils
from gewittergefahr.gg_utils import soundings
from gewittergefahr.gg_utils import target_val_utils
from gewittergefahr.gg_utils import time_conversion
from gewittergefahr.gg_utils import number_rounding
from gewittergefahr.gg_utils import temperature_conversions as temp_conversion
from gewittergefahr.gg_utils import storm_tracking_utils as tracking_utils
from gewittergefahr.gg_utils import file_system_utils
from gewittergefahr.gg_utils import error_checking
from gewittergefahr.deep_learning import storm_images
from gewittergefahr.deep_learning import deep_learning_utils as dl_utils
SEPARATOR_STRING = '\n\n' + '*' * 50 + '\n\n'
BATCH_NUMBER_REGEX = '[0-9][0-9][0-9][0-9][0-9][0-9][0-9]'
TIME_FORMAT_IN_FILE_NAMES = '%Y-%m-%d-%H%M%S'
DEFAULT_NUM_EXAMPLES_PER_OUT_CHUNK = 8
DEFAULT_NUM_EXAMPLES_PER_OUT_FILE = 128
NUM_BATCHES_PER_DIRECTORY = 1000
AZIMUTHAL_SHEAR_FIELD_NAMES = [
radar_utils.LOW_LEVEL_SHEAR_NAME, radar_utils.MID_LEVEL_SHEAR_NAME
]
TARGET_NAMES_KEY = 'target_names'
ROTATED_GRIDS_KEY = 'rotated_grids'
ROTATED_GRID_SPACING_KEY = 'rotated_grid_spacing_metres'
FULL_IDS_KEY = 'full_storm_id_strings'
STORM_TIMES_KEY = 'storm_times_unix_sec'
TARGET_MATRIX_KEY = 'target_matrix'
RADAR_IMAGE_MATRIX_KEY = 'radar_image_matrix'
RADAR_FIELDS_KEY = 'radar_field_names'
RADAR_HEIGHTS_KEY = 'radar_heights_m_agl'
SOUNDING_FIELDS_KEY = 'sounding_field_names'
SOUNDING_MATRIX_KEY = 'sounding_matrix'
SOUNDING_HEIGHTS_KEY = 'sounding_heights_m_agl'
REFL_IMAGE_MATRIX_KEY = 'reflectivity_image_matrix_dbz'
AZ_SHEAR_IMAGE_MATRIX_KEY = 'az_shear_image_matrix_s01'
MAIN_KEYS = [
FULL_IDS_KEY, STORM_TIMES_KEY, RADAR_IMAGE_MATRIX_KEY,
REFL_IMAGE_MATRIX_KEY, AZ_SHEAR_IMAGE_MATRIX_KEY, TARGET_MATRIX_KEY,
SOUNDING_MATRIX_KEY
]
REQUIRED_MAIN_KEYS = [
    FULL_IDS_KEY, STORM_TIMES_KEY, TARGET_MATRIX_KEY
]
METADATA_KEYS = [
TARGET_NAMES_KEY, ROTATED_GRIDS_KEY, ROTATED_GRID_SPACING_KEY,
RADAR_FIELDS_KEY, RADAR_HEIGHTS_KEY, SOUNDING_FIELDS_KEY,
SOUNDING_HEIGHTS_KEY
]
TARGET_NAME_KEY = 'target_name'
TARGET_VALUES_KEY = 'target_values'
EXAMPLE_DIMENSION_KEY = 'storm_object'
ROW_DIMENSION_KEY = 'grid_row'
COLUMN_DIMENSION_KEY = 'grid_column'
REFL_ROW_DIMENSION_KEY = 'reflectivity_grid_row'
REFL_COLUMN_DIMENSION_KEY = 'reflectivity_grid_column'
AZ_SHEAR_ROW_DIMENSION_KEY = 'az_shear_grid_row'
AZ_SHEAR_COLUMN_DIMENSION_KEY = 'az_shear_grid_column'
RADAR_FIELD_DIM_KEY = 'radar_field'
RADAR_HEIGHT_DIM_KEY = 'radar_height'
RADAR_CHANNEL_DIM_KEY = 'radar_channel'
SOUNDING_FIELD_DIM_KEY = 'sounding_field'
SOUNDING_HEIGHT_DIM_KEY = 'sounding_height'
TARGET_VARIABLE_DIM_KEY = 'target_variable'
STORM_ID_CHAR_DIM_KEY = 'storm_id_character'
RADAR_FIELD_CHAR_DIM_KEY = 'radar_field_name_character'
SOUNDING_FIELD_CHAR_DIM_KEY = 'sounding_field_name_character'
TARGET_NAME_CHAR_DIM_KEY = 'target_name_character'
RADAR_FIELD_KEY = 'radar_field_name'
OPERATION_NAME_KEY = 'operation_name'
MIN_HEIGHT_KEY = 'min_height_m_agl'
MAX_HEIGHT_KEY = 'max_height_m_agl'
MIN_OPERATION_NAME = 'min'
MAX_OPERATION_NAME = 'max'
MEAN_OPERATION_NAME = 'mean'
VALID_LAYER_OPERATION_NAMES = [
MIN_OPERATION_NAME, MAX_OPERATION_NAME, MEAN_OPERATION_NAME
]
OPERATION_NAME_TO_FUNCTION_DICT = {
MIN_OPERATION_NAME: numpy.min,
MAX_OPERATION_NAME: numpy.max,
MEAN_OPERATION_NAME: numpy.mean
}
MIN_RADAR_HEIGHTS_KEY = 'min_radar_heights_m_agl'
MAX_RADAR_HEIGHTS_KEY = 'max_radar_heights_m_agl'
RADAR_LAYER_OPERATION_NAMES_KEY = 'radar_layer_operation_names'
def _read_soundings(sounding_file_name, sounding_field_names, radar_image_dict):
"""Reads storm-centered soundings and matches w storm-centered radar imgs.
:param sounding_file_name: Path to input file (will be read by
`soundings.read_soundings`).
:param sounding_field_names: See doc for `soundings.read_soundings`.
:param radar_image_dict: Dictionary created by
`storm_images.read_storm_images`.
:return: sounding_dict: Dictionary created by `soundings.read_soundings`.
:return: radar_image_dict: Same as input, but excluding storm objects with
no sounding.
"""
print('Reading data from: "{0:s}"...'.format(sounding_file_name))
sounding_dict, _ = soundings.read_soundings(
netcdf_file_name=sounding_file_name,
field_names_to_keep=sounding_field_names,
full_id_strings_to_keep=radar_image_dict[storm_images.FULL_IDS_KEY],
init_times_to_keep_unix_sec=radar_image_dict[
storm_images.VALID_TIMES_KEY]
)
num_examples_with_soundings = len(sounding_dict[soundings.FULL_IDS_KEY])
if num_examples_with_soundings == 0:
return None, None
radar_full_id_strings = numpy.array(
radar_image_dict[storm_images.FULL_IDS_KEY]
)
orig_storm_times_unix_sec = (
radar_image_dict[storm_images.VALID_TIMES_KEY] + 0
)
indices_to_keep = []
for i in range(num_examples_with_soundings):
this_index = numpy.where(numpy.logical_and(
radar_full_id_strings == sounding_dict[soundings.FULL_IDS_KEY][i],
orig_storm_times_unix_sec ==
sounding_dict[soundings.INITIAL_TIMES_KEY][i]
))[0][0]
indices_to_keep.append(this_index)
indices_to_keep = numpy.array(indices_to_keep, dtype=int)
radar_image_dict[storm_images.STORM_IMAGE_MATRIX_KEY] = radar_image_dict[
storm_images.STORM_IMAGE_MATRIX_KEY
][indices_to_keep, ...]
radar_image_dict[storm_images.FULL_IDS_KEY] = sounding_dict[
soundings.FULL_IDS_KEY
]
radar_image_dict[storm_images.VALID_TIMES_KEY] = sounding_dict[
soundings.INITIAL_TIMES_KEY
]
return sounding_dict, radar_image_dict
def _create_2d_examples(
radar_file_names, full_id_strings, storm_times_unix_sec,
target_matrix, sounding_file_name=None, sounding_field_names=None):
"""Creates 2-D examples for one file time.
E = number of desired examples (storm objects)
e = number of examples returned
T = number of target variables
:param radar_file_names: length-C list of paths to storm-centered radar
images. Files will be read by `storm_images.read_storm_images`.
:param full_id_strings: length-E list with full IDs of storm objects to
return.
:param storm_times_unix_sec: length-E numpy array with valid times of storm
objects to return.
:param target_matrix: E-by-T numpy array of target values (integer class
labels).
:param sounding_file_name: Path to sounding file (will be read by
`soundings.read_soundings`). If `sounding_file_name is None`, examples
will not include soundings.
:param sounding_field_names: See doc for `soundings.read_soundings`.
:return: example_dict: Same as input for `write_example_file`, but without
key "target_names".
"""
orig_full_id_strings = copy.deepcopy(full_id_strings)
orig_storm_times_unix_sec = storm_times_unix_sec + 0
print('Reading data from: "{0:s}"...'.format(radar_file_names[0]))
this_radar_image_dict = storm_images.read_storm_images(
netcdf_file_name=radar_file_names[0],
full_id_strings_to_keep=full_id_strings,
valid_times_to_keep_unix_sec=storm_times_unix_sec)
if this_radar_image_dict is None:
return None
if sounding_file_name is None:
sounding_matrix = None
sounding_field_names = None
sounding_heights_m_agl = None
else:
sounding_dict, this_radar_image_dict = _read_soundings(
sounding_file_name=so
|
codewarrior0/pytest
|
_pytest/python.py
|
Python
|
mit
| 84,482
| 0.002
|
""" Python test discovery, setup and run of test functions. """
import re
import fnmatch
import functools
import py
import inspect
import sys
import pytest
from _pytest.mark import MarkDecorator, MarkerError
from py._code.code import TerminalRepr
try:
import enum
except ImportError: # pragma: no cover
# Only available in Python 3.4+ or as a backport
enum = None
import _pytest
import pluggy
cutdir2 = py.path.local(_pytest.__file__).dirpath()
cutdir1 = py.path.local(pluggy.__file__.rstrip("oc"))
NoneType = type(None)
NOTSET = object()
isfunction = inspect.isfunction
isclass = inspect.isclass
callable = py.builtin.callable
# used to work around a python2 exception info leak
exc_clear = getattr(sys, 'exc_clear', lambda: None)
# The type of re.compile objects is not exposed in Python.
REGEX_TYPE = type(re.compile(''))
def filter_traceback(entry):
return entry.path != cutdir1 and not entry.path.relto(cutdir2)
def get_real_func(obj):
""" gets the real function object of the (possibly) wrapped object by
functools.wraps or functools.partial.
"""
while hasattr(obj, "__wrapped__"):
obj = obj.__wrapped__
if isinstance(obj, functools.partial):
obj = obj.func
return obj
def getfslineno(obj):
# xxx let decorators etc specify a sane ordering
obj = get_real_func(obj)
if hasattr(obj, 'place_as'):
obj = obj.place_as
fslineno = py.code.getfslineno(obj)
assert isinstance(fslineno[1], int), obj
return fslineno
def getimfunc(func):
try:
return func.__func__
except AttributeError:
try:
return func.im_func
except AttributeError:
return func
def safe_getattr(object, name, default):
""" Like getattr but return default upon any Exception.
Attribute access can potentially fail for 'evil' Python objects.
See issue214
"""
try:
return getattr(object, name, default)
except Exception:
return default
class FixtureFunctionMarker:
def __init__(self, scope, params,
autouse=False, yieldctx=False, ids=None):
self.scope = scope
self.params = params
self.autouse = autouse
self.yieldctx = yieldctx
self.ids = ids
def __call__(self, function):
if isclass(function):
raise ValueError(
"class fixtures not supported (may be in the future)")
function._pytestfixturefunction = self
return function
def fixture(scope="function", params=None, autouse=False, ids=None):
""" (return a) decorator to mark a fixture factory function.
    This decorator can be used (with or without parameters) to define
a fixture function. The name of the fixture function can later be
referenced to cause its invocation ahead of running tests: test
modules or classes can use the pytest.mark.usefixtures(fixturename)
marker. Test functions can directly use fixture names as input
arguments in which case the fixture instance returned from the fixture
function will be injected.
:arg scope: the scope for which this fixture is shared, one of
"function" (default), "class", "module", "session".
:arg params: an optional list of parameters which will cause multiple
invocations of the fixture function and all of the tests
using it.
:arg autouse: if True, the fixture func is activated for all tests that
can see it. If False (the default) then an explicit
reference is needed to activate the fixture.
:arg ids: list of string ids each corresponding to the params
so that they are part of the test id. If no ids are provided
they will be generated automatically from the params.
"""
if callable(scope) and params is None and autouse == False:
# direct decoration
return FixtureFunctionMarker(
"function", params, autouse)(scope)
if params is not None and not isinstance(params, (list, tuple)):
params = list(params)
return FixtureFunctionMarker(scope, params, autouse, ids=ids)
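# Illustrative usage sketch, not part of the original module: a test file could
# request a fixture by argument name, e.g.
#
#     @pytest.fixture(scope="module", params=[1, 2])
#     def number(request):
#         return request.param
#
#     def test_number_is_positive(number):
#         assert number > 0
#
# which runs the test once per param, with the fixture value supplied by the
# decorated factory for each parametrized invocation.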
def yield_fixture(scope="function", params=None, autouse=False, ids=None):
""" (return a) decorator to mark a yield-fixture factory function
(EXPERIMENTAL).
This takes the same arguments as :py:func:`pytest.fixture` but
expects a fixture function to use a ``yield`` instead of a ``return``
statement to provide a fixture. See
http://pytest.org/en/latest/yieldfixture.html for more info.
"""
if callable(scope) and params is None and autouse == False:
# direct decoration
return FixtureFunctionMarker(
"function", params, autous
|
e, yieldctx=True)(scope)
else:
        return FixtureFunctionMarker(scope, params, autouse,
yieldctx=True, ids=ids)
defaultfuncargprefixmarker = fixture()
def pyobj_property(name):
def get(self):
node = self.getparent(getattr(pytest, name))
if node is not None:
return node.obj
doc = "python %s object this node was collected from (can be None)." % (
name.lower(),)
return property(get, None, None, doc)
def pytest_addoption(parser):
group = parser.getgroup("general")
group.addoption('--fixtures', '--funcargs',
action="store_true", dest="showfixtures", default=False,
help="show available fixtures, sorted by plugin appearance")
parser.addini("usefixtures", type="args", default=[],
help="list of default fixtures to be used with this project")
parser.addini("python_files", type="args",
default=['test_*.py', '*_test.py'],
help="glob-style file patterns for Python test module discovery")
parser.addini("python_classes", type="args", default=["Test",],
help="prefixes or glob names for Python test class discovery")
parser.addini("python_functions", type="args", default=["test",],
help="prefixes or glob names for Python test function and "
"method discovery")
def pytest_cmdline_main(config):
if config.option.showfixtures:
showfixtures(config)
return 0
def pytest_generate_tests(metafunc):
# those alternative spellings are common - raise a specific error to alert
# the user
alt_spellings = ['parameterize', 'parametrise', 'parameterise']
for attr in alt_spellings:
if hasattr(metafunc.function, attr):
msg = "{0} has '{1}', spelling should be 'parametrize'"
raise MarkerError(msg.format(metafunc.function.__name__, attr))
try:
markers = metafunc.function.parametrize
except AttributeError:
return
for marker in markers:
metafunc.parametrize(*marker.args, **marker.kwargs)
def pytest_configure(config):
config.addinivalue_line("markers",
"parametrize(argnames, argvalues): call a test function multiple "
"times passing in different arguments in turn. argvalues generally "
"needs to be a list of values if argnames specifies only one name "
"or a list of tuples of values if argnames specifies multiple names. "
"Example: @parametrize('arg1', [1,2]) would lead to two calls of the "
"decorated test function, one with arg1=1 and another with arg1=2."
"see http://pytest.org/latest/parametrize.html for more info and "
"examples."
)
config.addinivalue_line("markers",
"usefixtures(fixturename1, fixturename2, ...): mark tests as needing "
"all of the specified fixtures. see http://pytest.org/latest/fixture.html#usefixtures "
)
def pytest_sessionstart(session):
session._fixturemanager = FixtureManager(session)
@pytest.hookimpl(trylast=True)
def pytest_namespace():
raises.Exception = pytest.fail.Exception
return {
'fixture': fixture,
'yield_fixture': yield_fixture,
'raises' : raises,
'collect': {
'Module': Module, 'Class': Class, 'Instance': Instance,
'Function': Function, 'Generator': Generator,
'_fillfuncargs': fillfixtures
|
aptuz/mezzanine_onepage_theme
|
one_page/customapp/templatetags/testtag.py
|
Python
|
mit
| 1,240
| 0.004032
|
from __future__ import unicode_literals
from future.builtins import int
from collections import defaultdict
from django.core.urlresolvers import reverse
from django.template.defaultfilters import linebreaksbr, urlize
from mezzanine import template
from mezzanine.conf import settings
from mezzanine.generic.forms import ThreadedCommentForm
from mezzanine.generic.models import ThreadedComment
from mezzanine.utils.importing import import_dotted_path
from mezzanine.pages.models import Page, RichTextPage
register = template.Library()
@register.assignment_tag
def allpages():
    page_fields = [ 'content', 'created', 'description', 'expiry_date', 'gen_description', u'id', 'keywords', u'keywords_string', 'publish_date', 'short_url', 'slug', 'status', 'title', 'titles', 'updated']
output = []
# import pdb;pdb.set_trace()
    AllPages = RichTextPage.objects.all()
for item in AllPages:
temp = {}
for fld in page_fields:
temp[fld] = getattr(item, fld)
output.append(temp)
return {
'pages': output
}
@register.filter()
def remove_slash(value):
return '#' + value[1:-1]
@register.filter()
def lower(value):
# import pdb;pdb.set_trace()
return value.lower()
|
EPiCS/soundgates
|
hardware/tools/to_samples.py
|
Python
|
mit
| 505
| 0.017822
|
#!/usr/bin/env python
# coding: utf8
import sys;
import struct;
def do_convert(filename):
fid_in = open(filename, 'rb')
    fid_out = open('sound_conv.out','w')
data = fid_in.read(4) # read 4 bytes = 32 Bit Sample
while data:
ser = str(struct.unpack('<i', data)[0]) + '\n'
fid_out.write(ser)
data = fid_in.read(4)
fid_in.close()
fid_out.close()
if __name__ == "__main__":
    print "Converting..."
do_convert(sys.argv[1])
print "done"
|
IsmaelRLG/UserBot
|
mods/translate/__init__.py
|
Python
|
mit
| 817
| 0.015912
|
# -*- coding: utf-8 -*-
"""
UserBot module
Copyright 2015, Ismael R. Lugo G.
"""
import translate
reload(translate)
from sysb import commands
from translate import lang
from translate import _
commands.addHandler('translate', '(tr|translate)2 (?P<in>[^ ]+) (?P<out>[^ ]+) '
'(?P<text>.*)', {'sintax': 'tr2 <input> <output> <text>',
'example': 'tr2 en es Hello!',
'alias': ('traslate2',),
'desc': _('Traduce un texto de un idioma a otro', lang)},
    anyuser=True)(translate.translate2_1)
commands.addHandler('translate', '(tr|translate) (?P<in>[^ ]+) (?P<out>[^ ]+) ('
'?P<text>.*)', {'sintax': 'tr <input> <output> <text>',
'example': 'tr en es Hello!',
'alias': ('traslate',),
'desc': _('Traduce un texto de un idioma a otro', lang)},
anyuser=True)(translate.translate2_2)
|
tokyo-jesus/university
|
src/python/koans/python3/runner/path_to_enlightenment.py
|
Python
|
unlicense
| 4,893
| 0.000204
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# The path to enlightenment starts with the following:
import unittest
from koans.about_asserts import AboutAsserts
from koans.about_strings import AboutStrings
from koans.about_none import AboutNone
from koans.about_lists import AboutLists
from koans.about_list_assignments import AboutListAssignments
from koans.about_dictionaries import AboutDictionaries
from koans.about_string_manipulation import AboutStringManipulation
from koans.about_tuples import AboutTuples
from koans.about_methods import AboutMethods
from koans.about_control_statements import AboutControlStatements
from koans.about_true_and_false import AboutTrueAndFalse
from koans.about_sets import AboutSets
from koans.about_triangle_project import AboutTriangleProject
from koans.about_exceptions import AboutExceptions
from koans.about_triangle_project2 import AboutTriangleProject2
from koans.about_iteration import AboutIteration
from koans.about_comprehension import AboutComprehension
from koans.about_generators import AboutGenerators
from koans.about_lambdas import AboutLambdas
from koans.about_scoring_project import AboutScoringProject
from koans.about_classes import AboutClasses
from koans.about_with_statements import AboutWithStatements
from koans.about_monkey_patching import AboutMonkeyPatching
from koans.about_dice_project import AboutDiceProject
from koans.about_method_bindings import AboutMethodBindings
from koans.about_decorating_with_functions import AboutDecoratingWithFunctions
from koans.about_decorating_with_classes import AboutDecoratingWithClasses
from koans.about_inheritance import AboutInheritance
from koans.about_multiple_inheritance import AboutMultipleInheritance
from koans.about_regex import AboutRegex
from koans.about_scope import AboutScope
from koans.about_modules import AboutModules
from koans.about_packages import AboutPackages
from koans.about_class_attributes import AboutClassAttributes
from koans.about_attribute_access import AboutAttributeAccess
from koans.about_deleting_objects import AboutDeletingObjects
from koans.about_proxy_object_project import *
from koans.about_extra_credit import AboutExtraCredit
def koans():
loader = unittest.TestLoader()
suite = unittest.TestSuite()
loader.sortTestMethodsUsing = None
suite.addTests(loader.loadTestsFromTestCase(AboutAsserts))
suite.addTests(loader.loadTestsFromTestCase(AboutStrings))
suite.addTests(loader.loadTestsFromTestCase(AboutNone))
suite.addTests(loader.loadTestsFromTestCase(AboutLists))
suite.addTests(loader.loadTestsFromTestCase(AboutListAssignments))
suite.addTests(loader.loadTestsFromTestCase(AboutDictionaries))
suite.addTests(loader.loadTestsFromTestCase(AboutStringManipulation))
suite.addTests(loader.loadTestsFromTestCase(AboutTuples))
suite.addTests(loader.loadTestsFromTestCase(AboutMethods))
suite.addTests(loader.loadTestsFromTestCase(AboutControlStatements))
suite.addTests(loader.loadTestsFromTestCase(AboutTrueAndFalse))
suite.addTests(loader.loadTestsFromTestCase(AboutSets))
suite.addTests(loader.loadTestsFromTestCase(AboutTriangleProject))
suite.addTests(loader.loadTestsFromTestCase(AboutExceptions))
suite.addTests(loader.loadTestsFromTestCase(AboutTriangleProject2))
suite.addTests(loader.loadTestsFromTestCase(AboutIteration))
suite.addTests(loader.loadTestsFromTestCase(AboutComprehension))
suite.addTests(loader.loadTestsFromTestCase(AboutGenerators))
suite.addTests(loader.loadTestsFromTestCase(AboutLambdas))
suite.addTests(loader.loadTestsFromTestCase(AboutScoringProject))
suite.addTests(loader.loadTestsFromTestCase(AboutClasses))
suite.addTests(loader.loadTestsFromTestCase(AboutWithStatements))
suite.addTests(loader.loadTestsFromTestCase(AboutMonkeyPatching))
suite.addTests(loader.loadTestsFromTestCase(AboutDiceProject))
suite.addTests(loader.loadTestsFromTestCase(AboutMethodBindings))
    suite.addTests(loader.loadTestsFromTestCase(AboutDecoratingWithFunctions))
    suite.addTests(loader.loadTestsFromTestCase(AboutDecoratingWithClasses))
suite.addTests(loader.loadTestsFromTestCase(AboutInheritance))
suite.addTests(loader.loadTestsFromTestCase(AboutMultipleInheritance))
suite.addTests(loader.loadTestsFromTestCase(AboutScope))
suite.addTests(loader.loadTestsFromTestCase(AboutModules))
suite.addTests(loader.loadTestsFromTestCase(AboutPackages))
suite.addTests(loader.loadTestsFromTestCase(AboutClassAttributes))
suite.addTests(loader.loadTestsFromTestCase(AboutAttributeAccess))
suite.addTests(loader.loadTestsFromTestCase(AboutDeletingObjects))
suite.addTests(loader.loadTestsFromTestCase(AboutProxyObjectProject))
suite.addTests(loader.loadTestsFromTestCase(TelevisionTest))
suite.addTests(loader.loadTestsFromTestCase(AboutExtraCredit))
return suite
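# Illustrative sketch, not part of the original runner module: the suite built
# above can be executed with the standard unittest runner, e.g.
#
#     if __name__ == '__main__':
#         unittest.TextTestRunner(verbosity=2).run(koans())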
|
TheTrueH4cks0r/YeagerBomb
|
FiLe_AD.py
|
Python
|
gpl-3.0
| 315
| 0.060317
|
from datetime import datetime
path= str(datetime.now().date())
per= datetime.now()
per_h= str(per.hour)
per_m= str(per.minute)
timeit= str("%s:%s"%(per_h,per_m))
def Final(file_name):
NPfile= str("%s-%s"%(file_name,timeit))
A_Dump= "airodump-ng wlan0 -w "
ADFN= A_Dump+NPfile
    return ADFN
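# Illustrative note, not part of the original script: Final("capture") returns
# a command string such as "airodump-ng wlan0 -w capture-14:30", i.e. the
# airodump-ng invocation with the output prefix stamped with the current hour
# and minute ("capture" and the time are example values).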
|
hekra01/mercurial
|
contrib/import-checker.py
|
Python
|
gpl-2.0
| 8,337
| 0.001679
|
import ast
import os
import sys
# Import a minimal set of stdlib modules needed for list_stdlib_modules()
# to work when run from a virtualenv. The modules were chosen empirically
# so that the return value matches the return value without virtualenv.
import BaseHTTPServer
import zlib
def dotted_name_of_path(path, trimpure=False):
"""Given a relative path to a source file, return its dotted module name.
>>> dotted_name_of_path('mercurial/error.py')
'mercurial.error'
>>> dotted_name_of_path('mercurial/pure/parsers.py', trimpure=True)
'mercurial.parsers'
>>> dotted_name_of_path('zlibmodule.so')
'zlib'
"""
parts = path.split('/')
parts[-1] = parts[-1].split('.', 1)[0] # remove .py and .so and .ARCH.so
if parts[-1].endswith('module'):
parts[-1] = parts[-1][:-6]
if trimpure:
return '.'.join(p for p in parts if p != 'pure')
return '.'.join(parts)
def list_stdlib_modules():
"""List the modules present in the stdlib.
>>> mods = set(list_stdlib_modules())
>>> 'BaseHTTPServer' in mods
True
os.path isn't really a module, so it's missing:
>>> 'os.path' in mods
False
sys requires special treatment, because it's baked into the
interpreter, but it should still appear:
>>> 'sys' in mods
True
>>> 'collections' in mods
True
>>> 'cStringIO' in mods
True
"""
for m in sys.builtin_module_names:
yield m
# These modules only exist on windows, but we should always
# consider them stdlib.
for m in ['msvcrt', '_winreg']:
yield m
# These get missed too
for m in 'ctypes', 'email':
yield m
yield 'builtins' # python3 only
for m in 'fcntl', 'grp', 'pwd', 'termios': # Unix only
yield m
stdlib_prefixes = set([sys.prefix, sys.exec_prefix])
# We need to supplement the list of prefixes for the search to work
# when run from within a virtualenv.
for mod in (BaseHTTPServer, zlib):
try:
# Not all module objects have a __file__ attribute.
filename = mod.__file__
except AttributeError:
continue
dirname = os.path.dirname(filename)
for prefix in stdlib_prefixes:
            if dirname.startswith(prefix):
# Then this directory is redundant.
break
else:
stdlib_prefixes.add(dirname)
for libpath in sys.path:
# We want to walk everything in sys.path that starts with
# something in stdlib_prefixes. check-code suppressed because
# the ast module used by this script implies the availability
# of any().
if not any(libpath.startswith(p) for p in stdlib_prefixes): # no-py24
continue
if 'site-packages' in libpath:
continue
for top, dirs, files in os.walk(libpath):
for name in files:
if name == '__init__.py':
continue
if not (name.endswith('.py') or name.endswith('.so')
or name.endswith('.pyd')):
continue
full_path = os.path.join(top, name)
if 'site-packages' in full_path:
continue
rel_path = full_path[len(libpath) + 1:]
mod = dotted_name_of_path(rel_path)
yield mod
stdlib_modules = set(list_stdlib_modules())
def imported_modules(source, ignore_nested=False):
"""Given the source of a file as a string, yield the names
imported by that file.
Args:
source: The python source to examine as a string.
ignore_nested: If true, import statements that do not start in
column zero will be ignored.
Returns:
A list of module names imported by the given source.
>>> sorted(imported_modules(
... 'import foo ; from baz import bar; import foo.qux'))
['baz.bar', 'foo', 'foo.qux']
>>> sorted(imported_modules(
... '''import foo
... def wat():
... import bar
... ''', ignore_nested=True))
['foo']
"""
for node in ast.walk(ast.parse(source)):
if ignore_nested and getattr(node, 'col_offset', 0) > 0:
continue
if isinstance(node, ast.Import):
for n in node.names:
yield n.name
elif isinstance(node, ast.ImportFrom):
prefix = node.module + '.'
for n in node.names:
yield prefix + n.name
def verify_stdlib_on_own_line(source):
"""Given some python source, verify that stdlib imports are done
in separate statements from relative local module imports.
Observing this limitation is important as it works around an
annoying lib2to3 bug in relative import rewrites:
http://bugs.python.org/issue19510.
>>> list(verify_stdlib_on_own_line('import sys, foo'))
['mixed imports\\n stdlib: sys\\n relative: foo']
>>> list(verify_stdlib_on_own_line('import sys, os'))
[]
>>> list(verify_stdlib_on_own_line('import foo, bar'))
[]
"""
for node in ast.walk(ast.parse(source)):
if isinstance(node, ast.Import):
from_stdlib = {False: [], True: []}
for n in node.names:
from_stdlib[n.name in stdlib_modules].append(n.name)
if from_stdlib[True] and from_stdlib[False]:
yield ('mixed imports\n stdlib: %s\n relative: %s' %
(', '.join(sorted(from_stdlib[True])),
', '.join(sorted(from_stdlib[False]))))
class CircularImport(Exception):
pass
def checkmod(mod, imports):
shortest = {}
visit = [[mod]]
while visit:
path = visit.pop(0)
for i in sorted(imports.get(path[-1], [])):
if i not in stdlib_modules and not i.startswith('mercurial.'):
i = mod.rsplit('.', 1)[0] + '.' + i
if len(path) < shortest.get(i, 1000):
shortest[i] = len(path)
if i in path:
if i == path[0]:
raise CircularImport(path)
continue
visit.append(path + [i])
def rotatecycle(cycle):
"""arrange a cycle so that the lexicographically first module listed first
>>> rotatecycle(['foo', 'bar'])
['bar', 'foo', 'bar']
"""
lowest = min(cycle)
idx = cycle.index(lowest)
return cycle[idx:] + cycle[:idx] + [lowest]
def find_cycles(imports):
"""Find cycles in an already-loaded import graph.
>>> imports = {'top.foo': ['bar', 'os.path', 'qux'],
... 'top.bar': ['baz', 'sys'],
... 'top.baz': ['foo'],
... 'top.qux': ['foo']}
>>> print '\\n'.join(sorted(find_cycles(imports)))
top.bar -> top.baz -> top.foo -> top.bar
top.foo -> top.qux -> top.foo
"""
cycles = set()
for mod in sorted(imports.iterkeys()):
try:
checkmod(mod, imports)
except CircularImport, e:
cycle = e.args[0]
cycles.add(" -> ".join(rotatecycle(cycle)))
return cycles
def _cycle_sortkey(c):
return len(c), c
def main(argv):
if len(argv) < 2:
print 'Usage: %s file [file] [file] ...'
return 1
used_imports = {}
any_errors = False
for source_path in argv[1:]:
f = open(source_path)
modname = dotted_name_of_path(source_path, trimpure=True)
src = f.read()
used_imports[modname] = sorted(
imported_modules(src, ignore_nested=True))
for error in verify_stdlib_on_own_line(src):
any_errors = True
print source_path, error
f.close()
cycles = find_cycles(used_imports)
if cycles:
firstmods = set()
for c in sorted(cycles, key=_cycle_sortkey):
first = c.split()[0]
# As a rough cut, ignore any cycle that starts with the
# same module as some other cycle. Otherwise we see lots
# of cycles that are effectively duplicates.
if first in firstmods:
continue
print 'Import c
|
antoinecarme/pyaf
|
tests/artificial/transf_RelativeDifference/trend_ConstantTrend/cycle_0/ar_12/test_artificial_1024_RelativeDifference_ConstantTrend_0_12_20.py
|
Python
|
bsd-3-clause
| 279
| 0.082437
|
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 1024 , FREQ = 'D', seed = 0, trendtype = "ConstantTrend", cycle_length = 0, transform = "RelativeDifference", sigma = 0.0, exog_count = 20, ar_order = 12);
|
Jimdo/thrift
|
test/features/string_limit.py
|
Python
|
apache-2.0
| 1,665
| 0.001201
|
#!/usr/bin/env python
import argparse
import sys
from util import add_common_args, init_protocol
from local_thrift import thrift
from thrift.Thrift import TMessageType, TType
# TODO: generate from ThriftTest.thrift
def test_string(proto, value):
method_name = 'testString'
ttype = TType.STRING
proto.writeMessageBegin(method_name, TMessageType.CALL, 3)
proto.writeStructBegin(method_name + '_args')
proto.writeFieldBegin('thing', ttype, 1)
proto.writeString(value)
proto.writeFieldEnd()
proto.writeFieldStop()
proto.writeStructEnd()
proto.writeMessageEnd()
proto.trans.flush()
_, mtype, _ = proto.readMessageBegin()
assert mtype == TMessageType.REPLY
proto.readStructBegin()
_, ftype, fid = proto.readFieldBegin()
assert fid == 0
assert ftype == ttype
result = proto.readString()
proto.readFieldEnd()
_, ftype, _ = proto.readFieldBegin()
assert ftype == TType.STOP
proto.readStructEnd()
proto.readMessageEnd()
assert value == result
def main(argv):
p = argparse.ArgumentParser()
add_common_args(p)
    p.add_argument('--limit', type=int)
args = p.parse_args()
proto = init_protocol(args)
test_string(proto, 'a' * (args.limit - 1))
test_string(proto, 'a' * (args.limit - 1))
print('[OK]: limit - 1')
    test_string(proto, 'a' * args.limit)
test_string(proto, 'a' * args.limit)
print('[OK]: just limit')
try:
test_string(proto, 'a' * (args.limit + 1))
except:
print('[OK]: limit + 1')
else:
print('[ERROR]: limit + 1')
assert False
if __name__ == '__main__':
main(sys.argv[1:])
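# Illustrative note, not part of the original test: main() sends testString
# calls of length limit - 1, limit and limit + 1 and expects only the last one
# to raise, which is how the configured string read-length limit is verified.
# The host/port/protocol flags come from util.add_common_args and are not
# shown here.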
|
pyocd/pyOCD
|
pyocd/target/builtin/target_lpc4088qsb.py
|
Python
|
apache-2.0
| 5,692
| 0.01669
|
# pyOCD debugger
# Copyright (c) 2006-2016 Arm Limited
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ...core.memory_map import (FlashRegion, RamRegion, MemoryMap)
from .target_LPC4088FBD144 import (LARGE_ERASE_SECTOR_WEIGHT, LARGE_PROGRAM_PAGE_WEIGHT, LPC4088)
from .target_LPC4088FBD144 import FLASH_ALGO as INTERNAL_FLASH_ALGO
FLASH_ALGO = {
'load_address' : 0x10000000,
'instructions' : [
0xE00ABE00, 0x062D780D, 0x24084068, 0xD3000040, 0x1E644058, 0x1C49D1FA, 0x2A001E52, 0x4770D1F2,
0x28100b00, 0x210ebf24, 0x00d0eb01, 0xe92d4770, 0xf8df4df0, 0x4606831c, 0x460c44c8, 0x0000f8d8,
0x1c402500, 0x0000f8c8, 0x0f01f010, 0x461749c1, 0x2080f44f, 0x63c8bf14, 0x05306388, 0xa2f8f8df,
0xf04f0d00, 0x44ca0b00, 0xf8cad111, 0xf44fb010, 0xf8ca5080, 0xe9ca6000, 0xf8ca0b01, 0xf8d8b00c,
0x4651000c, 0xf1a16882, 0x47900080, 0x2018b9c0, 0xb008f8ca, 0xb003e9ca, 0xf5b4b1cc, 0xbf8c7f80,
0x7b80f44f, 0x197046a3, 0x0b00e9ca, 0x000cf8d8, 0x19794aa9, 0x6843444a, 0x0080f1a2, 0xb1104798,
0xe8bd2001, 0x445d8df0, 0x040bebb4, 0x2000d1e5, 0x8df0e8bd, 0x41f0e92d, 0x8274f8df, 0x60e0f642,
0x4d9e44c8, 0x0008f8c8, 0x70282000, 0x732820aa, 0x73282055, 0xf8052001, 0x22000c40, 0xf0002112,
0x2200f91a, 0x4610210d, 0xf915f000, 0x210d2200, 0xf0002001, 0x2200f910, 0x20022113, 0xf90bf000,
0x68204c8c, 0x5000f440, 0x6a206020, 0x2084f440, 0x6c206220, 0x2000f440, 0xf44f6420, 0x63e72780,
0x61a6117e, 0xf4406c68, 0x64683080, 0xf8c52002, 0x22050134, 0xf0002107, 0x2205f8ee, 0x20002116,
0xf8e9f000, 0x210f2205, 0xf0002000, 0x2205f8e4, 0x20002110, 0xf8dff000, 0x21112205, 0xf0002000,
0x2205f8da, 0x20002112, 0xf8d5f000, 0xf44f4874, 0x6800727a, 0xf8c86940, 0xf8d8000c, 0xfbb11008,
0xf8d5f1f2, 0xf8d02134, 0xf002c000, 0xfbb1021f, 0x496cf3f2, 0xfba1486c, 0x08892103, 0x444822c0,
0x280047e0, 0x61e6bf04, 0x81f0e8bd, 0x61e663a7, 0xe8bd2001, 0x200081f0, 0xe92d4770, 0x4c6341f0,
0x444c2032, 0x251d2700, 0xe9c460a5, 0x4e600700, 0x0114f104, 0x47b04620, 0xb9806960, 0x60a52034,
0x0700e9c4, 0xf1044852, 0x44480114, 0x60e06880, 0x47b04620, 0x28006960, 0xe8bdbf08, 0x200181f0,
0x81f0e8bd, 0x5f20f1b0, 0xf5b0bf32, 0x20002f00, 0xb5704770, 0x2c100b04, 0x200ebf24, 0x04d4eb00,
0x4d4a2032, 0x444d4e4a, 0x0114f105, 0x0400e9c5, 0x60ac4628, 0x696847b0, 0x2034b978, 0x0400e9c5,
0x60ac483b, 0xf1054448, 0x68800114, 0x462860e8, 0x696847b0, 0xbf082800, 0x2001bd70, 0xe92dbd70,
    0x4f3341f0, 0x444f4605, 0x68784614, 0x1c404a31, 0xf0106078, 0xf44f0f01, 0xbf145000, 0x619061d0,
0x5f20f1b5, 0x46
22d305, 0x5020f1a5, 0x41f0e8bd, 0xf5b5e6bd, 0xd3052f00, 0xf5a54622, 0xe8bd2000,
0xe6b441f0, 0xe9d4b975, 0x44080100, 0x1202e9d4, 0x44084411, 0x44086921, 0x44086961, 0x440869a1,
0x61e04240, 0x28100b28, 0x210ebf24, 0x00d0eb01, 0x4e1e2132, 0x8078f8df, 0xe9c6444e, 0x60b01000,
0x0114f106, 0x47c04630, 0xb9886970, 0xe9c62033, 0xf44f0500, 0xe9c67000, 0x68b84002, 0xf1066130,
0x46300114, 0x697047c0, 0xbf082800, 0x81f0e8bd, 0xe8bd2001, 0xeb0181f0, 0x490e1040, 0x0080eb01,
0xf0216801, 0x60010107, 0x43116801, 0x47706001, 0x00000004, 0x20098000, 0x000000b4, 0x400fc080,
0x1fff1ff8, 0xcccccccd, 0x00000034, 0x00000014, 0x1fff1ff1, 0x4002c000, 0x00000000, 0x00000001,
0x00000000, 0x00000000, 0x00000000,
],
'pc_init' : 0x100000D5,
'pc_unInit': 0x100001D7,
'pc_program_page': 0x1000027F,
'pc_erase_sector': 0x10000225,
'pc_eraseAll' : 0x100001DB,
'static_base' : 0x10000000 + 0x00000020 + 0x00000400,
'begin_stack' : 0x10000000 + 0x00000800,
# Double buffering is not supported since there is not enough ram
'begin_data' : 0x10000000 + 0x00000A00, # Analyzer uses a max of 120 B data (30 pages * 4 bytes / page)
'page_size' : 0x00000200,
'min_program_length' : 512,
'analyzer_supported' : True,
'analyzer_address' : 0x10002000 # Analyzer 0x10002000..0x10002600
}
class LPC4088qsb(LPC4088):
MEMORY_MAP = MemoryMap(
FlashRegion( start=0, length=0x10000, is_boot_memory=True,
blocksize=0x1000,
page_size=0x200,
algo=INTERNAL_FLASH_ALGO),
FlashRegion( start=0x10000, length=0x70000, blocksize=0x8000,
page_size=0x400,
erase_sector_weight=LARGE_ERASE_SECTOR_WEIGHT,
program_page_weight=LARGE_PROGRAM_PAGE_WEIGHT,
algo=INTERNAL_FLASH_ALGO),
FlashRegion( start=0x28000000, length=0x1000000, blocksize=0x1000,
page_size=0x200,
algo=FLASH_ALGO),
RamRegion( start=0x10000000, length=0x10000),
)
def __init__(self, session):
super(LPC4088qsb, self).__init__(session, self.MEMORY_MAP)
|
mattjmorrison/ReportLab
|
src/reportlab/pdfbase/pdfdoc.py
|
Python
|
bsd-3-clause
| 83,521
| 0.009279
|
#Copyright ReportLab Europe Ltd. 2000-2004
#see license.txt for license details
#history http://www.reportlab.co.uk/cgi-bin/viewcvs.cgi/public/reportlab/trunk/reportlab/pdfbase/pdfdoc.py
__version__=''' $Id: pdfdoc.py 3795 2010-09-30 15:52:16Z rgbecker $ '''
__doc__="""
The module pdfdoc.py handles the 'outer structure' of PDF documents, ensuring that
all objects are properly cross-referenced and indexed to the nearest byte. The
'inner structure' - the page descriptions - are presumed to be generated before
each page is saved.
pdfgen.py calls this and provides a 'canvas' object to handle page marking operators.
piddlePDF calls pdfgen and offers a high-level interface.
The classes within this generally mirror structures in the PDF file
and are not part of any public interface. Instead, canvas and font
classes are made available elsewhere for users to manipulate.
"""
import string, types, binascii, codecs
from reportlab.pdfbase import pdfutils
from reportlab.pdfbase.pdfutils import LINEEND # this constant needed in both
from reportlab import rl_config
from reportlab.lib.utils import import_zlib, open_for_read, fp_str, _digester
from reportlab.pdfbase import pdfmetrics
try:
from hashlib import md5
except ImportError:
from md5 import md5
from sys import platform
try:
from sys import version_info
except: # pre-2.0
# may be inaccurate but will at least
#work in anything which seeks to format
# version_info into a string
version_info = (1,5,2,'unknown',0)
if platform[:4] == 'java' and version_info[:2] == (2, 1):
# workaround for list()-bug in Jython 2.1 (should be fixed in 2.2)
def list(sequence):
def f(x):
return x
return map(f, sequence)
def utf8str(x):
if isinstance(x,unicode):
return x.encode('utf8')
else:
return str(x)
class PDFError(Exception):
pass
# set this flag to get more vertical whitespace (and larger files)
LongFormat = 1
##if LongFormat: (doesn't work)
## pass
##else:
## LINEEND = "\n" # no wasteful carriage returns!
# __InternalName__ is a special attribute that can only be set by the Document arbitrator
__InternalName__ = "__InternalName__"
# __RefOnly__ marks reference only elements that must be formatted on top level
__RefOnly__ = "__RefOnly__"
# __Comment__ provides a (one line) comment to inline with an object ref, if present
# if it is more than one line then percentize it...
__Comment__ = "__Comment__"
# If DoComments is set then add helpful (space wasting) comment lines to PDF files
DoComments = 1
if not LongFormat:
DoComments = 0
# name for standard font dictionary
BasicFonts = "BasicFonts"
# name for the pages object
Pages = "Pages"
### generic utilities
# for % substitutions
LINEENDDICT = {"LINEEND": LINEEND, "PERCENT": "%"}
PDF_VERSION_DEFAULT = (1, 3)
PDF_SUPPORT_VERSION = dict( #map keyword to min version that supports it
transparency = (1, 4),
)
from types import InstanceType
def format(element, document, toplevel=0, InstanceType=InstanceType):
"""Indirection step for formatting.
Ensures that document parameters alter behaviour
of formatting for all elements.
"""
if hasattr(element,'__PDFObject__'):
if not toplevel and hasattr(element, __RefOnly__):
# the object cannot be a component at non top level.
# make a reference to it and return it's format
return document.Reference(element).format(document)
else:
f = element.format(document)
if not rl_config.invariant and DoComments and hasattr(element, __Comment__):
f = "%s%s%s%s" % ("% ", element.__Comment__, LINEEND, f)
return f
elif type(element) in (float, int):
#use a controlled number formatting routine
#instead of str, so Jython/Python etc do not differ
return fp_str(element)
else:
return str(element)
def xObjectName(externalname):
return "FormXob.%s" % externalname
# backwards compatibility
formName = xObjectName
# no encryption
class NoEncryption:
def encode(self, t):
"encode a string, stream, text"
return t
def prepare(self, document):
# get ready to do encryption
pass
def register(self, objnum, version):
# enter a new direct object
pass
def info(self):
# the representation of self in file if any (should be None or PDFDict)
return None
class DummyDoc:
"used to bypass encryption when required"
__PDFObject__ = True
encrypt = NoEncryption()
### the global document structure manager
class PDFDocument:
__PDFObject__ = True
_ID = None
objectcounter = 0
inObject = None
# set this to define filters
defaultStreamFilters = None
encrypt = NoEncryption() # default no encryption
pageCounter = 1
def __init__(self,
dummyoutline=0,
compression=rl_config.pageCompression,
invariant=rl_config.invariant,
filename=None,
pdfVersion=PDF_VERSION_DEFAULT,
):
# allow None value to be passed in to mean 'give system defaults'
if invariant is None:
self.invariant = rl_config.invariant
else:
self.invariant = invariant
self.setCompression(compression)
self._pdfVersion = pdfVersion
# signature for creating PDF ID
sig = self.signature = md5()
sig.update("a reportlab document")
if not self.invariant:
cat = _getTimeStamp()
else:
cat = 946684800.0
        sig.update(repr(cat)) # initialize with timestamp digest
        # mapping of internal identifier ("Page001") to PDF objectnumber and generation number (34, 0)
self.idToObjectNumberAndVersion = {}
# mapping of internal identifier ("Page001") to PDF object (PDFPage instance)
self.idToObject = {}
# internal id to file location
self.idToOffset = {}
# number to id
self.numberToId = {}
cat = self.Catalog = self._catalog = PDFCatalog()
pages = self.Pages = PDFPages()
cat.Pages = pages
if dummyoutline:
outlines = PDFOutlines0()
else:
outlines = PDFOutlines()
self.Outlines = self.outline = outlines
cat.Outlines = outlines
self.info = PDFInfo()
self.info.invariant = self.invariant
#self.Reference(self.Catalog)
#self.Reference(self.Info)
self.fontMapping = {}
#make an empty font dictionary
DD = PDFDictionary({})
DD.__Comment__ = "The standard fonts dictionary"
self.Reference(DD, BasicFonts)
self.delayedFonts = []
def setCompression(self, onoff):
# XXX: maybe this should also set self.defaultStreamFilters?
self.compression = onoff
def ensureMinPdfVersion(self, *keys):
"Ensure that the pdf version is greater than or equal to that specified by the keys"
for k in keys:
self._pdfVersion = max(self._pdfVersion, PDF_SUPPORT_VERSION[k])
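    # Illustrative example: ensureMinPdfVersion('transparency') bumps
    # self._pdfVersion to at least (1, 4), per PDF_SUPPORT_VERSION above.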
def updateSignature(self, thing):
"add information to the signature"
if self._ID: return # but not if its used already!
self.signature.update(utf8str(thing))
def ID(self):
"A unique fingerprint for the file (unless in invariant mode)"
if self._ID:
return self._ID
digest = self.signature.digest()
doc = DummyDoc()
ID = PDFString(digest,enc='raw')
IDs = ID.format(doc)
self._ID = "%s %% ReportLab generated PDF document -- digest (http://www.reportlab.com) %s [%s %s] %s" % (
LINEEND, LINEEND, IDs, IDs, LINEEND)
return self._ID
def SaveToFile(self, filename, canvas):
if hasattr(getattr(filename, "write",None),'__call__'):
myfile = 0
f = filename
filename = utf8str(getattr(filename,'name',''))
else :
myfile = 1
filename = utf8str(filename)
f = open(filename, "wb")
f.write(self.Ge
|
danielvdende/incubator-airflow
|
airflow/contrib/hooks/wasb_hook.py
|
Python
|
apache-2.0
| 5,899
| 0
|
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from airflow.hooks.base_hook import BaseHook
from azure.storage.blob import BlockBlobService
class WasbHook(BaseHook):
"""
Interacts with Azure Blob Storage through the wasb:// protocol.
Additional options passed in the 'extra' field of the connection will be
    passed to the `BlockBlobService()` constructor. For example, authenticate
using a SAS token by adding {"sas_token": "YOUR_TOKEN"}.
:param wasb_conn_id: Reference to the wasb connection.
:type wasb_conn_id: str
"""
def __init__(self, wasb_conn_id='wasb_default'):
self.conn_id = wasb_conn_id
self.connection = self.get_conn()
def get_conn(self):
"""Return the BlockBlobService object."""
conn = self.get_connection(self.conn_id)
service_options = conn.extra_dejson
return BlockBlobService(account_name=conn.login,
account_key
|
=conn.password, **service_options)
def check_for_blob(self, container_name, blob_name, **kwargs):
"""
Check if a blob exists on Azure Bl
|
ob Storage.
:param container_name: Name of the container.
:type container_name: str
:param blob_name: Name of the blob.
:type blob_name: str
:param kwargs: Optional keyword arguments that
`BlockBlobService.exists()` takes.
:type kwargs: object
:return: True if the blob exists, False otherwise.
        :rtype: bool
"""
return self.connection.exists(container_name, blob_name, **kwargs)
def check_for_prefix(self, container_name, prefix, **kwargs):
"""
Check if a prefix exists on Azure Blob storage.
:param container_name: Name of the container.
:type container_name: str
:param prefix: Prefix of the blob.
:type prefix: str
:param kwargs: Optional keyword arguments that
`BlockBlobService.list_blobs()` takes.
:type kwargs: object
:return: True if blobs matching the prefix exist, False otherwise.
        :rtype: bool
"""
matches = self.connection.list_blobs(container_name, prefix,
num_results=1, **kwargs)
return len(list(matches)) > 0
def load_file(self, file_path, container_name, blob_name, **kwargs):
"""
Upload a file to Azure Blob Storage.
:param file_path: Path to the file to load.
:type file_path: str
:param container_name: Name of the container.
:type container_name: str
:param blob_name: Name of the blob.
:type blob_name: str
:param kwargs: Optional keyword arguments that
`BlockBlobService.create_blob_from_path()` takes.
:type kwargs: object
"""
# Reorder the argument order from airflow.hooks.S3_hook.load_file.
self.connection.create_blob_from_path(container_name, blob_name,
file_path, **kwargs)
def load_string(self, string_data, container_name, blob_name, **kwargs):
"""
Upload a string to Azure Blob Storage.
:param string_data: String to load.
:type string_data: str
:param container_name: Name of the container.
:type container_name: str
:param blob_name: Name of the blob.
:type blob_name: str
:param kwargs: Optional keyword arguments that
`BlockBlobService.create_blob_from_text()` takes.
:type kwargs: object
"""
# Reorder the argument order from airflow.hooks.S3_hook.load_string.
self.connection.create_blob_from_text(container_name, blob_name,
string_data, **kwargs)
def get_file(self, file_path, container_name, blob_name, **kwargs):
"""
Download a file from Azure Blob Storage.
:param file_path: Path to the file to download.
:type file_path: str
:param container_name: Name of the container.
:type container_name: str
:param blob_name: Name of the blob.
:type blob_name: str
:param kwargs: Optional keyword arguments that
            `BlockBlobService.get_blob_to_path()` takes.
:type kwargs: object
"""
return self.connection.get_blob_to_path(container_name, blob_name,
file_path, **kwargs)
def read_file(self, container_name, blob_name, **kwargs):
"""
Read a file from Azure Blob Storage and return as a string.
:param container_name: Name of the container.
:type container_name: str
:param blob_name: Name of the blob.
:type blob_name: str
:param kwargs: Optional keyword arguments that
            `BlockBlobService.get_blob_to_text()` takes.
:type kwargs: object
"""
return self.connection.get_blob_to_text(container_name,
blob_name,
**kwargs).content
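# --- Hedged usage sketch (not part of the original hook) ---
# Shows how the methods above fit together from within an Airflow deployment;
# the connection id must exist in Airflow's metadata DB, and the container and
# blob names below are hypothetical examples.
if __name__ == '__main__':
    hook = WasbHook(wasb_conn_id='wasb_default')
    # upload a string, confirm the blob is there, then read it back
    hook.load_string('hello world',
                     container_name='example-container',
                     blob_name='example/blob.txt')
    assert hook.check_for_blob('example-container', 'example/blob.txt')
    print(hook.read_file('example-container', 'example/blob.txt'))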
|
kyouko-taiga/mushi
|
mushi/apps/webui/views.py
|
Python
|
apache-2.0
| 1,256
| 0.001592
|
# Copyright 2015 Dimitri Racordon
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License
|
.
from flask import Blueprint, current_app, redirect, render_template, url_for
from mushi.core.auth import parse_auth_token, require_auth_token, validate_auth_token
from mushi.core.exc import AuthenticationError
bp = Blueprint('views', __name__)
@bp.route('/')
@require_auth_token
def index(auth_token):
return render_template('spa.html', api_root=current_app.config['API_ROOT'])
@bp.route('/login')
def login():
try:
auth_toke
|
n = parse_auth_token()
validate_auth_token(auth_token)
return redirect(url_for('views.index'))
except AuthenticationError:
return render_template('login.html', api_root=current_app.config['API_ROOT'])
|
atlefren/beercalc
|
db_repository/versions/006_migration.py
|
Python
|
mit
| 884
| 0.001131
|
from sqlalchemy import *
from migrate import *
from migrate.changeset import schema
pre_meta = MetaData()
post_meta = MetaData()
user = Table('u
|
ser', post_meta,
Column('id', Integer, primary_key=True, nullable=False),
|
Column('username', String(length=64)),
Column('email', String(length=120)),
Column('role', SmallInteger, default=ColumnDefault(0)),
Column('name', String(length=120)),
)
def upgrade(migrate_engine):
# Upgrade operations go here. Don't create your own engine; bind
# migrate_engine to your metadata
pre_meta.bind = migrate_engine
post_meta.bind = migrate_engine
post_meta.tables['user'].columns['name'].create()
def downgrade(migrate_engine):
# Operations to reverse the above upgrade go here.
pre_meta.bind = migrate_engine
post_meta.bind = migrate_engine
post_meta.tables['user'].columns['name'].drop()
|
kbrebanov/ansible-modules-extras
|
monitoring/logentries.py
|
Python
|
gpl-3.0
| 4,638
| 0.00539
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Ivan Vanderbyl <ivan@app.io>
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: logentries
author: "Ivan Vanderbyl (@ivanvanderbyl)"
short_description: Module for tracking logs via logentries.com
description:
- Sends logs to LogEntries in realtime
version_added: "1.6"
options:
path:
description:
- path to a log file
required: true
state:
description:
- following state of the log
choices: [ 'present', 'absent' ]
required: false
default: present
name:
description:
- name of the log
required: false
logtype:
description:
- type of the log
required: false
notes:
- Requires the LogEntries agent which can be installed following the instructions at logentries.com
'''
EXAMPLES = '''
# Track nginx logs
- logentries:
path: /var/log/nginx/access.log
state: present
name: nginx-access-log
# Stop tracking nginx logs
- logentries:
path: /var/log/nginx/error.log
state: absent
'''
def query_log_status(module, le_path, path, state="present"):
""" Returns whether a log is followed or not. """
if state == "present":
rc, out, err = module.run_command("%s followed %s" % (le_path, path))
if rc == 0:
return True
return False
def follow_log(module, le_path, logs, name=None, logtype=None):
""" Follows one or more logs if not already followed. """
followed_count = 0
for log in logs:
if query_log_status(module, le_path, log):
continue
if module.check_mode:
module.exit_json(changed=True)
cmd = [le_path, 'follow', log]
if name:
cmd.extend(['--name',name])
if logtype:
cmd.extend(['--type',logtype])
rc, out, err = module.run_command
|
(' '.join(cmd))
if not query_log_status(module, le_path, log):
module.fail_json(msg="failed to follow '%s': %s" % (log, err.strip()))
followed_count += 1
if followed_count > 0:
module.exit_json(changed=True, msg="followed %d log(s)" % (followed_count,))
    module.exit_json(changed=False, msg="log(s) already followed")
def unfollow_log(module, le_path, logs):
""" Unfollows one or more logs if followed. """
removed_count = 0
# Using a for
|
loop in case of error, we can report the log that failed
for log in logs:
# Query the log first, to see if we even need to remove.
if not query_log_status(module, le_path, log):
continue
if module.check_mode:
module.exit_json(changed=True)
rc, out, err = module.run_command([le_path, 'rm', log])
if query_log_status(module, le_path, log):
module.fail_json(msg="failed to remove '%s': %s" % (log, err.strip()))
removed_count += 1
if removed_count > 0:
        module.exit_json(changed=True, msg="removed %d log(s)" % removed_count)
    module.exit_json(changed=False, msg="log(s) already unfollowed")
def main():
module = AnsibleModule(
argument_spec = dict(
path = dict(required=True),
state = dict(default="present", choices=["present", "followed", "absent", "unfollowed"]),
name = dict(required=False, default=None, type='str'),
logtype = dict(required=False, default=None, type='str', aliases=['type'])
),
supports_check_mode=True
)
le_path = module.get_bin_path('le', True, ['/usr/local/bin'])
p = module.params
# Handle multiple log files
logs = p["path"].split(",")
logs = filter(None, logs)
if p["state"] in ["present", "followed"]:
follow_log(module, le_path, logs, name=p['name'], logtype=p['logtype'])
elif p["state"] in ["absent", "unfollowed"]:
unfollow_log(module, le_path, logs)
# import module snippets
from ansible.module_utils.basic import *
main()
|
kern3020/opportunity
|
opportunity/settings/prod.py
|
Python
|
mit
| 575
| 0.006957
|
import dj_database_url
import os
from .base import *
# Parse database configurati
|
on from $DATABASE_URL
DATABASES = { 'default': {} }
DATABASES['default'] = dj_database_url.config()
# Honor the 'X-Forwarded-Proto' header for request.is_secure()
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# Allow all host headers
ALLOWED_HOSTS = ['*']
# Static asset configuration
BASE_DIR =
|
os.path.join(os.path.dirname(os.path.abspath(__file__)),'..')
STATIC_ROOT = 'staticfiles'
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
|
fpeder/mscr
|
bin/db_ext_split.py
|
Python
|
bsd-2-clause
| 1,070
| 0
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Usage: db_ext_split.py <src> <dst> <prob>
Options:
-h --help
"""
import os
import cv2
from glob import glob
from docopt import docopt
from mscr.split import Split, RandomSplitPredicate
from mscr.util import Crop
from mscr.data import MyProgressBar
PAD = 8
if __name__ == '__main__':
args = docopt(__doc__)
src = args
|
['<src>']
dst = args['<dst>']
prob = float(args['<prob>'])
split = Split(RandomSplitPredicate(p=prob))
crop = Crop()
count = 0
if os.path.exists(src) and os.path.exists(dst):
filz = glob(os.path.join(src, '*.jpg'))
pbar = MyProgressBar(len(filz), 'extending db:')
for im in filz:
img = cv2.imread(im)
img = crop.run(img)
|
for bl in split.run(img):
out = os.path.join(dst, str(count).zfill(PAD) + '.jpg')
cv2.imwrite(out, bl.img)
count += 1
pbar.update()
pbar.finish()
else:
        print 'err: db_ext_split.py: path doesn\'t exist'
|
jetskijoe/SickGear
|
sickbeard/db.py
|
Python
|
gpl-3.0
| 19,942
| 0.002507
|
# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear
|
. If not, see <http://www.gnu.org/licenses/>.
from __future__ import with_statement
import os.path
import re
import sqlite3
import time
import threading
import sickbeard
from sickbeard import encodingKludge as ek
from sickbeard import logger
from sickbeard.exceptions import ex
import helpers
db_lock = threading.Lock()
def dbFilename(filename='sickbeard.db', suffix=None):
"""
@param filename: The sqlite database filename to use. If not specified,
|
will be made to be sickbeard.db
@param suffix: The suffix to append to the filename. A '.' will be added
automatically, i.e. suffix='v0' will make dbfile.db.v0
@return: the correct location of the database file.
"""
if suffix:
filename = '%s.%s' % (filename, suffix)
return ek.ek(os.path.join, sickbeard.DATA_DIR, filename)
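# Illustrative examples (assuming sickbeard.DATA_DIR == '/data'):
#   dbFilename()                  -> '/data/sickbeard.db'
#   dbFilename('cache.db', 'v0')  -> '/data/cache.db.v0'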
class DBConnection(object):
def __init__(self, filename='sickbeard.db', suffix=None, row_type=None):
db_src = dbFilename(filename)
if not os.path.isfile(db_src):
db_alt = dbFilename('sickrage.db')
if os.path.isfile(db_alt):
helpers.copyFile(db_alt, db_src)
self.filename = filename
self.connection = sqlite3.connect(db_src, 20)
if row_type == 'dict':
self.connection.row_factory = self._dict_factory
else:
self.connection.row_factory = sqlite3.Row
def checkDBVersion(self):
try:
if self.hasTable('db_version'):
result = self.select('SELECT db_version FROM db_version')
else:
version = self.select('PRAGMA user_version')[0]['user_version']
if version:
self.action('PRAGMA user_version = 0')
self.action('CREATE TABLE db_version (db_version INTEGER);')
self.action('INSERT INTO db_version (db_version) VALUES (%s);' % version)
return version
except:
return 0
if result:
version = int(result[0]['db_version'])
if 10000 > version and self.hasColumn('db_version', 'db_minor_version'):
minor = self.select('SELECT db_minor_version FROM db_version')
return version * 100 + int(minor[0]['db_minor_version'])
return version
else:
return 0
def mass_action(self, querylist, logTransaction=False):
with db_lock:
if querylist is None:
return
sqlResult = []
attempt = 0
while attempt < 5:
try:
affected = 0
for qu in querylist:
cursor = self.connection.cursor()
if len(qu) == 1:
if logTransaction:
logger.log(qu[0], logger.DB)
sqlResult.append(cursor.execute(qu[0]).fetchall())
elif len(qu) > 1:
if logTransaction:
logger.log(qu[0] + ' with args ' + str(qu[1]), logger.DB)
sqlResult.append(cursor.execute(qu[0], qu[1]).fetchall())
affected += cursor.rowcount
self.connection.commit()
if affected > 0:
logger.log(u'Transaction with %s queries executed affected %i row%s' % (
len(querylist), affected, helpers.maybe_plural(affected)), logger.DEBUG)
return sqlResult
except sqlite3.OperationalError as e:
sqlResult = []
if self.connection:
self.connection.rollback()
if 'unable to open database file' in e.args[0] or 'database is locked' in e.args[0]:
logger.log(u'DB error: ' + ex(e), logger.WARNING)
attempt += 1
time.sleep(1)
else:
logger.log(u'DB error: ' + ex(e), logger.ERROR)
raise
except sqlite3.DatabaseError as e:
if self.connection:
self.connection.rollback()
logger.log(u'Fatal error executing query: ' + ex(e), logger.ERROR)
raise
return sqlResult
def action(self, query, args=None):
with db_lock:
if query is None:
return
sqlResult = None
attempt = 0
while attempt < 5:
try:
if args is None:
logger.log(self.filename + ': ' + query, logger.DB)
sqlResult = self.connection.execute(query)
else:
logger.log(self.filename + ': ' + query + ' with args ' + str(args), logger.DB)
sqlResult = self.connection.execute(query, args)
self.connection.commit()
# get out of the connection attempt loop since we were successful
break
except sqlite3.OperationalError as e:
if 'unable to open database file' in e.args[0] or 'database is locked' in e.args[0]:
logger.log(u'DB error: ' + ex(e), logger.WARNING)
attempt += 1
time.sleep(1)
else:
logger.log(u'DB error: ' + ex(e), logger.ERROR)
raise
except sqlite3.DatabaseError as e:
logger.log(u'Fatal error executing query: ' + ex(e), logger.ERROR)
raise
return sqlResult
def select(self, query, args=None):
sqlResults = self.action(query, args).fetchall()
if sqlResults is None:
return []
return sqlResults
def upsert(self, tableName, valueDict, keyDict):
changesBefore = self.connection.total_changes
genParams = lambda myDict: [x + ' = ?' for x in myDict.keys()]
query = 'UPDATE [%s] SET %s WHERE %s' % (
tableName, ', '.join(genParams(valueDict)), ' AND '.join(genParams(keyDict)))
self.action(query, valueDict.values() + keyDict.values())
if self.connection.total_changes == changesBefore:
query = 'INSERT INTO [' + tableName + '] (' + ', '.join(valueDict.keys() + keyDict.keys()) + ')' + \
' VALUES (' + ', '.join(['?'] * len(valueDict.keys() + keyDict.keys())) + ')'
self.action(query, valueDict.values() + keyDict.values())
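    # Illustrative trace of the upsert pattern above (table and column names are
    # examples only): upsert('history', {'status': 1}, {'showid': 42}) first runs
    #   UPDATE [history] SET status = ? WHERE showid = ?
    # and, only if total_changes did not move (no row matched), falls back to
    #   INSERT INTO [history] (status, showid) VALUES (?, ?)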
def tableInfo(self, tableName):
# FIXME ? binding is not supported here, but I cannot find a way to escape a string manually
sqlResult = self.select('PRAGMA table_info([%s])' % tableName)
columns = {}
for column in sqlResult:
columns[column['name']] = {'type': column['type']}
return columns
# http://stackoverflow.com/questions/3300464/how-can-i-get-dict-from-sqlite-query
@staticmethod
def _dict_factory(cursor, row):
d = {}
for idx, col in enumerate(cursor.description):
d[col[0]] = row[idx]
return d
def hasTable(self, tableName):
return len(self.select('SELECT 1 FROM sqlite_maste
|
chartmogul/chartmogul-python
|
chartmogul/api/plan.py
|
Python
|
mit
| 727
| 0
|
from marshmallow import Schema, fields, post_load, EXCLUDE
from ..resource import Resource
from collec
|
tions import namedtuple
|
class Plan(Resource):
"""
https://dev.chartmogul.com/v1.0/reference#plans
"""
_path = "/plans{/uuid}"
_root_key = 'plans'
_many = namedtuple('Plans', [_root_key, "current_page", "total_pages"])
class _Schema(Schema):
uuid = fields.String()
data_source_uuid = fields.String()
name = fields.String()
interval_count = fields.Int()
interval_unit = fields.String()
external_id = fields.String()
@post_load
def make(self, data, **kwargs):
return Plan(**data)
_schema = _Schema(unknown=EXCLUDE)
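
# --- Hedged usage sketch (not part of the original module) ---
# Demonstrates how the nested marshmallow schema turns an API payload into a
# Plan instance via the @post_load hook; the field values are invented examples.
if __name__ == '__main__':
    payload = {
        'uuid': 'pl_00000000-0000-0000-0000-000000000000',
        'data_source_uuid': 'ds_00000000-0000-0000-0000-000000000000',
        'name': 'Gold Plan',
        'interval_count': 1,
        'interval_unit': 'month',
        'external_id': 'gold',
    }
    plan = Plan._schema.load(payload)  # unknown keys would be ignored (EXCLUDE)
    print(isinstance(plan, Plan))      # -> True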
|
limodou/parm
|
parm/utils.py
|
Python
|
bsd-2-clause
| 10,640
| 0.004229
|
from __future__ import print_function
from ._compat import string_types
import os
import re
import logging
import inspect
import datetime
import decimal
log = logging
def safe_import(path):
module = path.split('.')
g = __import__(module[0], fromlist=['*'])
    mod = g  # make sure 'mod' is defined even when the path has a single component
    s = [module[0]]
for i in module[1:]:
mod = g
if hasattr(mod, i):
g = getattr(mod, i)
else:
s.append(i)
g = __import__('.'.join(s), fromlist=['*'])
return mod, g
def import_mod_attr(path):
"""
    Import a module given a dotted string path, e.g. 'uliweb.orm', or accept an
    object directly; return the containing module and the resolved object.
"""
if isinstance(path, string_types):
module, func = path.rsplit('.', 1)
mod = __import__(module, fromlist=['*'])
f = getattr(mod, func)
else:
f = path
mod = inspect.getmodule(path)
return mod, f
def import_attr(func):
mod, f = import_mod_attr(func)
return f
def myimport(module):
mod = __import__(module, fromlist=['*'])
return mod
class MyPkg(object):
@staticmethod
def resource_filename(module, path):
mod = myimport(module)
p = os.path.dirname(mod.__file__)
if path:
return os.path.join(p, path)
else:
return p
@staticmethod
def resource_listdir(module, path):
d = MyPkg.resource_filename(module, path)
return os.listdir(d)
@staticmethod
def resource_isdir(module, path):
d = MyPkg.resource_filename(module, path)
return os.path.isdir(d)
try:
import pkg_resources as pkg
except:
pkg = MyPkg
def extract_file(module, path, dist, verbose=False, replace=True):
outf = os.path.join(dist, os.path.basename(path))
# d = pkg.get_distribution(module)
# if d.has_metadata('zip-safe'):
# f = open(outf, 'wb')
# f.write(pkg.resource_string(module, path))
# f.close()
# if verbose:
# print 'Info : Extract %s/%s to %s' % (module, path, outf)
# else:
import shutil
inf = pkg.resource_filename(module, path)
sfile = os.path.basename(inf)
if os.path.isdir(dist):
dfile = os.path.join(dist, sfile)
else:
dfile = dist
f = os.path.exists(dfile)
if replace or not f:
shutil.copy2(inf, dfile)
if verbose:
print('Copy %s to %s' % (inf, dfile))
def extract_dirs(mod, path, dst, verbose=False, exclude=None, exclude_ext=None, recursion=True, replace=True):
"""
    mod        module name
    path       path inside the module
    dst        output directory
    recursion  True will also extract every sub-directory of path
"""
default_exclude = ['.svn', '_svn', '.git']
default_exclude_ext = ['.pyc', '.pyo', '.bak', '.tmp']
exclude = exclude or []
exclude_ext = exclude_ext or []
# log = logging.getLogger('uliweb')
if not os.path.exists(dst):
os.makedirs(dst)
if verbose:
print('Make directory %s' % dst)
for r in pkg.resource_listdir(mod, path):
if r in exclude or r in default_exclude:
continue
fpath = os.path.join(path, r)
if pkg.resource_isdir(mod, fpath):
if recursion:
extract_dirs(mod, fpath,
|
os.path.join(dst, r), verbose, exclude, exclude_ext, recursion, replace)
else:
ext = os.path.splitext(fpath)[1]
if ext in exclude_ext or ext in default_exclude_ext:
|
continue
extract_file(mod, fpath, dst, verbose, replace)
def match(f, patterns):
from fnmatch import fnmatch
flag = False
for x in patterns:
if fnmatch(f, x):
return True
def walk_dirs(path, include=None, include_ext=None, exclude=None,
exclude_ext=None, recursion=True, file_only=False):
"""
    path       directory path
    recursion  True will also walk every sub-directory of path
"""
default_exclude = ['.svn', '_svn', '.git']
default_exclude_ext = ['.pyc', '.pyo', '.bak', '.tmp']
exclude = exclude or []
exclude_ext = exclude_ext or []
include_ext = include_ext or []
include = include or []
if not os.path.exists(path):
        return  # quietly end the generator instead of raising StopIteration (PEP 479)
for r in os.listdir(path):
if match(r, exclude) or r in default_exclude:
continue
if include and r not in include:
continue
fpath = os.path.join(path, r)
if os.path.isdir(fpath):
if not file_only:
yield os.path.normpath(fpath).replace('\\', '/')
if recursion:
for f in walk_dirs(fpath, include, include_ext, exclude,
exclude_ext, recursion, file_only):
yield os.path.normpath(f).replace('\\', '/')
else:
ext = os.path.splitext(fpath)[1]
if ext in exclude_ext or ext in default_exclude_ext:
continue
if include_ext and ext not in include_ext:
continue
yield os.path.normpath(fpath).replace('\\', '/')
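# Illustrative call (paths are hypothetical): list only Python sources under
# 'src/', recursing into sub-directories and skipping the default excludes:
#   for p in walk_dirs('src', include_ext=['.py'], file_only=True):
#       print(p)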
def copy_dir(src, dst, verbose=False, check=False, processor=None):
import shutil
# log = logging.getLogger('uliweb')
def _md5(filename):
try:
import hashlib
a = hashlib.md5()
except ImportError:
import md5
a = md5.new()
a.update(open(filename, 'rb').read())
return a.digest()
if not os.path.exists(dst):
os.makedirs(dst)
if verbose:
print("Processing %s" % src)
for r in os.listdir(src):
if r in ['.svn', '_svn', '.git']:
continue
fpath = os.path.join(src, r)
if os.path.isdir(fpath):
if os.path.abspath(fpath) != os.path.abspath(dst):
copy_dir(fpath, os.path.join(dst, r), verbose, check, processor)
else:
continue
else:
ext = os.path.splitext(fpath)[1]
if ext in ['.pyc', '.pyo', '.bak', '.tmp']:
continue
df = os.path.join(dst, r)
if check:
if os.path.exists(df):
a = _md5(fpath)
b = _md5(df)
if a != b:
                        print("Error: Target file %s already exists and "
                              "differs from the source %s, so the copy failed" % (fpath, dst))
else:
if processor:
if processor(fpath, dst, df):
continue
shutil.copy2(fpath, dst)
if verbose:
print("Copy %s to %s" % (fpath, dst))
else:
if processor:
if processor(fpath, dst, df):
continue
shutil.copy2(fpath, dst)
if verbose:
print("Copy %s to %s" % (fpath, dst))
def copy_dir_with_check(dirs, dst, verbose=False, check=True, processor=None):
# log = logging.getLogger('uliweb')
for d in dirs:
if not os.path.exists(d):
continue
copy_dir(d, dst, verbose, check, processor)
ESCAPE = re.compile(r'[\x00-\x1f\\"\b\f\n\r\t]')
ESCAPE_DCT = {
'\\': '\\\\',
'"': '\\"',
'\b': '\\b',
'\f': '\\f',
'\n': '\\n',
'\r': '\\r',
'\t': '\\t',
}
for i in range(0x20):
ESCAPE_DCT.setdefault(chr(i), '\\u{0:04x}'.format(i))
def encode_basestring(s):
"""Return a JSON representation of a Python string
"""
def replace(match):
return ESCAPE_DCT[match.group(0)]
return '"' + ESCAPE.sub(replace, s) + '"'
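# Worked example: encode_basestring('a "b"\n') returns the JSON literal "a \"b\"\n",
# i.e. the Python string '"a \\"b\\"\\n"'.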
def encode_unicode(s):
"""Return a JSON representation of a Python unicode
"""
return '"' + s.encode('unicode_escape') + '"'
def simple_value(v):
from uliweb.i18n.lazystr import LazyString
if callable(v):
v = v()
if isinstance(v, LazyString) or isinstance(v, decimal.Decimal) or isinstance(v, datetime.datetime):
return str(v)
else:
return v
class JSONEncoder(object):
def __init__(self, encoding='utf-8', unico
|
dave-shawley/ietfparse
|
tests/test_datastructure.py
|
Python
|
bsd-3-clause
| 3,585
| 0
|
import unittest
from ietfparse.datastructures import ContentType
class ContentTypeCreationTests(unittest.TestCase):
def test_that_primary_type_is_normalized(self):
self.assertEqual('contenttype',
ContentType('COntentType', 'b').content_type)
def test_that_subtype_is_normalized(self):
self.assertEqual('subtype',
ContentType('a', ' SubType ').content_subtype)
def test_that_content_suffix_is_normalized(self):
self.assertEqual(
'json',
ContentType('a', 'b', content_suffix=' JSON').content_suffix)
def test_that_parameter_names_are_casefolded(self):
self.assertDictEqual({'key': 'Value'},
ContentType('a', 'b', paramete
|
rs={
'KEY': 'Value'
|
}).parameters)
class ContentTypeStringificationTests(unittest.TestCase):
def test_that_simple_case_works(self):
self.assertEqual('primary/subtype',
str(ContentType('primary', 'subtype')))
def test_that_parameters_are_sorted_by_name(self):
ct = ContentType('a', 'b', {'one': '1', 'two': '2', 'three': 3})
self.assertEqual('a/b; one=1; three=3; two=2', str(ct))
def test_that_content_suffix_is_appended(self):
ct = ContentType('a', 'b', {'foo': 'bar'}, content_suffix='xml')
self.assertEqual('a/b+xml; foo=bar', str(ct))
class ContentTypeComparisonTests(unittest.TestCase):
def test_type_equals_itself(self):
self.assertEqual(ContentType('a', 'b'), ContentType('a', 'b'))
def test_that_differing_types_are_not_equal(self):
self.assertNotEqual(ContentType('a', 'b'), ContentType('b', 'a'))
def test_that_differing_suffixes_are_not_equal(self):
self.assertNotEqual(ContentType('a', 'b', content_suffix='1'),
ContentType('a', 'b', content_suffix='2'))
def test_that_differing_params_are_not_equal(self):
self.assertNotEqual(ContentType('a', 'b', parameters={'one': '1'}),
ContentType('a', 'b'))
def test_that_case_is_ignored_when_comparing_types(self):
self.assertEqual(ContentType('text', 'html', {'level': '3.2'}, 'json'),
ContentType('Text', 'Html', {'Level': '3.2'}, 'JSON'))
def test_primary_wildcard_is_less_than_anything_else(self):
self.assertLess(ContentType('*', '*'), ContentType('text', 'plain'))
self.assertLess(ContentType('*', '*'), ContentType('text', '*'))
def test_subtype_wildcard_is_less_than_concrete_types(self):
self.assertLess(ContentType('application', '*'),
ContentType('application', 'json'))
self.assertLess(ContentType('text', '*'),
ContentType('application', 'json'))
def test_type_with_fewer_parameters_is_lesser(self):
self.assertLess(
ContentType('application', 'text', parameters={'1': 1}),
ContentType('application', 'text', parameters={
'1': 1,
'2': 2
}))
def test_otherwise_equal_types_ordered_by_primary(self):
self.assertLess(ContentType('first', 'one', parameters={'1': 1}),
ContentType('second', 'one', parameters={'1': 1}))
def test_otherwise_equal_types_ordered_by_subtype(self):
self.assertLess(
ContentType('application', 'first', parameters={'1': 1}),
ContentType('application', 'second', parameters={'1': 1}))
|
DavisNT/mopidy-playbackdefaults
|
tests/test_frontend.py
|
Python
|
apache-2.0
| 5,145
| 0.000972
|
import unittest
import mock
from mopidy_playbackdefaults import PlaybackDefaultsFrontend
class PlaybackDefaultsFrontendTest(unittest.TestCase):
def test_no_settings(self):
config = {'playbackdefaults': {'default_random': '', 'default_repeat': '', 'default_consume': '', 'default_single': ''}}
core = mock.Mock()
self.assertEqual(core.tracklist.set_random.call_count, 0)
|
self.assertEqual(core.tracklist.set_repeat.call_count, 0)
self.assertEqual(core.tracklist.set_consume.call_count, 0)
self.assertEqual(core.tracklist.set_single.call_count, 0)
PlaybackDefaultsFrontend(config, core)
self.assertEqual(core.tracklist.set_random.call_count, 0)
self.assertEqual(core.tracklist.set_repeat.call_count, 0)
self.assertEqual(core.tracklist.set_consume.call_count, 0)
self.assertEqual(core.tracklist.set_single.cal
|
l_count, 0)
def test_random(self):
config = {'playbackdefaults': {'default_random': '', 'default_repeat': '', 'default_consume': '', 'default_single': ''}}
core = mock.Mock()
self.assertEqual(core.tracklist.set_random.call_count, 0)
self.assertEqual(core.tracklist.set_repeat.call_count, 0)
self.assertEqual(core.tracklist.set_consume.call_count, 0)
self.assertEqual(core.tracklist.set_single.call_count, 0)
config['playbackdefaults']['default_random'] = True
PlaybackDefaultsFrontend(config, core)
core.tracklist.set_random.assert_called_once_with(True)
config['playbackdefaults']['default_random'] = False
PlaybackDefaultsFrontend(config, core)
self.assertEqual(core.tracklist.set_random.call_count, 2)
core.tracklist.set_random.assert_called_with(False)
self.assertEqual(core.tracklist.set_repeat.call_count, 0)
self.assertEqual(core.tracklist.set_consume.call_count, 0)
self.assertEqual(core.tracklist.set_single.call_count, 0)
def test_repeat(self):
config = {'playbackdefaults': {'default_random': '', 'default_repeat': '', 'default_consume': '', 'default_single': ''}}
core = mock.Mock()
self.assertEqual(core.tracklist.set_random.call_count, 0)
self.assertEqual(core.tracklist.set_repeat.call_count, 0)
self.assertEqual(core.tracklist.set_consume.call_count, 0)
self.assertEqual(core.tracklist.set_single.call_count, 0)
config['playbackdefaults']['default_repeat'] = True
PlaybackDefaultsFrontend(config, core)
core.tracklist.set_repeat.assert_called_once_with(True)
config['playbackdefaults']['default_repeat'] = False
PlaybackDefaultsFrontend(config, core)
self.assertEqual(core.tracklist.set_repeat.call_count, 2)
core.tracklist.set_repeat.assert_called_with(False)
self.assertEqual(core.tracklist.set_random.call_count, 0)
self.assertEqual(core.tracklist.set_consume.call_count, 0)
self.assertEqual(core.tracklist.set_single.call_count, 0)
def test_consume(self):
config = {'playbackdefaults': {'default_random': '', 'default_repeat': '', 'default_consume': '', 'default_single': ''}}
core = mock.Mock()
self.assertEqual(core.tracklist.set_random.call_count, 0)
self.assertEqual(core.tracklist.set_repeat.call_count, 0)
self.assertEqual(core.tracklist.set_consume.call_count, 0)
self.assertEqual(core.tracklist.set_single.call_count, 0)
config['playbackdefaults']['default_consume'] = True
PlaybackDefaultsFrontend(config, core)
core.tracklist.set_consume.assert_called_once_with(True)
config['playbackdefaults']['default_consume'] = False
PlaybackDefaultsFrontend(config, core)
self.assertEqual(core.tracklist.set_consume.call_count, 2)
core.tracklist.set_consume.assert_called_with(False)
self.assertEqual(core.tracklist.set_random.call_count, 0)
self.assertEqual(core.tracklist.set_repeat.call_count, 0)
self.assertEqual(core.tracklist.set_single.call_count, 0)
def test_single(self):
config = {'playbackdefaults': {'default_random': '', 'default_repeat': '', 'default_consume': '', 'default_single': ''}}
core = mock.Mock()
self.assertEqual(core.tracklist.set_random.call_count, 0)
self.assertEqual(core.tracklist.set_repeat.call_count, 0)
self.assertEqual(core.tracklist.set_consume.call_count, 0)
self.assertEqual(core.tracklist.set_single.call_count, 0)
config['playbackdefaults']['default_single'] = True
PlaybackDefaultsFrontend(config, core)
core.tracklist.set_single.assert_called_once_with(True)
config['playbackdefaults']['default_single'] = False
PlaybackDefaultsFrontend(config, core)
self.assertEqual(core.tracklist.set_single.call_count, 2)
core.tracklist.set_single.assert_called_with(False)
self.assertEqual(core.tracklist.set_random.call_count, 0)
self.assertEqual(core.tracklist.set_repeat.call_count, 0)
self.assertEqual(core.tracklist.set_consume.call_count, 0)
|
rtapadar/pscan
|
pscan/tests/test_scan.py
|
Python
|
apache-2.0
| 5,489
| 0.00419
|
# Copyright 2016 Rudrajit Tapadar
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from base import TestPscan
import errno
import mock
from StringIO import StringIO
import sys
class TestScan(TestPscan):
@mock.patch('socket.socket.connect')
def test_tcp_port_open(self, mock_connect):
hosts = "127.0.0.1"
ports = "22"
mock_connect.return_value = None
scanner = self.get_scanner_obj(hosts, ports)
scanner.tcp()
h = self.get_host_obj(hosts, [22])
h[0].ports[0].status = "Open"
self.assertPortsEqual(scanner.hosts[0].ports,
h[0].ports)
@mock.patch('socket.socket.connect')
def test_tcp_port_closed(self, mock_connect):
hosts = "127.0.0.1"
ports = "22"
mock_connect.side_effect = IOError()
scanner = self.get_scanner_obj(hosts, ports)
scanner.tcp()
h = self.get_host_obj(hosts, [22])
h[0].ports[0].status = "Closed"
self.assertPortsEqual(scanner.hosts[0].ports,
h[0].ports)
@mock.patch('socket.socket.connect')
def test_tcp_port_range(self, mock_connect):
hosts = "127.0.0.1"
ports = "21-22"
mock_connect.return_value = None
mock_connect.side_effect = [IOError(), None]
scanner = self.get_scanner_obj(hosts, ports)
scanner.tcp()
h = self.get_host_obj(hosts, [21, 22])
h[0].ports[0].status = "Closed"
h[0].ports[1].status = "Open"
self.assertPortsEqual(scanner.hosts[0].ports,
h[0].ports)
@mock.patch('socket.socket.connect')
def test_show_open_port(self, mock_connect):
hosts = "127.0.0.1"
ports = "5672"
mock_connect.return_value = None
scanner = self.get_scanner_obj(hosts, ports)
scanner.tcp()
s = sys.stdout
o = StringIO()
sys.stdout = o
output = (
"Showing results for target: 127.0.0.1\n"
"+------+----------+-------+---------+\n"
"| Port | Protocol | State | Service |\n"
"+------+----------+-------+---------+\n"
"| 5672 | TCP | Open | amqp |\n"
"+------+----------+-------+---------+"
)
scanner.show()
self.assertEqual(o.getvalue().strip(), output)
sys.stdout = s
@mock.patch('socket.socket.connect')
def test_show_closed_port(self, mock_connect):
hosts = "127.0.0.1"
ports = "5673"
mock_connect.side_effect = IOError()
scanner = self.get_scanner_obj(hosts, ports)
scanner.tcp()
s = sys.stdout
o = StringIO()
sys.stdout = o
output = (
"Showing results for target: 127.0.0.1\n"
"+------+----------+--------+---------+\n"
"| Port | Protocol | State | Service |\n"
"+------+----------+--------+---------+\n"
"| 5673 | TCP | Closed | unknown |\n"
"+------+----------+--------+---------+"
)
scanner.show()
self.assertEqual(o.getvalue().strip(), output)
sys.stdout = s
@mock.patch('socket.socket.connect')
def test_show_closed_port_range(self, mock_connect):
hosts = "127.0.0.1"
ports = "5673-5674"
mock_connect.side_effect = IOError(errno.ECONNREFUSED)
scanner = self.get_scanner_obj(hosts, ports)
scanner.tcp()
s = sys.stdout
o = StringIO()
sys.stdout = o
output = (
"Showing results for target: 127.0.0.1\n"
"All 2 scanned ports are closed on the target."
)
scanner.show()
self.assertEqual(o.getvalue().strip(), output)
sys.stdout = s
@mock.patch('socket.socket.connect')
def test_show_partially_open_port_range(self, mock_connect):
hosts = "127.0.0.1"
ports = "5671-5672"
mock_connect.return_value = None
mock_connect.side_effect = [IOError(), None]
scanner = self.get_scanner_obj(hosts, ports)
scanner.tcp()
s = sys.stdout
o = StringIO()
sys.stdout = o
output = (
"Showing results for target: 127.0.0.1\n"
"+------+----------+-------+---------+\n"
"| Port | Protocol | State | Service |\n"
"+------+----------+-------+---------+\n"
"| 5672 | TCP | Open | amqp |\n"
"+------+----------+-------+---------+"
)
scanner.show()
self.assertEqual(o.getv
|
alue().strip(), output)
@mock.patch('socket.socket.connect')
def test_udp_port_open(self, mock_connect):
hosts = "127.0.0.1"
ports = "53"
mock_connect.return_value = None
scanner = self.get_scanner_obj(hosts, ports)
scanner.udp()
#h = self.get_host_obj(hosts, [22])
#h[0].ports[
|
0].status = "Open"
#self.assertPortsEqual(scanner.hosts[0].ports,
# h[0].ports)
|
khs26/pele
|
examples/frozen_degrees_of_freedom/frozen_lj.py
|
Python
|
gpl-3.0
| 1,133
| 0.002648
|
"""
this example shows how to freeze degrees of freedom using the Lennard Jones potential as
an example
"""
import numpy as np
|
from pele.potentials import LJ, FrozenPotentialWrapper
from pele.optimize import mylbfgs
def main():
natoms = 4
pot = LJ()
reference_coords = np.random.uniform(-1, 1, [3 * natoms])
print reference_coords
# freeze the firs
|
t two atoms (6 degrees of freedom)
frozen_dof = range(6)
fpot = FrozenPotentialWrapper(pot, reference_coords, frozen_dof)
reduced_coords = fpot.get_reduced_coords(reference_coords)
print "the energy in the full representation:"
print pot.getEnergy(reference_coords)
print "is the same as the energy in the reduced representation:"
print fpot.getEnergy(reduced_coords)
ret = mylbfgs(reduced_coords, fpot)
print "after a minimization the energy is ", ret.energy, "and the rms gradient is", ret.rms
print "the coordinates of the frozen degrees of freedom are unchanged"
print "starting coords:", reference_coords
print "minimized coords:", fpot.get_full_coords(ret.coords)
if __name__ == "__main__":
main()
|
jimmysitu/jBenchmark
|
micro-benchmark/MixBurnIn/MixBurnIn.py
|
Python
|
gpl-2.0
| 4,944
| 0.002629
|
#!/usr/bin/env python3
from optparse import OptionParser
from datetime import datetime
from datetime import timedelta
import pyopencl as cl
import numpy as np
import time
MIN_ELAPSED = 0.25
KEY_LENGTH = 64
BUF_MAX_SIZE= 1024 * 1024
class BurnInTarget():
def __init__(self, platform, kernel):
self.name = platform.get_info(cl.platform_info.NAME)
self.devices = platform.get_devices()
self.context = cl.Context(self.devices)
self.queue = cl.CommandQueue(self.context)
self.program = cl.Program(self.context, kernel).build()
self.minXSize = 16
self.minYSize = 16
# Host bufs
self.hostInfoBuf = np.array(range(2), dtype=np.uint32)
self.hostInfoBuf[0] = 8 # Rounds for each kernel
self.hostInfoBuf[1] = 8
self.hostInBuf = np.random.rand(BUF_MAX_SIZE).astype(np.uint32)
self.hostOutBuf = np.array(range(BUF_MAX_SIZE), dtype=np.uint32)
# Device bufs
self.devInfoBuf = cl.Buffer(self.context, cl.mem_flags.READ_ONLY | cl.mem_flags.COPY_HOST_PTR, hostbuf=self.hostInfoBuf)
self.devInBuf = cl.Buffer(self.context, cl.mem_flags.READ_ONLY | cl.mem_flags.COPY_HOST_PTR, hostbuf=self.hostInBuf)
self.devOutBuf = cl.Buffer(self.context, cl.mem_flags.WRITE_ONLY, self.hostOutBuf.nbytes)
def burn(self, shape):
event = self.program.burn(self.queue, shape, None, self.devInfoBuf, self.devInBuf, self.devOutBuf)
return event
if __name__ == "__main__":
parser = OptionParser()
parser.add_option("-k", "--kernel",
dest="kernel", default='default.cl',
help="Kernel to burn in")
(opts, args) = parser.parse_args()
kernel = open(opts.kernel).read()
# Get all available device and create context for each
platforms = cl.get_platforms()
targets = []
for p in platforms:
vendor = p.get_info(cl.platform_info.VENDOR)
name = p.get_info(cl.platform_info.NAME)
if('Intel' in vendor):
print("Found platform: %s" % name)
targets.append(BurnInTarget(p, kernel))
# Tune runtime for each target
for t in targets:
xsize = 8
ysize = 32
print("Adjusting runtime for platform: %s" % t.name)
elapsed = timedelta()
while(elapsed.total_seconds() < MIN_ELAPSED):
if(elapsed.total_seconds() < (MIN_ELAPSED/2)):
xsize = xsize << 1
else:
xsize = xsize + 8
# Get some power credit
time.sleep(10)
startTime = datetime.utcnow()
event = t.burn((xsize, ysize))
event.wait()
endTime = datetime.utcnow()
elapsed = endTime - startTime
print("Kernel Elapsed Time: %s" % elaps
|
ed.total_seconds())
t.minXSize = xsize
t.minYSize = ysize
print("Final min size: %d, %d" % (t.minXSize, t.minYSize))
# Burn in one by one
time.sleep(20)
for t in target
|
s:
print("Burning platform: %s" % t.name)
startTime = datetime.utcnow()
events =[]
# Make sure this is longer than Tu of PL2
for i in range(16):
events.append(t.burn((8*t.minXSize, 2*t.minYSize)))
for e in events:
e.wait()
endTime = datetime.utcnow()
elapsed = endTime - startTime
print("Kernel Elapsed Time: %s" % elapsed.total_seconds())
time.sleep(20)
#
# # All together
# events =[]
# print("Burning platforms all together, at the same time")
# startTime = datetime.utcnow()
# for i in range(8):
# for t in targets:
# events.append(t.burn((8*t.minXSize, 2*t.minYSize)))
#
# for e in events:
# e.wait()
#
# endTime = datetime.utcnow()
# elapsed = endTime - startTime
# print("Kernel Elapsed Time: %s" % elapsed.total_seconds())
# time.sleep(30)
#
# time.sleep(30)
# print("Burning platforms with sequence")
# events =[]
# startTime = datetime.utcnow()
# for i in range(8):
# for t in sorted(targets, key=lambda x:x.name):
# events.append(t.burn((8*t.minXSize, 2*t.minYSize)))
# time.sleep(2)
#
# for e in events:
# e.wait()
#
# endTime = datetime.utcnow()
# elapsed = endTime - startTime
# print("Kernel Elapsed Time: %s" % elapsed.total_seconds())
#
# time.sleep(30)
# print("Burning platforms with reverse sequence")
# events =[]
# startTime = datetime.utcnow()
# for i in range(8):
# for t in sorted(targets, key=lambda x:x.name, reverse=True):
# events.append(t.burn((8*t.minXSize, 2*t.minYSize)))
# time.sleep(2)
#
# for e in events:
# e.wait()
#
# endTime = datetime.utcnow()
# elapsed = endTime - startTime
# print("Kernel Elapsed Time: %s" % elapsed.total_seconds())
print("Burn in test done", flush=True)
time.sleep(2)
|
SamR1/OpenCityLab-dataviz
|
functions.py
|
Python
|
gpl-3.0
| 4,740
| 0.005274
|
#
# #/usr/bin/env python3
# -*- coding:utf-8 -*-
from database import Measure
import json
import requests
import time
import yaml
with open("param.yml", 'r') as stream:
try:
param = yaml.load(stream)
except yaml.YAMLError as e:
print(e)
def getkwatthours(url, data, headers, sensorId, t0, t1):
sumEnergy=0
try:
result=requests.post(
url + '/api/' + str(sensorId) + '/get/kwatthours/by_time/' + str(t0) + '/' + str(t1),
headers=headers,
data=data)
except json.JSONDecodeError as e:
print("getkwatthours() - ERROR : requests.post \n-> %s" % e)
else:
parsed_json=json.loads(result.text)
try:
sumEnergy=(parsed_json['data']['value']) * 10000 # /100 for test and debug
except Exception as e:
sumEnergy=0
print("getkwatthours() - ERROR : json.loads(result.text) \n-> %s" % e)
print("getkwatthours() : " + str(sumEnergy))
return sumEnergy
def getkwatthoursOem(url, data, headers, sensorId):
sumEnergy=0
try:
result=requests.post(
url + '/emoncms/feed/value.json?id=' + str(sensorId) + data,
headers=headers,
data='')
except json.JSONDecodeError as e:
print("getkwatthoursOem() - ERROR : requests.post \n-> %s" % e)
else:
sumEnergy=json.loads(result.text)
print("getkwatthours() : " + str(sumEnergy))
return sumEnergy
def get_all_data():
# this function collects data from all sensors (connected to each piece of work (=item))
# definition of the time interval, in order to collect data
time0 = time.time()
delay = int(param['delay'])
time.sleep(delay)
time1 = time.time()
# getting energy produced or consumed for each item
headers = {'Content-Type': 'application/json', }
items = param['listItems'] # items must be defined in param.yml
allData = []
# loop on items to retrieve consumption or production data over the defined interval
for item in items:
itemData = {}
itemData["id"] = item
itemData["name"] = param[item]['name']
itemData["type"] = param[item]['type']
itemData["lat"] = param[item]['lat']
itemData["lon"] = param[item]['lon']
itemUrl = param[item]['url']
itemSensorId = param[item]['sensorId']
itemLogin = param[item]['login']
itemPswd = param[item]['password']
itemSource = param[item]['source']
try:
if itemSource == 'CW':
data='login=' + itemLogin + '&password=' + itemPswd
value = getkwatthours(itemUrl, data, headers, itemSensorId, time0, time1)
else:
data='&apikey=' + itemLogin
value = getkwatthoursOem(itemUrl, data, headers, itemSensorId)
except Exception as e:
value=0
print("get_all_data() - ERROR : api call (%s) \n-> %s" % (itemSource, e))
itemData["value"] = value
allData.append(itemData.copy())
print('get_all_data(): time : ' + time.strftime("%D %H:%M:%S", time.localtime(int(time1))) + ', allData = '
+ str(allData))
return allData
def get_last_da
|
ta():
items = param['listItems'] # items must be defined in param.yml
lastData = []
for item in items:
query = Measure.query.filter_by(item=item).order_by(Measure.timestamp.desc()).first()
print(query)
itemData = {}
itemData["id"] = item
itemData["name"] = p
|
aram[item]['name']
itemData["type"] = param[item]['type']
itemData["lat"] = param[item]['lat']
itemData["lon"] = param[item]['lon']
if query is None:
itemData["value"] = 0
else:
itemData["value"] = query.value
lastData.append(itemData.copy())
    print('get_last_data(): lastData = ' + str(lastData))
return lastData
def get_flux_data():
items=param['listItems'] # items must be defined in param.yml
fluxData=[]
for item in items:
if param[item]['flux'] is not None:
for item2 in param[item]['flux']:
itemData={}
fr = str(param[item]['lon']) + ',' + str(param[item]['lat'])
to = str(param[item2]['lon']) + ',' + str(param[item2]['lat'])
nm = param[item]['name'] + ',' + param[item2]['name']
itemData["from"] = fr.split(',')
itemData["to"] = to.split(',')
itemData["labels"] = nm.split(',')
itemData["color"] = "#ff3a31"
fluxData.append(itemData.copy())
    print('get_flux_data(): fluxData = ' + str(fluxData))
return fluxData
|
JoshuaOndieki/buckylist
|
bucky/helpers.py
|
Python
|
mit
| 710
| 0.002817
|
"""
Module contains helper functions
"""
def get_user(username, db):
"""
    Usage: queries the database and returns the user
    object matching the passed username argument
:return: User object or None if no such user
"""
for user in db:
if user.username.lower()
|
== username.lower():
return user
return None
def get_bucket(title, db, current_user):
"""
    Usage: queries the database and returns the bucket
    object matching the passed title argument
:return: Bucket object or None if no such Bucket
"""
for bucket in db:
if bucket.title.lower() == titl
|
e.lower():
return bucket
return None
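# --- Hedged usage sketch (not part of the original module) ---
# 'db' is simply an iterable of objects exposing the attribute being matched;
# the variable names below are hypothetical:
#   get_user('alice', users_db)              # -> the User named 'Alice', or None
#   get_bucket('travel', buckets_db, user)   # -> the Bucket titled 'Travel', or None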
|
qedsoftware/commcare-hq
|
corehq/doctypemigrations/migrations/0004_auto_20151001_1809.py
|
Python
|
bsd-3-clause
| 527
| 0
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from corehq.doctypemigra
|
tions.djangomigrations import assert_initial_complete
from corehq.doctypemigrations.migrator_instances import users_migration
from corehq.sql_db.operations import HqRunPython
class Migration(migrations.Migration):
dependencies = [
|
('doctypemigrations', '0003_doctypemigration_cleanup_complete'),
]
operations = [
HqRunPython(assert_initial_complete(users_migration))
]
|
kpreid/shinysdr
|
shinysdr/interfaces.py
|
Python
|
gpl-3.0
| 11,304
| 0.007432
|
# -*- coding: utf-8 -*-
# Copyright 2013, 2014, 2015, 2016, 2017, 2018 Kevin Reid and the ShinySDR contributors
#
# This file is part of ShinySDR.
#
# ShinySDR is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ShinySDR is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ShinySDR. If not, see <http://www.gnu.org/licenses/>.
"""API for plugins, and related things.
This module contains objects and interfaces used by plugins to declare
the functionality they provide.
"""
# pylint: disable=signature-differs
# (pylint is confused by interfaces)
from __future__ import absolute_import, division, print_function, unicode_literals
from collections import namedtuple
import six
from twisted.plugin import IPlugin
from zope.interface import Attribute, Interface, implementer
from shinysdr.i.modes import IModeDef
from shinysdr.types import EnumRow
__all__ = [] # appended later
class IDemodulatorFactory(Interface):
def __call__(mode, input_rate, context):
"""
Returns a new IDemodulator.
mode: unicode, the mode to be demodulated (should be one the factory/class was declared to support)
input_rate: float, sample rate the demodulator must accept
context: an IDemodulatorContext
May support additional keyword arguments as supplied by unserialize_exported_state.
"""
__all__.append('IDemodulatorFactory')
class IDemodulator(Interface):
"""
Demodulators may also wish to implement:
IDemodulatorModeChange
ITunableDemodulator
Additional constraints:
The object must also be GNU Radio block with one gr_complex input, and output as described by get_output_type().
"""
def get_band_shape():
"""
Returns a BandShape object describing the portion of its input signal which the demodulator uses (typically, the shape of its filter).
Should be exported, typically like:
@exported_value(type=BandShape, changes='never')
This is used to display the filter on-screen and to determine when the demodulator's input requirements are satisfied by the device's tuning.
"""
def get_output_type():
"""
Return the SignalType of the demodulator's output.
The output must be stereo audio, mono audio, or nothing. Note that stereo audio is represented as a vector of two floats, not as two output ports.
"""
__all__.append('IDemodulator')
class IDemodulatorContext(Interface):
def rebuild_me():
"""Request that this demodulator be discarded and an identically configured copy be created.
This is needed when something such as the output type of the demodulator changes; it may also be used any time constructing a new demodulator is more convenient than changing the internal structure of an existing one.
"""
def lock():
"""
Use this method instead of gr.hier_block2.lock().
This differs in that it will avoid acquiring the lock if it is already held (implementing a "recursive lock"). It is therefore suitable for use when the demodulator is being invoked in a situation where the lock may already be held.
"""
def unlock():
"""Use in pairs with IDemodulatorContext.lock()."""
def output_message(message):
"""Report a message output from the demodulator, such as in demodulators which handle packets rather than audio.
The message object should provide shinysdr.telemetry.ITelemetryMessage.
"""
def get_absolute_frequency_cell():
"""Returns a cell containing the original RF carrier frequency of the signal to be demodulated — the frequency the signal entering the demodulator has been shifted down from."""
class ITunableDemodulator(IDemodulator):
"""If a demodulator implements this interface, then there may be a arbitrary frequency offset in its input signal, which it will be informed of via the set_rec_freq method."""
def set_rec_freq(freq):
"""
Set the nominal (carrier) frequency offset of the signal to be demodulated within the input signal.
"""
__all__.append('ITunableDemodulator')
class IDemodulatorModeChange(IDemodulator):
"""If a demodulator implements this interface, then it may be asked to reconfigure itself to demodulate a different mode."""
def can_set_mode(mode):
|
"""
Return whether this demodulator can reconfigure itself to demodulate the specified mode.
If it returns False, it will typically be replaced with a newly created demodulator.
"""
def set_mode(mode):
"""
Per can_set_mode.
"""
__all__.append('IDemodulatorModeChange')
|
# TODO: BandShape doesn't really belong here but it is related to IDemodulator. Find better location.
# All frequencies are relative to the demodulator's input signal (i.e. baseband)
_BandShape = namedtuple('BandShape', [
'stop_low', # float; lower edge of stopband
'pass_low', # float; lower edge of passband
'pass_high', # float; upper edge of passband
'stop_high', # float; upper edge of stopband
'markers', # dict of float to string; labels of significant frequencies (e.g. FSK mark and space)
])
class BandShape(_BandShape):
@classmethod
def lowpass_transition(cls, cutoff, transition, markers=None):
if markers is None:
markers = {}
h = transition / 2.0
return cls(
stop_low=-cutoff - h,
pass_low=-cutoff + h,
pass_high=cutoff - h,
stop_high=cutoff + h,
markers=markers)
@classmethod
def bandpass_transition(cls, transition, low, high, markers=None):
if markers is None:
markers = {}
h = transition / 2.0
return cls(
stop_low=low - h,
pass_low=low + h,
pass_high=high - h,
stop_high=high + h,
markers=markers)
__all__.append('BandShape')
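# Worked example (illustrative numbers): a 5 kHz low-pass with a 1 kHz transition
# band gives, per lowpass_transition() above,
#   BandShape.lowpass_transition(cutoff=5e3, transition=1e3)
#   == BandShape(stop_low=-5500.0, pass_low=-4500.0,
#                pass_high=4500.0, stop_high=5500.0, markers={})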
class IModulatorFactory(Interface):
def __call__(mode, context):
"""
Returns a new IModulator.
mode: unicode, the mode to be modulated (should be one the factory/class was declared to support)
context: always None, will later become IModulatorContext when that exists.
May support additional keyword arguments as supplied by unserialize_exported_state.
"""
class IModulator(Interface):
"""
Additional constraints:
The object must also be a GNU Radio block with one gr_complex output, and input as described by get_input_type().
"""
def can_set_mode(mode):
"""
Return whether this modulator can reconfigure itself to modulate the specified mode.
If it returns False, it will typically be replaced with a newly created modulator.
"""
def set_mode(mode):
"""
Per can_set_mode.
"""
def get_input_type():
"""
Return the SignalType of the modulator's required input, which must currently be mono audio at any sample rate.
"""
def get_output_type():
"""
Return the SignalType of the modulator's output, which must currently be IQ at any sample rate.
"""
__all__.append('IModulator')
class IHasFrequency(Interface):
# TODO: document this
def get_freq():
pass
__all__.append('IHasFrequency')
@implementer(IPlugin, IModeDef)
class ModeDef(object):
# Twisted plugin system caches whether-a-plugin-class-was-found permanently, so we need to avoid _not_ having a ModeDef if the plugin has some sort of
|
tboyce021/home-assistant
|
homeassistant/components/hue/light.py
|
Python
|
apache-2.0
| 14,995
| 0.001067
|
"""Support for the Philips Hue lights."""
from datetime import timedelta
from functools import partial
import logging
import random
import aiohue
import async_timeout
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_COLOR_TEMP,
ATTR_EFFECT,
ATTR_FLASH,
ATTR_HS_COLOR,
ATTR_TRANSITION,
EFFECT_COLORLOOP,
EFFECT_RANDOM,
FLASH_LONG,
FLASH_SHORT,
SUPPORT_BRIGHTNESS,
SUPPORT_COLOR,
SUPPORT_COLOR_TEMP,
SUPPORT_EFFECT,
SUPPORT_FLASH,
SUPPORT_TRANSITION,
LightEntity,
)
from homeassistant.core import callback
from homeassistant.exceptions import PlatformNotReady
from homeassistant.helpers.debounce import Debouncer
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
UpdateFailed,
)
from homeassistant.util import color
from .const import DOMAIN as HUE_DOMAIN, REQUEST_REFRESH_DELAY
from .helpers import remove_devices
SCAN_INTERVAL = timedelta(seconds=5)
_LOGGER = logging.getLogger(__name__)
SUPPORT_HUE_ON_OFF = SUPPORT_FLASH | SUPPORT_TRANSITION
SUPPORT_HUE_DIMMABLE = SUPPORT_HUE_ON_OFF | SUPPORT_BRIGHTNESS
SUPPORT_HUE_COLOR_TEMP = SUPPORT_HUE_DIMMABLE | SUPPORT_COLOR_TEMP
SUPPORT_HUE_COLOR = SUPPORT_HUE_DIMMABLE | SUPPORT_EFFECT | SUPPORT_COLOR
SUPPORT_HUE_EXTENDED = SUPPORT_HUE_COLOR_TEMP | SUPPORT_HUE_COLOR
SUPPORT_HUE = {
"Extended color light": SUPPORT_HUE_EXTENDED,
"Color light": SUPPORT_HUE_COLOR,
"Dimmable light": SUPPORT_HUE_DIMMABLE,
"On/Off plug-in unit": SUPPORT_HUE_ON_OFF,
"Color temperature light": SUPPORT_HUE_COLOR_TEMP,
}
ATTR_IS_HUE_GROUP = "is_hue_group"
GAMUT_TYPE_UNAVAILABLE = "None"
# Minimum Hue Bridge API version to support groups
# 1.4.0 introduced extended group info
# 1.12 introduced the state object for groups
# 1.13 introduced "any_on" to group state objects
GROUP_MIN_API_VERSION = (1, 13, 0)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Old way of setting up Hue lights.
Can only be called when a user accidentally mentions hue platform in their
config. But even in that case it would have been ignored.
"""
def create_light(item_class, coordinator, bridge, is_group, api, item_id):
"""Create the light."""
if is_group:
supported_features = 0
for light_id in api[item_id].lights:
if light_id not in bridge.api.lights:
continue
light = bridge.api.lights[light_id]
supported_features |= SUPPORT_HUE.get(light.type, SUPPORT_HUE_EXTENDED)
supported_features = supported_features or SUPPORT_HUE_EXTENDED
else:
supported_features = SUPPORT_HUE.get(api[item_id].type, SUPPORT_HUE_EXTENDED)
return item_class(coordinator, bridge, is_group, api[item_id], supported_features)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Hue lights from a config entry."""
bridge = hass.data[HUE_DOMAIN][config_entry.entry_id]
light_coordinator = DataUpdateCoordinator(
hass,
_LOGGER,
name="light",
update_method=partial(async_safe_fetch, bridge, bridge.api.lights.update),
update_interval=SCAN_INTERVAL,
request_refresh_debouncer=Debouncer(
bridge.hass, _LOGGER, cooldown=REQUEST_REFRESH_DELAY, immediate=True
),
)
# First do a refresh to see if we can reach the hub.
# Otherwise we will declare not ready.
await light_coordinator.async_refresh()
if not light_coordinator.last_update_success:
raise PlatformNotReady
update_lights = partial(
async_update_items,
bridge,
bridge.api.lights,
{},
async_add_entities,
partial(create_light, HueLight, light_coordinator, bridge, False),
)
# We add a listener after fetching the data, so manually trigger listener
bridge.reset_jobs.append(light_coordinator.async_add_listener(update_lights))
update_lights()
api_version = tuple(int(v) for v in bridge.api.config.apiversion.split("."))
allow_groups = bridge.allow_groups
if allow_groups and api_version < GROUP_MIN_API_VERSION:
_LOGGER.warning("Please update your Hue bridge to support groups")
allow_groups = False
if not allow_groups:
return
group_coordinator = DataUpdateCoordinator(
hass,
_LOGGER,
name="group",
update_method=partial(async_safe_fetch, bridge, bridge.api.groups.update),
update_interval=SCAN_INTERVAL,
request_refresh_debouncer=Debouncer(
bridge.hass, _LOGGER, cooldown=REQUEST_REFRESH_DELAY, immediate=True
),
)
update_groups = partial(
async_update_items,
bridge,
bridge.api.groups,
{},
async_add_entities,
partial(create_light, HueLight, group_coordinator, bridge, True),
)
bridge.reset_jobs.append(group_coordinator.async_add_listener(update_groups))
await group_coordinator.async_refresh()
async def async_safe_fetch(bridge, fetch_method):
"""Safely fetch data."""
try:
with async_timeout.timeout(4):
return await bridge.async_request_call(fetch_method)
except aiohue.Unauthorized as err:
await bridge.handle_unauthorized_error()
raise UpdateFailed("Unauthorized") from err
except (aiohue.AiohueException,) as err:
raise UpdateFailed(f"Hue error: {err}") from err
@callback
def async_update_items(bridge, api, current, async_add_entities, create_item):
"""Update items."""
new_items = []
for item_id in api:
if item_id in current:
continue
current[item_id] = create_item(api, item_id)
new_items.append(current[item_id])
bridge.hass.async_create_task(remove_devices(bridge, api, current))
if new_items:
async_add_entities(new_items)
def hue_brightness_to_hass(value):
"""Convert hue brightness 1..254 to hass format 0..255."""
return min(255, round((value / 254) * 255))
def hass_to_hue_brightness(value):
"""Convert hass brightness 0..255 to hue 1..254 scale."""
return max(1, round((value / 255) * 254))
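# Quick sanity check (added comment, not in the upstream component): the two
# helpers above are approximate inverses across the differing scales, e.g.
# hue_brightness_to_hass(254) == 255 and hass_to_hue_brightness(255) == 254,
# while hass_to_hue_brightness(0) is clamped up to 1 because Hue's scale
# starts at 1.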
class HueLight(CoordinatorEntity, LightEntity):
"""Representation of a Hue light."""
def __init__(self, coordinator, bridge, is_group, light, supported_features):
"""Initialize the light."""
super().__init__(coordinator)
self.light = light
self.bridge = bridge
self.is_group = is_group
self._supported_features = supported_features
if is_group:
self.is_osram = False
self.is_philips = False
self.is_innr = False
self.gamut_typ = GAMUT_TYPE_UNAVAILABLE
self.gamut = None
else:
self.is_osram = light.manufacturername == "OSRAM"
self.is_philips = light.manufacturername == "Philips"
self.is_innr = light.manufacturername == "innr"
self.gamut_typ = self.light.colorgamuttype
self.gamut = self.light.colorgamut
_LOGGER.debug("Color gamut of %s: %s", self.name, str(self.gamut))
if self.light.swupdatestate == "readytoinstall":
err = (
"Please check for software updates of the %s "
"bulb in the Philips Hue App."
)
_LOGGER.warning(err, self.name)
if self.gamut:
if not color.check_valid_gamut(self.gamut):
err = "Color gamut of %s: %s, not valid, s
|
etting gamut to None."
|
_LOGGER.warning(err, self.name, str(self.gamut))
self.gamut_typ = GAMUT_TYPE_UNAVAILABLE
self.gamut = None
@property
def unique_id(self):
"""Return the unique ID of this Hue light."""
return self.light.uniqueid
@property
def device_id(self):
"""Return the ID of this Hue light."""
return self.unique_id
@property
def name(self):
"""Return the name of the Hue light."""
return self.light.name
|
snowfarthing/nibbles_3d
|
vertex.py
|
Python
|
mit
| 6,509
| 0.020126
|
# vertex.py
# This module contains all the things for creating
# and using vertices...starting with vector, and
# going on to edge and face.
# Observe two things, though:
# First, I tried to keep small numbers as "zeros"
# by rounding divisions (see __div__ and norm) to
# five decimal places. So if a number is one
# like 5.2e-6, it will be rounded to 0.
# Second, to make sure that division works
# appropriately, I initialized the original vector
# with float(etc).
from math import sqrt
import pygame
ROUNDOFF = 5
class vector(object):
def __init__(self, x, y, z):
# Note that despite the w, these are
# still 3D vectors.
# Also note that I'd like to remove the w, but I cannot for now.
self.x = float(x)
self.y = float(y)
self.z = float(z)
# self.w = 1.0
def __add__(self, v):
x = self.x+v.x
y = self.y+v.y
z = self.z+v.z
return vector(x, y, z)
def __sub__(self, v):
x = self.x-v.x
y = self.y-v.y
z = self.z-v.z
return vector(x, y, z)
def __mul__(self, s):
x = round(self.x*s, ROUNDOFF)
y = round(self.y*s, ROUNDOFF)
z = round(self.z*s, ROUNDOFF)
return vector(x, y, z)
def __div__(self, s):
x = round(self.x/s, ROUNDOFF)
y = round(self.y/s, ROUNDOFF)
z = round(self.z/s, ROUNDOFF)
return vector(x, y, z)
def __neg__(self):
return vector(-self.x, -self.y, -self.z)
def dot(self, v):
return round(self.x*v.x + self.y*v.y + self.z*v.z, ROUNDOFF)
def cross(self, v):
x = round(self.y*v.z - self.z*v.y, ROUNDOFF)
y = round(self.z*v.x - self.x*v.z, ROUNDOFF)
z = round(self.x*v.y - self.y*v.x, ROUNDOFF)
return vector(x, y, z)
def dist(self):
return round(sqrt(self.x*self.x + self.y*self.y + self.z*self.z), ROUNDOFF)
# return sqrt(self.x*self.x + self.y*self.y + self.z*self.z)
# For some reason, I can't get full rotations to work out
# if I don't allow for the possibility that self.dist() might
# be zero...
#def norm(self):
# return self/self.dist()
def norm(self):
d = self.dist()
if d == 0:
return self
else:
return self/d
def __str__(self):
return "<%s, %s, %s>" % (self.x, self.y, self.z)
# Here are a few vector constants that are nice to
# define: in particular, note that [Left, Up, Fwd]
# is a left-hand coord system, while [Right, Up, Fwd]
# represents a right-hand one.
Zero = vector(0, 0, 0)
Up = vector(0, 1, 0)
Left = vector(1, 0, 0)
Right = vector(-1, 0, 0)
Fwd = vector(0, 0, 1)
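# Small demonstration (added; not part of the original module): with the
# constants above, Left.cross(Up) lands exactly on Fwd, which makes for a
# quick self-check of the cross() implementation.
def _cross_product_demo():
    result = Left.cross(Up)
    assert result.dot(Fwd) == 1.0 and str(result) == str(Fwd)
    return result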
# I defined these functions separately from the
# classes because it seems more natural to say
# "x = dist(v)" rather than "x = v.dist()", etc.
def dist(v):
return round(sqrt(v.x*v.x + v.y*v.y + v.z*v.z), ROUNDOFF)
def norm(v):
return v/dist(v)
def orthonorm(x, y, z):
"""Returns a tuple of orthonormal vectors via the
Gramm-Schmidt process. See Apostal's Linear
Algebra, pg. 111, or another LinAlg book for
the theoretical background of this process."""
q1 = x
q2 = y - q1*(y.dot(q1)/q1.dot(q1))
q3 = z - q1*(z.dot(q1)/q1.dot(q1)) - \
q2*(z.dot(q2)/q2.dot(q2))
return (q1.norm(), q2.norm(), q3.norm())
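# Example (added sketch, not in the original file): feeding three linearly
# independent vectors through orthonorm() yields mutually perpendicular unit
# vectors; with these exact inputs the rounded dot products come out as 0.
def _orthonorm_demo():
    q1, q2, q3 = orthonorm(vector(1, 0, 0), vector(1, 1, 0), vector(1, 1, 1))
    assert q1.dot(q2) == 0 and q1.dot(q3) == 0 and q2.dot(q3) == 0
    return q1, q2, q3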
# Now that we have our vector defined, we could
# define the things that will make our vector a
# vertex.
class edge(object):
def __init__(self, v1, v2, color='none'):
"""Initializes an edge for a wireframe.
v1, v2 are vertex indices, and color is the
default color with which to draw the edge.
For purposes of comparison, each edge is stored
with the first vertex index less than or equal
to the second vertex index."""
|
if v1 < v2:
self.v1 = v1
self.v2 = v2
else:
self.v1 = v2
self.v2 = v1
self.color = color
def __eq__(self, e):
"""Returns true if both vertex indices are equal."""
return (self.v1 == e.v1) and (self.v2 == e.v2)
def __ne__(self, e):
"""Returns true if one one of the vertex indices
is unequal."""
return (self.v1 != e.v1) or (self.v2 != e.v2)
def __str__(self):
return "[ %s, %s ] %s" % (self.v1, self.v2, self.color)
class face(object):
def __init__(self, vertices, edges, color='none'):
"""Initializes a face for a wireframe.
In addition to vertices and color, this class also
keeps track of edges, center, normal and norm*Vertex
of the face.
Note that the normal is calculated assuming that
the vertices are in a clockwise order around the
face when viewed from the outside of the wireframe."""
# This is a list of indices for vertices.
self.vertices = vertices
self.color = color
# This is a list of the indices of the edges.
self.edges = edges
# Note that, ideally, this class should have a
# function that calculates its center and normal;
# since only a wireframe class has this information,
# however, only a wireframe class can calculate it!
def __str__(self):
return "%s <%s>" % (self.vertices, self.color)
# These colors are included with vertices so that
# faces and edges can have colors.
#egacolors = { 'none':-1, 'black':0, 'blue':1,
#'green':2, 'cyan':3, 'red':4, 'purple':5,
#'brown':6, 'gray':7, 'brightblack':8,
#'darkgray':8, 'brightblue':9, 'brightgreen':10,
#'brightcyan':11, 'brightred':12, 'pink':12,
#'brightpurple':13, 'brightbrown':14, 'yellow':14,
#'brightgray': 15, 'white':15 }
# These colors are included with vertices so that
# faces and edges can have colors.
# Now that I'm using pygame, these need to be tweaked!
egacolor = { 'none': -1, 'black': pygame.color.Color('black'),
'blue': pygame.color.Color('blue'), 'green': pygame.color.Color('green'),
'cyan': pygame.color.Color('cyan'), 'red': pygame.color.Color('red'),
'purple': pygame.color.Color('purple'), 'brown': pygame.color.Color('brown'), 'gray': pygame.color.Color('gray'),
'darkgray': pygame.color.Color('darkgray'), 'lightblue': pygame.color.Color('lightblue'),
'lightgreen': pygame.color.Color('lightgreen'), 'lightcyan': pygame.color.Color('lightcyan'),
'pink': pygame.color.Color('pink'),
'lightpurple': pygame.color.Color('red'), 'yellow': pygame.color.Color('yellow'),
'white': pygame.color.Color('white') }
bwcolor = {}
for i in range(0, 16):
bwcolor['black%s' % (i)] = (i*16, i*16, i*16, 255)
|
ltiao/basketball-intelligence
|
bball_intel/bball_intel/wsgi.py
|
Python
|
mit
| 437
| 0.004577
|
"""
WSGI config for bball_intel project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "bball_intel.settings.base")
from django.core.wsgi import get_wsgi_application
from dj_static import Cling
application = Cling(get_wsgi_application())
|
skoczen/qi-toolkit
|
qi_toolkit/boltbase.py
|
Python
|
bsd-3-clause
| 27,252
| 0.007339
|
# A base file for use in fabfiles.
# This file is geared toward a particular directory structure on webfaction and in dev
# Some of it may be useful to other folks, but no guarantees.
# Local Structure
# /
# /db (sqllite for dev and dumps)
# /media
# /appname
# /source (psds and the like)
# Remote Structure (webfaction-based)
# ~/webapps/appname_live
# ~/webapps/appname_live/appname.git (live version)
# ~/webapps/appname_live/appname (symlinked -> # ~/webapps/appname_django/appname/appname)
# ~/webapps/appname_live/appname.wsgi
# ~/webapps/appname_static/ (symlinked* -> # ~/webapps/appname_django/appname/media/*)
# Usage
# Basic:
# from qi_toolkit.fabbase import *
# setup_env(project_name='projname',webfaction_user='username')
# Advanced
# from qi_toolkit.fabbase import *
# initial_settings = {
# 'media_dir':'static',
# }
# overrides = {
# 'workon': 'echo "no work today"',
# }
# setup_env(project_name='projname',webfaction_user='username', initial_settings=initial_settings, overrides=overrides)
from __future__ import with_statement # needed for python 2.5
from fabric.api import *
import fabric
from fabric.contrib.console import confirm
from qi_toolkit.helpers import print_exception
import time
def setup_env_webfaction(project_name, webfaction_user, initial_settings={}, overrides={}):
global env
env.dry_run = False
env.project_name = project_name
env.webfaction_user = webfaction_user
env.is_webfaction = True
env.is_centos = False
# Custom Config Start
env.python_version = "2.6"
env.parent = "origin"
env.working_branch = "master"
env.live_branch = "live"
env.python = "python"
env.is_local = False
env.local_working_path = "~/workingCopy"
env.media_dir = "media"
env.webfaction_host = '%(webfaction_user)s@%(webfaction_user)s.webfactional.com' % env
env.production_hosts = []
env.staging_hosts = []
env.production_db_hosts = []
env.update(initial_settings)
# semi-automated. Override this for more complex, multi-server setups, or non-wf installs.
env.production_hosts = ['%(webfaction_host)s' % env]
env.user_home = "/home/%(webfaction_user)s" % env
env.git_origin = "%(webfaction_host)s:%(user_home)s/git-root/%(project_name)s.git" % env
env.daily_backup_script_name = "daily_backup.sh"
env.weekly_backup_script_name = "weekly_backup.sh"
env.monthly_backup_script_name = "monthly_backup.sh"
env.staging_hosts = env.production_hosts
env.virtualenv_name = env.project_name
env.staging_virtualenv_name = "staging_%(project_name)s" % env
env.live_app_dir = "%(user_home)s/webapps/%(project_name)s_live" % env
env.live_static_dir = "%(user_home)s/webapps/%(project_name)s_static" % env
env.staging_app_dir = "%(user_home)s/webapps/%(project_name)s_staging" % env
env.staging_static_dir = "%(user_home)s/webapps/%(project_name)s_staging_static" % env
env.virtualenv_path = "%(user_home)s/.virtualenvs/%(virtualenv_name)s/lib/python%(python_version)s/site-packages/" % env
env.work_on = "workon %(virtualenv_name)s; " % env
env.backup_root = "%(user_home)s/backups" % env
env.offsite_backup_dir = "aglzen@quantumimagery.com:/home/aglzen/%(project_name)s/data/" % env
env.update(overrides)
def setup_env_centos(project_name, system_user="root", initial_settings={}, overrides={}):
global env
env.dry_run = False
env.project_name = project_name
env.system_user = system_user
env.is_webfaction = False
env.is_centos = True
# Custom Config Start
env.python_version = "2.6"
env.parent = "origin"
env.working_branch = "master"
env.live_branch = "live"
env.staging_branch = "staging"
env.python = "python"
env.is_local = False
env.local_working_path = "~/workingCopy"
env.media_dir = "media"
env.admin_symlink = "admin"
env.production_hosts = []
env.staging_hosts = []
env.production_db_hosts = []
env.staging_db_hosts = []
env.update(initial_settings)
env.production_hosts = ["%(system_user)s@%(h)s" % {'system_user':env.system_user,'h':h} for h in env.production_hosts]
env.staging_hosts = ["%(system_user)s@%(h)s" % {'system_user':env
|
.system_u
|
ser,'h':h} for h in env.staging_hosts]
env.production_db_hosts = ["%(system_user)s@%(h)s" % {'system_user':env.system_user,'h':h} for h in env.production_db_hosts]
env.staging_db_hosts = ["%(system_user)s@%(h)s" % {'system_user':env.system_user,'h':h} for h in env.staging_db_hosts]
if env.system_user == "root":
env.user_home = "/root"
else:
env.user_home = "/home/%(system_user)s" % env
env.virtualenv_name = env.project_name
env.staging_virtualenv_name = "staging_%(virtualenv_name)s" % env
env.live_app_dir = "/var/www"
env.git_path = "%(live_app_dir)s/%(project_name)s.git" % env
env.live_static_dir = "%(git_path)s/media" % env
env.staging_app_dir = env.live_app_dir
env.staging_static_dir = env.live_static_dir
env.virtualenv_path = "%(user_home)s/.virtualenvs/%(virtualenv_name)s/lib/python%(python_version)s/site-packages/" % env
env.work_on = "workon %(virtualenv_name)s; " % env
env.backup_root = "%(user_home)s/backups" % env
env.offsite_backup_dir = "aglzen@quantumimagery.com:/home/aglzen/%(project_name)s/data/" % env
env.update(overrides)
def setup_backup_env_webfaction():
env.current_backup_file = "%(backup_dir)s/currentBackup.json" % env
env.daily_backup_script = daily_backup_script()
env.weekly_backup_script = weekly_backup_script()
env.monthly_backup_script = monthly_backup_script()
def live(dry_run="False"):
if not confirm("You do mean live, right?"):
abort("Bailing out!")
else:
env.dry_run = dry_run.lower() == "true"
env.python = "python%(python_version)s" % env
env.role = "live"
env.settings_file = "envs.%(role)s" % env
env.hosts = env.production_hosts
env.base_path = env.live_app_dir
env.git_path = "%(live_app_dir)s/%(project_name)s.git" % env
env.backup_dir = "%(user_home)s/backups/%(project_name)s" % env
env.media_path = env.live_static_dir
env.pull_branch = env.live_branch
env.release_tag = "%(role)s_release" % env
setup_backup_env_webfaction()
def staging(dry_run="False"):
env.dry_run = dry_run.lower() == "true"
env.python = "python%(python_version)s" % env
env.role = "staging"
env.settings_file = "envs.%(role)s" % env
env.hosts = env.staging_hosts
env.base_path = env.staging_app_dir
env.git_path = "%(staging_app_dir)s/%(project_name)s.git" % env
env.media_path = env.staging_static_dir
env.backup_dir = "%(user_home)s/backups/staging_%(project_name)s" % env
env.pull_branch = env.live_branch
env.release_tag = "%(role)s_release" % env
env.virtualenv_name = env.staging_virtualenv_name
env.virtualenv_path = "%(user_home)s/.virtualenvs/%(virtualenv_name)s/lib/python%(python_version)s/site-packages/" % env
env.work_on = "workon %(virtualenv_name)s; " % env
setup_backup_env_webfaction()
def localhost(dry_run="False"):
env.dry_run = dry_run.lower() == "true"
env.hosts = ['localhost']
env.role = "localhost"
env.settings_file = "envs.dev" % env
env.is_webfaction = False
env.is_centos = False
env.base_path = "%(local_working_path)s/%(project_name)s" % env
env.git_path = env.base_path
env.backup_dir = "%(local_working_path)s/db" % env
env.pull_branch = env.working_branch
env.release_tag = "%(role)s_release" % env
env.virtualenv_path = "~/.virtualenvs/%(virtualenv_name)s/lib/python%(python_version)s/site-packages/" % env
env.is_local = True
env.media_path = "%(base_path)s/%(media_dir)s" % env
setup_backup_env_webfaction()
def live_db():
env.hosts = env.production_db_hosts
def staging_db():
env.hosts = env.staging_db_hosts
def live_celery():
env.hosts = env.production_celery_hosts
def staging_celery():
env.hosts = env.staging_celery_hosts
def has_separate_celery_server():
return hasattr(env,"%s_cele
|
cniswander/clipocr
|
clipocr1.py
|
Python
|
bsd-3-clause
| 6,229
| 0.013004
|
"""
clipocr1.py
Demonstrates a technique that often improves ocr quality on screen captures.
Reads an image from the system clipboard,
and writes to stdout various versions of the text recognized in the image.
Uses tesseract OCR.
The technique is based on judicious rescaling of image dimensions.
SIDE EFFECT:
Creates image files and text file in current working directory.
REQUIREMENTS:
Written and tested 2014 March, 2014 April
on an Ubuntu 12.04 system (64-bit Intel)
Relies on system having these python packages installed
(it's ok to install them as Ubuntu/Debian packages):
- wx
for portable clipboard access.
- PIL [can we make do with Pillow?]
for rescaling the image
NOTE: We might be able to get away with rewriting to use
the right version(s) of wx for this instead?
Relies on system having this software installed,
e.g. as an Ubuntu/Debian package:
- tesseract
the OCR software.
Conveniently, these packages are all open source.
COPYRIGHT:
Copyright (c) 2014 Chris Niswander.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import os
from PIL import Image
import wx # just to access the system clipboard.
def get_file_text(fname):
"""Reads the text out of the text file having pathname /fname/."""
with open(fname) as fin:
return fin.read()
def read_test1(fname):
"""Demonstrates OCRing the text from an image file named /fname/,
and printing it to stdout.
Makes multiple OCR attempts,
based on resizing the image to different size image files,
and prints multiple OCR attempts' text.
"""
def params_textname(params):
"""Given /params/, a resize method specification from resize_methods,
constructs a text string that can be used in a filename
for a resized/rescaled image.
"""
params = params[0][0], params[0][1], params[1]
return '_'.join([str(x).strip() for x in params])
# do ocr on original, non-rescaled image.
print 'ORIGINAL IMAGE:'
print do_ocr_to_imagefile(fname)
im1 = Image.open(fname)
# List of image resizing methods to try.
# Each method consists of:
# [Rescale factor tuple, image rescaling method].
# A rescale factor tuple is (width-rescale-factor, height-rescale-factor)
# Image rescaling method is given as eval()-able text because:
# - convenient for naming image files produced using that method.
resize_methods = [
[(2, 2), 'Image.BICUBIC'],
[(2, 2), 'Image.BILINEAR'],
[(3, 2), 'Image.BICUBIC'],
[(3, 2), 'Image.BILINEAR'],
[(3, 3), 'Image.BICUBIC'],
[(3, 3), 'Image.BILINEAR'],
]
for resize_method in resize_methods:
rescale = resize_method[0]
im_resized = im1.resize(
(im1.size[0] * rescale[0], im1.size[1] * rescale[1]),
(eval (resize_method[1]) ))
resized_path = fname + '__' + params_textname(resize_method) + '.png'
print resized_path
im_resized.save(resized_path)
print do_ocr_to_imagefile(resized_path)
def do_ocr_to_imagefile(fname):
"""Runs tesseract command line utility on image file /fname/
and returns the perceived text.
SIDE EFFECTS:
Creates file 3.txt in current working directory.
"""
os.system('tesseract ' + fname + ' 3' )
# ^ OCR text from the file named /resized_path/, save the text to 3.txt.
return get_file_text('3.txt')
def save_clipboard(fname):
"""Saves an image from the system clipboard to the filename /fname/."""
app = wx.App()
if not wx.TheClipboard:
del app
raise Exception("can't get clipboard")
wx.TheClipboard.Open()
data = wx.BitmapDataObject()
clipboard_getdata_status = wx.TheClipboard.GetData(data)
wx.TheClipboard.Close()
if not clipboard_getdata_status:
del app
raise Exception("couldn't find image data in clipboard")
image = data.GetBitmap().ConvertToImage()
image.SaveFile(fname, 1) # 1 --> save as Windows bitmap.
del app
def clippy():
"""Demonstrates OCRing the text from an image in the system clipboard,
and printing it to stdout.
Makes multiple OCR attempts,
based on resizing the image to different sizes,
and prints multiple OCR attempts' text.
"""
clippy_fname = 'image_from_clipboard'
save_clipboard(clippy_fname)
read_test1(clippy_fname)
clippy()
#---------------------------------------------------------------------------
# Test code not normally called, but tester might run it from e.g. IDE.
def clear_clipboard():
"""Clear the clipboard, which can be useful for error testing."""
app = wx.App()
if not wx.TheClipboard:
del app
raise Exception("can't get clipboard")
wx.TheClipboard.Open()
wx.TheClipboard.Clear()
wx.TheClipboard.Close()
del app
|
msmbuilder/msmbuilder-legacy
|
Extras/parallel_assign/scripts/AssignParallel.py
|
Python
|
gpl-2.0
| 7,120
| 0.007022
|
#!/usr/bin/env python
import sys, os
import numpy as np
import logging
import IPython as ip
from IPython import parallel
from IPython.parallel.error import RemoteError
from msmbuilder import arglib
from msmbuilder import metrics
from msmbuilder import Project
from parallel_assign import remote, local
def setup_logger(console_stream=sys.stdout):
"""
Setup the logger
"""
formatter = logging.Formatter('%(name)s: %(asctime)s: %(message)s',
'%I:%M:%S %p')
console_handler = logging.StreamHandler(console_stream)
console_handler.setFormatter(formatter)
logger = logging.getLogger(os.path.split(sys.argv[0])[1])
logger.root.handlers = [console_handler]
return logger
def main(args, metric, logger):
project = Project.load_from(args.project)
if not os.path.exists(args.generators):
raise IOError('Could not open generators')
generators = os.path.abspath(args.generators)
output_dir = os.path.abspath(args.output_dir)
# connect to the workers
try:
json_file = client_json_file(args.profile, args.cluster_id)
client = parallel.Client(json_file, timeout=2)
except parallel.error.TimeoutError as exception:
msg = '\nparallel.error.TimeoutError: ' + str(exception)
msg += "\n\nPerhaps you didn't start a controller?\n"
msg += "(hint, use ipcluster start)"
print >> sys.stderr, msg
sys.exit(1)
lview = client.load_balanced_view()
# partition the frames into a bunch of vtrajs
all_vtrajs = local.partition(project, args.chunk_size)
# initialize the containers to save to disk
f_assignments, f_distances = local.setup_containers(output_dir,
project, all_vtrajs)
# get the chunks that have not been computed yet
valid_indices = np.where(f_assignments.root.completed_vtrajs[:] == False)[0]
remaining_vtrajs = np.array(all_vtrajs)[valid_indices].tolist()
logger.info('%d/%d jobs remaining', len(remaining_vtrajs), len(all_vtrajs))
# send the workers the files they need to get started
# dview.apply_sync(remote.load_gens, generators, project['ConfFilename'],
# metric)
# get the workers going
n_jobs = len(remaining_vtrajs)
amr = lview.map(remote.assign, remaining_vtrajs,
[generators]*n_jobs, [metric]*n_jobs, chunksize=1)
pending = set(amr.msg_ids)
while pending:
client.wait(pending, 1e-3)
# finished is the set of msg_ids that are complete
finished = pending.difference(client.outstanding)
# update pending to exclude those that just finished
pending = pending.difference(finished)
for msg_id in finished:
# we know these are done, so don't worry about blocking
async = client.get_result(msg_id)
try:
assignments, distances, chunk = async.result[0]
except RemoteError as e:
print 'Remote Error:'
e.print_traceback()
raise
vtraj_id = local.save(f_assignments, f_distances, assignments, distances, chunk)
log_status(logger, len(pending), n_jobs, vtraj_id, async)
f_assignments.close()
f_distances.close()
logger.info('All done, exiting.')
def log_status(logger, n_pending, n_jobs, job_id, async_result):
"""After a job has completed, log the status of the map to the console
Parameters
----------
logger : logging.Logger
logger to print to
n_pending : int
number of jobs still remaining
n_jobs : int
total number of jobs in map
job_id : int
the id of the job that just completed (between 0 and n_jobs)
async_result : IPython.parallel.client.asyncresult.AsyncMapResult
the container with the job results. includes not only the output,
but also metadata describing execution time, etc.
"""
if ip.release.version >= '0.13':
t_since_submit = async_result.completed - async_result.submitted
time_remaining = n_pending * (t_since_submit) / (n_jobs - n_pending)
td = (async_result.completed - async_result.started)
#this is equivalent to the td.total_seconds() method, which was
#introduced in python 2.7
execution_time = (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) / float(10**6)
eta = (async_result.completed + time_remaining).strftime('%I:%M %p')
else:
execution_time, eta = '?', '?'
logger.info('engine: %s; chunk %s; %ss; status: %s; %s/%s remaining; eta %s',
async_result.metadata.engine_id, job_id, execution_time,
async_result.status, n_pending, n_jobs, eta)
def setup_parser():
parser = arglib.ArgumentParser("""
Assign data that were not originally used in the clustering (because of
striding) to the microstates. This is applicable to all medoid-based clustering
algorithms, which includes all those implemented by Cluster.py except the
hierarchical methods. (For assigning to a hierarchical clustering, use
AssignHierarchical.py)
This code uses IPython.parallel to get parallelism across many nodes. Consult
the documentation for details on how to run it""", get_metric=True)
parser.add_argument('project')
parser.add_argument( dest='generators', help='''Trajectory file containing
the structures of each of the cluster centers.''')
parser.add_argument('output_dir')
parser.add_argument('chunk_size', help='''Number of frames to process per worker.
Each chunk requires some communication overhead, so you should use relatively large chunks''',
default=1000, type=int)
parser.add_argument('profile', help='IPython.parallel profile to use.', default='default')
parser.add_argument('cluster_id', help='IPython.parallel cluster_id to use', default='')
args, metric = parser.parse_args()
return args, metric
def client_json_file(profile='default', cluster_id=None):
"""
Get the path to the ipcontroller-client.json file. This really shouldn't be necessary, except that
IPython doesn't automatically insert the cluster_id in the way that it should. I submitted a pull
request to fix it, but here is a monkey patch in the mean time
"""
from IPython.core.profiledir import ProfileDir
from IPython.utils.path import get_ipython_dir
profile_dir = ProfileDir.find_profile_dir_by_name(get_ipython_dir(), profile)
if not cluster_id:
client_json = 'ipcontroller-client.json'
else:
client_json = 'ipcontroller-%s-client.json' % cluster_id
filename = os.path.join(profile_dir.security_dir, client_json)
if not os.path.exists(filename):
raise ValueError('controller information not found at: %s' % filename)
return filename
if __name__ == '__main__':
args, metric = setup_parser()
logger = setup_logger()
main(args, metric, logger)
|
Yelp/paasta
|
tests/api/test_client.py
|
Python
|
apache-2.0
| 1,486
| 0.000673
|
# Copyright 2015-2016 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from paasta_tools.api.client import get_paasta_oapi_client
from paasta_tools.api.client import renew_issue_cert
def test_get_paasta_oapi_client(system_paasta_config):
with mock.patch(
"paasta_tools.api.client.load_system_paasta_config", autospec=True
) as mock_load_system_paasta_config:
mock_load_system_paasta_config.return_value = system_paasta_config
client = get_paasta_oapi_client()
assert client
def test_renew_issue_cert():
with mock.patch(
"paasta_tools.api.client.get_secret_provider", autospec=True
) as mock_get_secret_provider:
mock_config = mock.Mock()
renew_issue_cert(mock_config, "westeros-prod")
mock_get_secret_provider.return_value.renew_issue_cert.assert_called_with(
pki_backend=mock_config.get_pki_backend(),
ttl=mock_config.get_auth_certificate_ttl(),
)
|
luci/luci-py
|
appengine/components/test_support/test_case.py
|
Python
|
apache-2.0
| 11,348
| 0.009076
|
# Copyright 2013 The LUCI Authors. All rights reserved.
# Use of this source code is governed under the Apache License, Version 2.0
# that can be found in the LICENSE file.
import base64
import contextlib
import datetime
import json
import logging
import time
import webtest
from google.appengine.datastore import datastore_stub_util
from google.appengine.ext import ndb
from google.appengine.ext import testbed
from components import endpoints_webapp2
from components import utils
|
from depot_tools import auto_stub
# W0212: Access to a protected member XXX of a client class
# pylint: disable=W0212
def mock_now(test, now, seconds):
"""Mocks utcnow() and ndb properties.
In particular handles when auto_now and auto_now_add are used.
"""
now = now + datetime.timedelta(seconds=seconds)
test.mock(utils, 'utcnow', lambda: now)
test.mock(ndb.DateTimeProperty, '_now', lambda _: now)
test.mock(ndb.DateProperty, '_now', lambda _: now.date())
return now
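# Illustrative usage (added; not from the original file), assuming a TestCase
# subclass defined below and an ndb model with an auto_now property:
#
#   start = datetime.datetime(2014, 1, 2, 3, 4, 5)
#   self.mock_now(start)            # freeze "now" at a known instant
#   ...exercise code that stamps entities...
#   self.mock_now(start, 60)        # then pretend a minute has passed
#
# Both utils.utcnow() and the ndb auto_now/auto_now_add machinery observe the
# mocked clock, so timestamps in the test become deterministic.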
class TestCase(auto_stub.TestCase):
"""Support class to enable more unit testing in GAE.
Adds support for:
- google.appengine.api.mail.send_mail_to_admins().
- Running task queues.
"""
# Set APP_DIR to the root directory containing index.yaml and queue.yaml. It
# will be used to assert the indexes and task queues are properly defined. It
# can be left to None if no index or task queue is used for the test case.
APP_DIR = None
# A test can explicitly acknowledge it depends on composite indexes that may
# not be defined in index.yaml by setting this to True. It is valid only for
# components unit tests that are running outside of a context of some app
# (APP_DIR is None in this case). If APP_DIR is provided, GAE testbed silently
# overwrites index.yaml, and it's not what we want.
SKIP_INDEX_YAML_CHECK = False
# If taskqueues are enqueued during the unit test, self.app must be set to a
# webtest.TestApp instance. It will be used to do the HTTP post when executing
# the enqueued tasks via the taskqueue module.
app = None
def setUp(self):
"""Initializes the commonly used stubs.
Using init_all_stubs() costs ~10ms more to run all the tests so only enable
the ones known to be required. Test cases requiring more stubs can enable
them in their setUp() function.
"""
super(TestCase, self).setUp()
self.testbed = testbed.Testbed()
self.testbed.activate()
# If you have a NeedIndexError, here is the switch you need to flip to have
# the new required indexes added automatically. Change
# train_index_yaml to True to have index.yaml automatically updated, then
# run your test case. Do not forget to put it back to False.
train_index_yaml = False
if self.SKIP_INDEX_YAML_CHECK:
# See comment for skip_index_yaml_check above.
self.assertIsNone(self.APP_DIR)
self.testbed.init_app_identity_stub()
self.testbed.init_datastore_v3_stub(
require_indexes=not train_index_yaml and not self.SKIP_INDEX_YAML_CHECK,
root_path=self.APP_DIR,
consistency_policy=datastore_stub_util.PseudoRandomHRConsistencyPolicy(
probability=1))
self.testbed.init_logservice_stub()
self.testbed.init_memcache_stub()
self.testbed.init_modules_stub()
# Use mocked time in memcache.
memcache = self.testbed.get_stub(testbed.MEMCACHE_SERVICE_NAME)
memcache._gettime = lambda: int(utils.time_time())
# Email support.
self.testbed.init_mail_stub()
self.mail_stub = self.testbed.get_stub(testbed.MAIL_SERVICE_NAME)
self.old_send_to_admins = self.mock(
self.mail_stub, '_Dynamic_SendToAdmins', self._SendToAdmins)
self.testbed.init_taskqueue_stub()
self._taskqueue_stub = self.testbed.get_stub(testbed.TASKQUEUE_SERVICE_NAME)
self._taskqueue_stub._root_path = self.APP_DIR
self.testbed.init_user_stub()
def tearDown(self):
try:
if not self.has_failed():
remaining = self.execute_tasks()
self.assertEqual(0, remaining,
'Passing tests must leave behind no pending tasks, found %d.'
% remaining)
self.testbed.deactivate()
finally:
super(TestCase, self).tearDown()
def mock_now(self, now, seconds=0):
return mock_now(self, now, seconds)
def mock_milliseconds_since_epoch(self, milliseconds):
self.mock(utils, "milliseconds_since_epoch", lambda: milliseconds)
def _SendToAdmins(self, request, *args, **kwargs):
"""Make sure the request is logged.
See google_appengine/google/appengine/api/mail_stub.py around line 299,
MailServiceStub._SendToAdmins().
"""
self.mail_stub._CacheMessage(request)
return self.old_send_to_admins(request, *args, **kwargs)
def execute_tasks(self, **kwargs):
"""Executes enqueued tasks that are ready to run and return the number run.
A task may trigger another task.
Sadly, taskqueue_stub implementation does not provide a nice way to run
them so run the pending tasks manually.
"""
self.assertEqual([None], self._taskqueue_stub._queues.keys())
ran_total = 0
while True:
# Do multiple loops until no task was run.
ran = 0
for queue in self._taskqueue_stub.GetQueues():
if queue['mode'] == 'pull':
continue
for task in self._taskqueue_stub.GetTasks(queue['name']):
# Remove 2 seconds for jitter.
eta = task['eta_usec'] / 1e6 - 2
if eta >= time.time():
continue
self.assertEqual('POST', task['method'])
logging.info('Task: %s', task['url'])
self._post_task(task, **kwargs)
self._taskqueue_stub.DeleteTask(queue['name'], task['name'])
ran += 1
if not ran:
return ran_total
ran_total += ran
def execute_task(self, url, queue_name, payload):
"""Executes a specified task.
Raise error if the task isn't in the queue.
"""
task = self._find_task(url, queue_name, payload)
expected = {'url': url, 'queue_name': queue_name, 'payload': payload}
if not task:
raise AssertionError("Task is not enqueued. expected: %r" % expected)
self._post_task(task)
def _post_task(self, task, **kwargs):
# Not 100% sure why the Content-Length hack is needed, nor why the
# stub returns unicode values that break webtest's assertions.
body = base64.b64decode(task['body'])
headers = {k: str(v) for k, v in task['headers']}
headers['Content-Length'] = str(len(body))
try:
self.app.post(task['url'], body, headers=headers, **kwargs)
except:
logging.error(task)
raise
def _find_task(self, url, queue_name, payload):
for t in self._taskqueue_stub.GetTasks(queue_name):
if t['url'] != url:
continue
if t['queue_name'] != queue_name:
continue
if base64.b64decode(t['body']) != payload:
continue
return t
return None
class Endpoints(object):
"""Handles endpoints API calls."""
def __init__(self, api_service_cls, regex=None, source_ip='127.0.0.1'):
super(Endpoints, self).__init__()
self._api_service_cls = api_service_cls
kwargs = {}
if regex:
kwargs['regex'] = regex
self._api_app = webtest.TestApp(
endpoints_webapp2.api_server([self._api_service_cls], **kwargs),
extra_environ={'REMOTE_ADDR': source_ip})
def call_api(self, method, body=None, status=(200, 204)):
"""Calls endpoints API method identified by its name."""
# Because body is a dict and not a ResourceContainer, there's no way to tell
# which parameters belong in the URL and which belong in the body when the
# HTTP method supports both. However there's no harm in supplying parameters
# in both the URL and the body since ResourceContainers don't allow the same
# parameter name to be used in both places. Supplying parameters in both
# places produces no ambiguity and extraneous parameters are safely ignored.
assert hasattr(self._api_service_cls, method), method
info = getattr(self._api_service_cls, method).method_info
path = info.get_path(self._api_service_cls.api_info)
# Identify which arg
|
wbsavage/shinken
|
shinken/modules/syslog_broker.py
|
Python
|
agpl-3.0
| 1,841
| 0.001086
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (C) 2009-2012:
# Gabes Jean, naparuba@gmail.com
# Gerhard Lausser, Gerhard.Lausser@consol.de
# Gregory Starck, g.starck@gmail.com
# Hartmut Goebel, h.goebel@goebel-consult.de
#
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
# This class is a plugin for the Shinken Broker. It is in charge
# of sending log broks to syslog
import syslog
from shinken.basemodule import BaseModule
from shinken.log import logger
properties = {
'daemons': ['broker'],
'type': 'syslog',
'external': False,
'phases': ['running'],
}
# called by the plugin manager to get a broker
def get_instance(plugin):
logger.info("Get a Syslog broker for plugin %s" % plugin.get_name())
#Catch errors
#path = plugin.path
instance = Syslog_broker(plugin)
return instance
# Class for the Syslog broker
# Gets log broks and writes them to syslog
class Syslog_broker(BaseModule):
def __init__(self, modconf):
BaseModule.__init__(self, modconf)
# A service check have just arrived, we UPDATE data info with this
def manage_log_brok(self, b):
data = b.data
syslog.syslog(data['log'].encode('UTF-8'))
|
mbi/django-simple-captcha
|
setup.py
|
Python
|
mit
| 2,125
| 0.000471
|
import sys
from captcha import get_version as get_captcha_version
from setuptools import find_packages, setup
from setuptools.command.test import test as test_command
class Tox(test_command):
user_options = [("tox-args=", "a", "Arguments to pass to tox")]
def initialize_options(self):
test_command.initialize_options(self)
self.tox_args = None
def finalize_options(self):
test_command.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
# import here, cause outside the eggs aren't loaded
import tox
import shlex
args = self.tox_args
if args:
args = shlex.split(self.tox_args)
errno = tox.cmdline(args=args)
sys.exit(errno)
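# Example invocation (added note; not part of the original setup script):
#   python setup.py test --tox-args="-e py38"
# passes "-e py38" straight through to tox via the user option declared above.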
install_requires = ["Django >= 2.2", "Pillow >=6.2.0", "django-ranged-response == 0.2.0"]
EXTRAS_REQUIRE = {"test": ("testfixtures",)}
with open("README.rst") as readme:
long_description = readme.read()
setup(
name="django-simple-captcha",
version=get_captcha_version(),
description="A very simple, yet powerful, Django captcha application",
long_description=long_description,
author="Marco Bonetti",
author_email="mbonetti@gmail.com",
url="https://github.com/mbi/django-simple-captcha",
license="MIT",
packages=find_packages(exclude=["testproject", "testproject.*"]),
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT Lice
|
nse",
"Operating System :: OS Independent",
"
|
Programming Language :: Python",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Topic :: Security",
"Topic :: Internet :: WWW/HTTP",
"Framework :: Django",
],
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
extras_require=EXTRAS_REQUIRE,
tests_require=["tox"],
cmdclass={"test": Tox},
)
|
bninja/rump
|
rump/request.py
|
Python
|
isc
| 10,243
| 0.000293
|
import base64
import copy
import inspect
import logging
import re
import urlparse
import pilo
from . import Expression, exp, types
logger = logging.getLogger(__name__)
__all__ = [
'PathMixin',
'String',
'Boolean',
'Integer',
'IPAddress',
'IPNetwork',
'NamedTuple',
'StringHash',
'ArgumentHash',
'HeaderHash',
]
class PathMixin(object):
"""
Mix-in for adding `.path` property to a field.
"""
@property
def path(self):
return self.name
class BooleanMixin(Expression):
"""
Mix-in for adding boolean expression capabilities to a field with type
``rump.type.bool``.
"""
inv = False
def __eq__(self, other):
return exp.FieldEqual(self, other)
def __ne__(self, other):
return exp.FieldNotEqual(self, other)
def __invert__(self):
other = copy.copy(self)
other.inv = not other.inv
return other
# Expression
def __call__(self, request):
value = self.__get__(request)
result = False if value is None else value
if self.inv:
result = not result
return result
def __str__(self):
return '{inv}{op}'.format(
inv='not ' if self.inv else '',
op=self.name,
)
def compile(self, symbols):
field_key = symbols.field(self)
return '{inv}request[{field}]'.format(
inv='not ' if self.inv else '',
field=field_key,
)
class Boolean(BooleanMixin, pilo.fields.Boolean, exp.UnaryOp, PathMixin):
type = bool
class BooleanSubField(BooleanMixin, exp.SubField):
type = bool
class StringMixin(object):
"""
Mix-in for adding string expression capabilities to a field with type
``rump.type.str``.
"""
def __eq__(self, other):
return exp.FieldEqual(self, other)
def __ne__(self, other):
return exp.FieldNotEqual(self, other)
def contains(self, item):
return exp.FieldContains(self, item)
def in_(self, others):
return exp.FieldIn(self, others)
def match(self, pattern, ignore_case=False):
flags = 0
if ignore_case:
flags |= re.I
pattern_re = re.compile(pattern, flags)
return exp.FieldMatch(self, pattern_re)
def startswith(self, prefix):
return exp.FieldStartswith(self, prefix)
def endswith(self, suffix):
return exp.FieldEndswith(self, suffix)
class String(pilo.fields.String, PathMixin, StringMixin):
type = str
class StringSubField(exp.SubField, StringMixin):
type = str
class IntegerMixin(object):
"""
Mix-in for adding integer expression capabilities to a field with type
``rump.type.int``.
"""
def __eq__(self, other):
return exp.FieldEqual(self, other)
def __ne__(self, other):
return exp.FieldNotEqual(self, other)
def __lt__(self, other):
return exp.FieldLessThan(self, other)
def __le__(self, other):
return exp.FieldLessThanEqual(self, other)
def __gt__(self, other):
return exp.FieldGreaterThan(self, other)
def __ge__(self, other):
return exp.FieldGreaterThanEqual(self, other)
def in_(self, others):
return exp.FieldIn(self, others)
class Integer(pilo.fields.Integer, PathMixin, IntegerMixin):
type = int
class NamedTuple(pilo.Field, PathMixin):
type = pilo.NOT_SET
def __init__(self, *args, **kwargs):
self.type = kwargs.pop('type', pilo.NOT_SET)
if self.type is pilo.NOT_SET:
args, self.type = pilo.fields.pluck(args, lambda arg: (
inspect.isclass(arg) and issubclass(arg, types.NamedTuple)
))
if self.type is pilo.NOT_SET:
raise TypeError('Missing type=NamedTuple')
super(NamedTuple, self).__init__(*args, **kwargs)
def __getattr__(self, name):
field = getattr(self.type, name, None)
if field is None:
raise AttributeError(
'{0}.{1} does not exist'.format(self.type, name)
)
if not hasattr(field, 'type'):
raise AttributeError(
'{0}.{1}.type does not exist'.format(self.type, name)
)
if issubclass(field.type, str):
sub_field_type = StringSubField
elif issubclass(field.type, bool):
sub_field_type = BooleanSubField
else:
raise TypeError(
'{0}.{1}.type={2} is not supported, must be on of {3}'
.format(self.type, name, field.type, [str, bool])
)
sub_field = sub_field_type(self, name)
setattr(self, name, sub_field)
return sub_field
class StringHash(pilo.Field, PathMixin):
type = types.StringHash
def __getattr__(self, name):
return StringSubField(self, name)
def contains(self, item):
return exp.FieldContains(self, item)
class ArgumentHash(pilo.Field, PathMixin):
type = types.ArgumentHash
def __getattr__(self, name):
return StringSubField(self, name)
def contains(self, item):
return exp.FieldContains(self, item)
class IPAddress(pilo.Field, PathMixin):
type = types.IPAddress
def __eq__(self, other):
return exp.FieldEqual(self, other)
def __ne__(self, other):
return exp.FieldNotEqual(self, other)
def in_(self, others):
return exp.FieldIn(self, others)
class Object(pilo.Field, PathMixin):
type = object
class HeaderHash(pilo.fields.Group, PathMixin):
type = types.HeaderHash
def __init__(self, *args, **kwargs):
super(HeaderHash, self).__init__(
(re.compile('HTTP\_(.+)'), String()), *args, **kwargs
)
def _munge(self, value):
return dict(
(match.group(0).lower(), value)
for _, match, value in super(HeaderHash, self)._munge(value)
)
def __getattr__(self, name):
return StringSubField(self, name)
def contains(self, item):
return exp.FieldContains(self, item)
class BasicAuthorization(types.NamedTuple):
username = String()
password = String()
class Request(pilo.Form):
"""
Defines a request schema as collections of fields:
- ``rump.request.String``
- ``rump.request.Integer``
- ``rump.request.NamedTuple``
- ...
all of which parse or compute values that are one of these ``rump.types``. If
you need to add custom fields just:
.. code:: python
import rump
class MyRequest(rump.Request)
x_sauce = rump.request.String('HTTP_X_SAUCE', default='blue')
env = rump.request.String()
@env.compute
def env(self):
if not self.authorized or not self.password:
return 'public'
return self.password.split('-')[0]
which can then be used in matching expressions:
.. code:: python
print rump._and(MyRequest.x_sauce.in_(['mayo', 'ketchup']), MyRequest.env != 'open')
"""
def __init__(self, environ, router=None):
"""
:param environ: The WSGI environment for the request. This will be
wrapped and stored as `src`.
:param router: Optional `Router` examining this request. This can be
useful when fields uses `Router` information when
computing a value.
"""
super(Request, self).__init__()
self.src = pilo.source.DefaultSource(environ)
self.router = router
def context(self, symbols):
"""
Creates a context for this request to be used when evaluating a
**compiled** rule.
:param symbols: An instance of `exp.Symbols`.
:return: The `exp.Context` for this request.
"""
return exp.Context(self, symbols)
method = String('REQUEST_METHOD')
path = String('PATH_INFO')
query_string = String('QUERY_STRING')
query = ArgumentHash()
@query.compute
def query(self):
if self.query_string:
query = dict(
(k
|
Microvellum/Fluid-Designer
|
win64-vc/2.78/scripts/templates_py/operator_node.py
|
Python
|
gpl-3.0
| 1,429
| 0
|
import bpy
def main(operator, context):
space = context.space_data
node_tree = space.node_tree
node_active = context.active_node
node_selected = context.selected_nodes
# now we have the context, perform a simple operation
if node_active in node_selected:
node_selected.remove(node_active)
if len(node_selected) != 1:
operator.report({'ERROR'}, "2 nodes must be selected")
return
node_other, = node_selected
# now we have 2 nodes to operate on
if not node_active.inputs:
operator.report({'ERROR'}, "Active node has no inputs")
return
if not node_other.outputs:
operator.report({'ERROR'}, "Selected node has no outputs")
return
socket_in = node_active.inputs[0]
socket_out = node_other.outputs[0]
# add a link between the two nodes
node_link = node_tree.links.new(socket_in, socket_out)
class NodeOperator(bpy.types.Operator):
"""Tooltip"""
bl_idname = "node.simple_operator"
bl_label = "Simple Node Operator"
@classmethod
def poll(cls, context):
space = context.space_data
return space.type == 'NODE_EDITOR'
def execute(self, context):
main(self, context)
return {'FINISHED'}
def register():
bpy.utils.register_class(NodeOperator)
def unregister():
bpy.utils.unregister_class(NodeOperator)
if __name__ == "__main__":
register()
|
acesonl/remotecare
|
remotecare/apps/questionnaire/rheumatism/__init__.py
|
Python
|
gpl-3.0
| 114
| 0
|
# -*- coding: utf-8 -*-
""
|
"
This package contains all the forms and models
for the rheumatism questionnaires.
""
|
"
|
Tilo15/PhotoFiddle2
|
PF2/Tools/HueEqualiser.py
|
Python
|
gpl-3.0
| 5,526
| 0.005972
|
import cv2
import numpy
import Tool
class HueEqualiser(Tool.Tool):
def on_init(self):
self.id = "hueequaliser"
self.name = "Hue Equaliser"
self.icon_path = "ui/PF2_Icons/HueEqualiser.png"
self.properties = [
Tool.Property("header", "Hue Equaliser", "Header", None, has_toggle=False, has_button=False),
Tool.Property("bleed", "Hue Bleed", "Slider", 0.5, max=2.0, min=0.01),
Tool.Property("neighbour_bleed", "Neighbour Bleed",
|
"Slider", 0.25, max=2.0, min=0.0),
# Red
Tool.Property("header_red", "Red", "Header", None, has_toggle=False, has_button=False),
|
Tool.Property("red_value", "Value", "Slider", 0, max=50, min=-50),
Tool.Property("red_saturation", "Saturation", "Slider", 0, max=50, min=-50),
# Yellow
Tool.Property("header_yellow", "Yellow", "Header", None, has_toggle=False, has_button=False),
Tool.Property("yellow_value", "Value", "Slider", 0, max=50, min=-50),
Tool.Property("yellow_saturation", "Saturation", "Slider", 0, max=50, min=-50),
# Green
Tool.Property("header_green", "Green", "Header", None, has_toggle=False, has_button=False),
Tool.Property("green_value", "Value", "Slider", 0, max=50, min=-50),
Tool.Property("green_saturation", "Saturation", "Slider", 0, max=50, min=-50),
# Cyan
Tool.Property("header_cyan", "Cyan", "Header", None, has_toggle=False, has_button=False),
Tool.Property("cyan_value", "Value", "Slider", 0, max=50, min=-50),
Tool.Property("cyan_saturation", "Saturation", "Slider", 0, max=50, min=-50),
# Blue
Tool.Property("header_blue", "Blue", "Header", None, has_toggle=False, has_button=False),
Tool.Property("blue_value", "Value", "Slider", 0, max=50, min=-50),
Tool.Property("blue_saturation", "Saturation", "Slider", 0, max=50, min=-50),
# Violet
Tool.Property("header_violet", "Violet", "Header", None, has_toggle=False, has_button=False),
Tool.Property("violet_value", "Value", "Slider", 0, max=50, min=-50),
Tool.Property("violet_saturation", "Saturation", "Slider", 0, max=50, min=-50),
]
def on_update(self, image):
hues = {
"red": 0,
"yellow": 60,
"green": 120,
"cyan": 180,
"blue": 240,
"violet": 300,
"_red": 360,
}
out = image
if(not self.is_default()):
bleed = self.props["bleed"].get_value()
neighbour_bleed = self.props["neighbour_bleed"].get_value()
out = out.astype(numpy.float32)
# Convert to HSV colorspace
out = cv2.cvtColor(out, cv2.COLOR_BGR2HSV)
# Bits per pixel
bpp = float(str(image.dtype).replace("uint", "").replace("float", ""))
# Pixel value range
np = float(2 ** bpp - 1)
imhue = out[0:, 0:, 0]
imsat = out[0:, 0:, 1]
imval = out[0:, 0:, 2]
for hue in hues:
hsat = self.props["%s_saturation" % hue.replace('_', '')].get_value()
hval = self.props["%s_value" % hue.replace('_', '')].get_value()
isHue = self._is_hue(imhue, hues[hue], (3.5/bleed))
isHue = self._neighbour_bleed(isHue, neighbour_bleed)
imsat = imsat + ((hsat / 10000) * 255) * isHue
imval = imval + ((hval / 1000) * np) * isHue
# Clip any values out of bounds
imval[imval < 0.0] = 0.0
imval[imval > np] = np
imsat[imsat < 0.0] = 0.0
imsat[imsat > 1.0] = 1.0
out[0:, 0:, 1] = imsat
out[0:, 0:, 2] = imval
# Convert back to BGR colorspace
out = cv2.cvtColor(out, cv2.COLOR_HSV2BGR)
out = out.astype(image.dtype)
return out
def _is_hue(self, image, hue_value, bleed_value = 3.5):
mif = hue_value - 30
mir = hue_value + 30
if (mir > 360):
mir = 360
if (mif < 0):
mif = 0
bleed = float(360 / bleed_value)
icopy = image.copy()
print(bleed, mif, mir)
if(mif != 0):
icopy[icopy < mif - bleed] = 0.0
icopy[icopy > mir + bleed] = 0.0
icopy[(icopy < mif) * (icopy != 0.0)] = (((mif - (icopy[(icopy < mif) * (icopy != 0.0)]))/360.0) / (bleed/360.0)) * -1 + 1
icopy[(icopy > mir) * (icopy != 0.0)] = ((((icopy[(icopy > mir) * (icopy != 0.0)]) - mir)/360.0) / (bleed/360.0)) * -1 + 1
icopy[(icopy >= mif) * (icopy <= mir)] = 1.0
if(mif == 0):
icopy[icopy > mir + bleed] = 0.0
icopy[(icopy > mir) * (icopy != 0.0)] = ((((icopy[(icopy > mir) * (icopy != 0.0)]) - mir) / 360.0) / (bleed/360.0)) * -1 + 1
return icopy
def _neighbour_bleed(self, map, bleed):
strength = bleed*30
if (strength > 0):
height, width = map.shape[:2]
size = (height * width)
mul = numpy.math.sqrt(size) / 1064.416 # numpy.math.sqrt(1132982.0)
map = map*255
blur_size = abs(2 * round((round(strength * mul) + 1) / 2) - 1)
im = cv2.blur(map, (int(blur_size), int(blur_size)))
return im/255.0
return map
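A hedged, standalone restatement of the masking idea behind _is_hue above, ignoring the red wrap-around that the '_red' entry handles; hue_weight and its defaults are hypothetical names, not part of the tool.

import numpy

def hue_weight(hue_deg, target=120.0, half_width=30.0, bleed=360.0 / 3.5):
    # Pixels within +/- half_width degrees of the target hue get weight 1.0;
    # outside that band the weight falls off linearly over the bleed range
    # and is clipped to [0, 1], mirroring the piecewise logic in _is_hue.
    dist = numpy.abs(hue_deg - target)
    w = 1.0 - (dist - half_width) / bleed
    return numpy.clip(numpy.where(dist <= half_width, 1.0, w), 0.0, 1.0)

print(hue_weight(numpy.array([100.0, 150.0, 300.0])))  # -> [1. 1. 0.]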
|
HERA-Team/hera_mc
|
scripts/mc_listen_to_corr_logger.py
|
Python
|
bsd-2-clause
| 3,281
| 0.000914
|
#! /usr/bin/env python
# -*- mode: python; coding: utf-8 -*-
# Copyright 2018 the HERA Collaboration
# Licensed under the 2-clause BSD license.
"""Gather correlator log outputs and log them into M&C."""
import sys
import json
import redis
import socket
import logging
from astropy.time import Time
from hera_mc import mc
from hera_mc.correlator import DEFAULT_REDIS_ADDRESS
allowed_levels = ["DEBUG", "INFO", "NOTIFY", "WARNING", "ERROR", "CRITICAL"]
logging.addLevelName(logging.INFO + 1, "NOTIFY")
parser = mc.get_mc_argument_parser()
parser.add_argument(
"--redishost",
"-r",
dest="redishost",
default=DEFAULT_REDIS_ADDRESS,
help="The redis db hostname",
)
parser.add_argument(
"--channel",
"-c",
dest="channel",
default="mc-log-channel",
help="The redis channel to listen on.",
)
parser.add_argument(
"-l",
dest="level",
type=str,
default="NOTIFY",
help=(
"Don't log messages below this level. "
"Allowed values are {vals:}".format(vals=allowed_levels)
),
choices=allowed_levels,
)
args = parser.parse_args()
db = mc.connect_to_mc_db(args)
hostname = socket.gethostname()
redis_pool = redis.ConnectionPool(host=args.redishost)
level = logging.getLevelName(args.level)
while True:
try:
with db.sessionmaker() as session, redis.Redis(
connection_pool=redis_pool
) as redis_db:
pubsub = redis_db.pubsub()
pubsub.ignore_subscribe_messages = True
pubsub.subscribe(args.channel)
# pubsub.listen() will create an infinite generator
# that yields messages in our channel
for message in pubsub.listen():
if (
message["data"].decode()
!= "UnicodeDecodeError on emit!"
# messages come as byte strings, make sure an error didn't occur
):
message_dict = json.loads(message["data"])
msg_level = message_dict["levelno"]
if msg_level >= level:
session.add_subsystem_error(
Time(message_dict["logtime"], format="unix"),
message_dict["subsystem"],
message_dict["severity"],
message_dict["message"],
)
session.add_daemon_status(
"mc_listen_to_corr_logger", hostname, Time.now(), "good"
)
session.commit()
except KeyboardInterrupt:
sys.exit()
except Exception as e:
# some common exceptions are this Nonetype being yielded by the iterator
# and a forcible connection closure by the server.
# Ignore for now and re-attach to the pubsub channel
if not any(
str(e).startswith(err)
for err in [
"'NoneType' object has no attribute 'readline'",
"Connection closed by server.",
]
):
print(e)
session.add_daemon_status(
"mc_listen_to_corr_logger", hostname, Time.now(), "errored"
)
session.commit()
continue
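For reference, a hedged sketch of what a publisher on the other side of this channel might send; the field names (logtime, levelno, subsystem, severity, message) are the ones the listener reads above, while the host and the values are placeholders.

import json
import time

import redis

r = redis.Redis(host="redishost")  # placeholder host
message = {
    "logtime": time.time(),        # unix timestamp, parsed via Time(..., format="unix")
    "levelno": 21,                 # numeric level; NOTIFY is registered as INFO + 1 above
    "subsystem": "correlator",
    "severity": 2,
    "message": "example correlator log line",
}
r.publish("mc-log-channel", json.dumps(message))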
|
shivam1111/python-fedex
|
fedex/services/rate_service.py
|
Python
|
bsd-3-clause
| 5,261
| 0.004752
|
"""
Rate Service Module
===================
This package contains classes to request pre-ship rating information and to
determine estimated or courtesy billing quotes. Time in Transit can be
returned with the rates if it is specified in the request.
"""
from datetime import datetime
from .. base_service import FedexBaseService
class FedexRateServiceRequest(FedexBaseService):
"""
This class allows you to get the shipping charges for a particular address.
You will need to populate the data structures in self.RequestedShipment,
then send the request.
"""
def __init__(self, config_obj, *args, **kwargs):
"""
The optional keyword args detailed on L{FedexBaseService}
apply here as well.
@type config_obj: L{FedexConfig}
@param config_obj: A valid FedexConfig object.
"""
self._config_obj = config_obj
# Holds version info for the VersionId SOAP object.
self._version_info = {'service_id': 'crs', 'major': '16',
'intermediate': '0', 'minor': '0'}
self.RequestedShipment = None
"""@ivar: Holds the RequestedShipment WSDL object."""
# Call the parent FedexBaseService class for basic setup work.
super(FedexRateServiceRequest, self).__init__(self._config_obj,
'RateService_v16.wsdl',
*args, **kwargs)
self.ClientDetail.Region = config_obj.express_region_code
def _prepare_wsdl_objects(self):
"""
This is the data that will be used to create your shipment. Create
the data structure and get it ready for the WSDL request.
"
|
""
# Default behavior is to not request transit information
self.ReturnTransitAndCommit = False
# This is the primary data structure for processShipment requests.
self.RequestedShipment = self.client.factory.create('RequestedShipment')
self.RequestedShipment.ShipTimestamp = datetime.now()
        TotalWeight = self.client.factory.create('Weight')
# Start at nothing.
TotalWeight.Value = 0.0
# Default to pounds.
TotalWeight.Units = 'LB'
# This is the total weight of the entire shipment. Shipments may
# contain more than one package.
self.RequestedShipment.TotalWeight = TotalWeight
# This is the top level data structure for Shipper information.
ShipperParty = self.client.factory.create('Party')
ShipperParty.Address = self.client.factory.create('Address')
ShipperParty.Contact = self.client.factory.create('Contact')
# Link the ShipperParty to our master data structure.
self.RequestedShipment.Shipper = ShipperParty
# This is the top level data structure for Recipient information.
RecipientParty = self.client.factory.create('Party')
RecipientParty.Contact = self.client.factory.create('Contact')
RecipientParty.Address = self.client.factory.create('Address')
# Link the RecipientParty object to our master data structure.
self.RequestedShipment.Recipient = RecipientParty
Payor = self.client.factory.create('Payor')
# Grab the account number from the FedexConfig object by default.
Payor.AccountNumber = self._config_obj.account_number
# Assume US.
Payor.CountryCode = 'US'
# Start with no packages, user must add them.
self.RequestedShipment.PackageCount = 0
self.RequestedShipment.RequestedPackageLineItems = []
# This is good to review if you'd like to see what the data structure
# looks like.
self.logger.debug(self.RequestedShipment)
def _assemble_and_send_request(self):
"""
Fires off the Fedex request.
@warning: NEVER CALL THIS METHOD DIRECTLY. CALL send_request(),
WHICH RESIDES ON FedexBaseService AND IS INHERITED.
"""
# Fire off the query.
return self.client.service.getRates(
WebAuthenticationDetail=self.WebAuthenticationDetail,
ClientDetail=self.ClientDetail,
TransactionDetail=self.TransactionDetail,
Version=self.VersionId,
RequestedShipment=self.RequestedShipment,
ReturnTransitAndCommit=self.ReturnTransitAndCommit)
def add_package(self, package_item):
"""
Adds a package to the ship request.
@type package_item: WSDL object, type of RequestedPackageLineItem
WSDL object.
@keyword package_item: A RequestedPackageLineItem, created by
calling create_wsdl_object_of_type('RequestedPackageLineItem') on
this ShipmentRequest object. See examples/create_shipment.py for
more details.
"""
self.RequestedShipment.RequestedPackageLineItems.append(package_item)
package_weight = package_item.Weight.Value
self.RequestedShipment.TotalWeight.Value += package_weight
self.RequestedShipment.PackageCount += 1
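A hedged end-to-end sketch of how this request class is typically driven, following the flow the docstrings describe (populate RequestedShipment, add_package, send_request); the FedexConfig values and address fields are placeholders, and any attribute not defined in this module is an assumption based on the library's examples.

from fedex.config import FedexConfig
from fedex.services.rate_service import FedexRateServiceRequest

config = FedexConfig(key='...', password='...',
                     account_number='...', meter_number='...')
rate_request = FedexRateServiceRequest(config)

# Minimal shipper/recipient details (placeholders).
rate_request.RequestedShipment.Shipper.Address.PostalCode = '29631'
rate_request.RequestedShipment.Shipper.Address.CountryCode = 'US'
rate_request.RequestedShipment.Recipient.Address.PostalCode = '27577'
rate_request.RequestedShipment.Recipient.Address.CountryCode = 'US'

# Build one package and attach it; add_package() updates PackageCount
# and TotalWeight as shown above.
package = rate_request.create_wsdl_object_of_type('RequestedPackageLineItem')
package.Weight = rate_request.create_wsdl_object_of_type('Weight')
package.Weight.Value = 2.0
package.Weight.Units = 'LB'
rate_request.add_package(package)

rate_request.send_request()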
|
p5py/p5
|
p5/pmath/tests/test_utils.py
|
Python
|
gpl-3.0
| 874
| 0
|
import unittest
from p5.pmath.utils import (
constrain,
lerp,
remap,
normalize,
magnitude,
distance,
sq)
class TestUtils(unittest.TestCase):
def test_constrain(self):
self.assertEqual(constrain(5, 0, 10), 5)
self.assertEqual(constrain(-10, 0, 10), 0)
self.assertEqual(constrain(20, 0, 10), 10)
def test_lerp(self):
self.assertEqual(lerp(0, 100, 0.5), 50)
def test_remap(self):
self.assertEqual(remap(50, (0, 100), (0, 10)), 5.0)
def test_normalize(self):
        self.assertEqual(normalize(50, 0, 100), 0.5)
def test_magnitude(self):
self.assertEqual(magnitude(3, 4), 5)
def test_distance(self):
self.assertEqual(distance((0, 0, 0), (2, 3, 6)), 7)
def test_sq(self):
self.assertEqual(sq(4), 16)
if __name__ == "__main__":
unittest.main()
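The remap test above maps 50 from the range (0, 100) into (0, 10); a hedged plain-Python restatement of that linear mapping (not p5's implementation), just to make the expected value explicit.

def remap_sketch(value, source, target):
    # Normalize within the source range, then scale into the target range.
    (s0, s1), (t0, t1) = source, target
    return t0 + (value - s0) * (t1 - t0) / (s1 - s0)

assert remap_sketch(50, (0, 100), (0, 10)) == 5.0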
|
BGCECSE2015/CADO
|
PYTHON/NURBSReconstruction/DooSabin/DualCont_toABC_simple.py
|
Python
|
bsd-3-clause
| 4,309
| 0.02019
|
__author__ = 'erik'
import numpy as np
from PetersScheme.Edge import Edge
from PetersScheme.Quad import Quad
from PetersScheme.Vertex import Vertex
def getABsC_ind(quadIndex, indVertex, indOtherVertex, regularPoints):
'''
:param _quad:
:param indVertex:
:param indOtherVertex:
:param regularPoints:
:return:
'''
assert isinstance(quadIndex,int)
assert isinstance(indVertex,int)
assert isinstance(indOtherVertex,int)
assert isinstance(regularPoints,np.ndarray)
    # assuming 16 vertices per row of regularPoints
    # listed like
'''
4-------------------2
| 12 13 14 15 |
| 8 9 10 11 |
| 4 5 6 7 |
| 0 1 2 3 |
0-------------------1
'''
# points in order A, B1, B2, C
# B1 is the one closest to the edge
clockwiseQuadrantIndices = np.array([[5,4,1,0],\
[6,2,7,3],\
[10,11,14,15],\
[9,13,8,12]], dtype=int)
counterclockwiseQuadrantIndices = np.array([[5,1,4,0],\
[6,7,2,3],\
[10,14,11,15],\
[9,8,13,12]], dtype=int)
if indOtherVertex == (indVertex - 1)%4: #clockwise
return regularPoints[quadIndex,clockwiseQuadrantIndices[indVertex, :]]
else:
return regularPoints[quadIndex,counterclockwiseQuadrantIndices[indVertex, :]]
def getABsC(_quad, _edge, _vertex, regularPoints):
"""
:param:_quad: Quad
:param:_edge: Edge
:param:_vertex: Vertex
:param:regularPoints: numpy.ndarray
:return: A, B1, B2, C
:rtype: numpy.ndarray([int, int, int, int])
"""
assert isinstance(regularPoints, np.ndarray)
assert isinstance(_quad, Quad)
assert isinstance(_edge, Edge)
assert isinstance(_vertex, Vertex)
vertex_inquad_index = _quad.get_vertices().index(_vertex)
neighbour_vertex = _edge.get_other_vertex(_vertex)
other_inquad_index = _quad.get_vertices().index(neighbour_vertex)
return getABsC_ind(_quad.get_id(), vertex_inquad_index, other_inquad_index, regularPoints)
def dualCont_to_ABC_simpl(quad_objs, vert_objs):
num_quads = quad_objs.__len__()
num_verts = len(vert_objs)
points_per_quad = 16
As = np.full((num_verts,7,2),-1,dtype=int)
B1s = np.full((num_verts,7,4),-1,dtype=int)
B2s = np.full((num_verts,7,4),-1,dtype=int)
Cs = np.full((num_verts,7,2),-1,dtype=int)
regularPoints = np.arange(num_quads*points_per_quad, dtype=int).reshape((num_quads, points_per_quad))
for vertex in vert_objs:
vert_id = int(vertex.get_id())
# print "Vert. id: %d, Vert. number: %d" % (vertex.get_id(), vert_id)
number_of_quads = vertex.number_quads()
        assert number_of_quads > 2, "Found coarse mesh vertex %d with 2 or less quads, probably manifold" % vert_id
one_edge = next(iter(vertex.get_edges()))
one_quad = next(iter(one_edge.get_quads()))
vertex_inquad_index = one_quad.get_vertices().index(vertex)
for quadIndex in range(number_of_quads):
# save the vertex ids on the edges closest to B1 and B2
B1s[vert_id, quadIndex, 3] = B2s[vert_id, quadIndex, 3] = vert_id
#first save B2 edge, then switch, then save B1 edge
B2s[vert_id, quadIndex, 2] = one_edge.get_other_vertex(vertex).get_id()
one_edge = one_quad.get_other_edge_sharing_vertex(one_edge, vertex)
B1s[vert_id, quadIndex, 2] = one_edge.get_other_vertex(vertex).get_id()
#get the ABC IDs of the vertex points
As[vert_id,quadIndex,0],\
B1s[vert_id,quadIndex,0],\
B2s[vert_id,quadIndex,0],\
Cs[vert_id,quadIndex,0] = getABsC(one_quad,one_edge, vertex, regularPoints)
#save the quad ID
As[vert_id,quadIndex,1] =\
B1s[vert_id,quadIndex,1] =\
B2s[vert_id,quadIndex,1] =\
Cs[vert_id,quadIndex,1] = int(one_quad.get_id())
#shift to next quad
one_quad = one_quad.get_neighbour_sharing_edge(one_edge)
return As, B1s, B2s, Cs, regularPoints
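A hedged sanity check of the quadrant index tables against the 4x4 layout drawn in the getABsC_ind docstring; the layout array and the check itself are illustrative only.

import numpy as np

# The docstring draws regular points 0..15 with 0-3 on the bottom row and
# 12-15 on the top; the 2x2 block nearest corner vertex 0 should therefore
# contain exactly the indices listed in row 0 of the quadrant tables.
layout = np.arange(16).reshape(4, 4)     # row 0 of the array = bottom row as drawn
corner0 = layout[:2, :2]
print(sorted(corner0.ravel().tolist()))  # -> [0, 1, 4, 5], i.e. the set {5, 4, 1, 0}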
|
jordanemedlock/psychtruths
|
temboo/core/Library/Google/ComputeEngine/Zones/ListZones.py
|
Python
|
apache-2.0
| 5,734
| 0.005232
|
# -*- coding: utf-8 -*-
###############################################################################
#
# ListZones
# Retrieves the list of Zone resources for the specified project.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class ListZones(Choreography):
def __init__(self, temboo_session):
"""
Create a new instance of the ListZones Choreo. A TembooSession object, containing a valid
        set of Temboo credentials, must be supplied.
"""
        super(ListZones, self).__init__(temboo_session, '/Library/Google/ComputeEngine/Zones/ListZones')
def new_input_set(self):
return ListZonesInputSet()
def _make_result_set(self, result, path):
return ListZonesResultSet(result, path)
def _make_execution(self, session, exec_id, path):
return ListZonesChoreographyExecution(session, exec_id, path)
class ListZonesInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the ListZones
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_AccessToken(self, value):
"""
Set the value of the AccessToken input for this Choreo. ((optional, string) A valid access token retrieved during the OAuth process. This is required unless you provide the ClientID, ClientSecret, and RefreshToken to generate a new access token.)
"""
super(ListZonesInputSet, self)._set_input('AccessToken', value)
def set_ClientID(self, value):
"""
Set the value of the ClientID input for this Choreo. ((conditional, string) The Client ID provided by Google. Required unless providing a valid AccessToken.)
"""
super(ListZonesInputSet, self)._set_input('ClientID', value)
def set_ClientSecret(self, value):
"""
Set the value of the ClientSecret input for this Choreo. ((conditional, string) The Client Secret provided by Google. Required unless providing a valid AccessToken.)
"""
super(ListZonesInputSet, self)._set_input('ClientSecret', value)
def set_Fields(self, value):
"""
Set the value of the Fields input for this Choreo. ((optional, string) Comma-seperated list of fields you want to include in the response.)
"""
super(ListZonesInputSet, self)._set_input('Fields', value)
def set_Filter(self, value):
"""
Set the value of the Filter input for this Choreo. ((optional, string) A filter expression for narrowing results in the form: {field_name} {comparison_string} {literal_string} (e.g. name eq europe-west1-a). Comparison strings can be eq (equals) or ne (not equals).)
"""
super(ListZonesInputSet, self)._set_input('Filter', value)
def set_MaxResults(self, value):
"""
Set the value of the MaxResults input for this Choreo. ((optional, integer) The maximum number of results to return.)
"""
super(ListZonesInputSet, self)._set_input('MaxResults', value)
def set_PageToken(self, value):
"""
Set the value of the PageToken input for this Choreo. ((optional, string) The "nextPageToken" found in the response which is used to page through results.)
"""
super(ListZonesInputSet, self)._set_input('PageToken', value)
def set_Project(self, value):
"""
Set the value of the Project input for this Choreo. ((required, string) The ID of a Google Compute project.)
"""
super(ListZonesInputSet, self)._set_input('Project', value)
def set_RefreshToken(self, value):
"""
Set the value of the RefreshToken input for this Choreo. ((conditional, string) An OAuth refresh token used to generate a new access token when the original token is expired. Required unless providing a valid AccessToken.)
"""
super(ListZonesInputSet, self)._set_input('RefreshToken', value)
class ListZonesResultSet(ResultSet):
"""
A ResultSet with methods tailored to the values returned by the ListZones Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
"""
def getJSONFromString(self, str):
return json.loads(str)
def get_Response(self):
"""
Retrieve the value for the "Response" output from this Choreo execution. ((json) The response from Google.)
"""
return self._output.get('Response', None)
def get_NewAccessToken(self):
"""
Retrieve the value for the "NewAccessToken" output from this Choreo execution. ((string) Contains a new AccessToken when the RefreshToken is provided.)
"""
return self._output.get('NewAccessToken', None)
class ListZonesChoreographyExecution(ChoreographyExecution):
def _make_result_set(self, response, path):
return ListZonesResultSet(response, path)
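A hedged usage sketch, assuming the Temboo SDK's usual session and execution helpers (TembooSession, new_input_set, execute_with_results) and that the ListZones class above is importable; the account, app key and project values are placeholders.

from temboo.core.session import TembooSession

# ListZones is the Choreography class defined above.
session = TembooSession('ACCOUNT_NAME', 'APP_KEY_NAME', 'APP_KEY_VALUE')
choreo = ListZones(session)

inputs = choreo.new_input_set()
inputs.set_Project('my-gce-project')        # required
inputs.set_AccessToken('ya29.placeholder')  # or ClientID/ClientSecret/RefreshToken

results = choreo.execute_with_results(inputs)
print(results.get_Response())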
|
hank5925/automatic-subtitle-transcriptor
|
subgen/subtitle_parsing.py
|
Python
|
mit
| 1,283
| 0.018706
|
import math
import datetime
#from subgen.subtitle import Subtitle
#def time_parse(s):
#hour, minute, second_decimal = [t for t in s.split(':')]
#second, microsec = second_decimal.split('.')
#microsec = microsec.ljust(6, '0')
#return [int(hour), int(minute), int(second), int(microsec)]
def subtitle_parser(sub_file):
res_start = []
res_end = []
res_offset = 0
with open(sub_file, 'r') as f:
        inEvents = False
ss = f.readlines()
for s in ss:
if inEvents == True:
sl = s.strip().split(',')
if sl[1] != ' Start':
#rs = time_parse(sl[1])
#re = time_parse(sl[2])
rs = sl[1]
re = sl[2]
res_start.append(rs)
res_end.append(re)
else:
res_offset += len(s)
if s.strip() == "[Events]":
inEvents = True
res_offset += len(s)
return res_start, res_end, res_offset
#if __name__ == "__main__":
#import sys
#import os
#if os.path.exists(sys.argv[1]) == True:
#subtitle_parser(sys.argv[1])
#else:
#raise FileNotFoundError(sys.argv[0] + ": " + sys.argv[1] + "not found.")
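A hedged, minimal example of the .ass-style input this parser expects: after the [Events] line, each comma-separated Dialogue row contributes its 2nd and 3rd fields (start and end), while the "Format: ..., Start, ..." header row is skipped by the sl[1] != ' Start' check. The file name and contents here are made up.

sample = (
    "[Script Info]\n"
    "Title: example\n"
    "\n"
    "[Events]\n"
    "Format: Layer, Start, End, Style, Text\n"
    "Dialogue: 0,0:00:01.00,0:00:03.50,Default,Hello\n"
    "Dialogue: 0,0:00:04.00,0:00:06.00,Default,World\n"
)
with open("sample.ass", "w") as f:
    f.write(sample)

starts, ends, offset = subtitle_parser("sample.ass")
print(starts)  # ['0:00:01.00', '0:00:04.00']
print(ends)    # ['0:00:03.50', '0:00:06.00']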
|
houseurmusic/my-swift
|
test/unit/container/test_updater.py
|
Python
|
apache-2.0
| 8,262
| 0.000484
|
# Copyright (c) 2010-2011 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import cPickle as pickle
import os
import sys
import unittest
from gzip import GzipFile
from shutil import rmtree
from tempfile import mkdtemp
from eventlet import spawn, TimeoutError, listen
from eventlet.timeout import Timeout
from swift.common import utils
from swift.container import updater as container_updater
from swift.container import server as container_server
from swift.common.db import ContainerBroker
from swift.common.ring import RingData
from swift.common.utils import normalize_timestamp
class TestContainerUpdater(unittest.TestCase):
def setUp(self):
utils.HASH_PATH_SUFFIX = 'endcap'
self.testdir = os.path.join(mkdtemp(), 'tmp_test_container_updater')
rmtree(self.testdir, ignore_errors=1)
os.mkdir(self.testdir)
pickle.dump(RingData([[0, 1, 0, 1], [1, 0, 1, 0]],
[{'id': 0, 'ip': '127.0.0.1', 'port': 12345, 'device': 'sda1',
'zone': 0},
{'id': 1, 'ip': '127.0.0.1', 'port': 12345, 'device': 'sda1',
'zone': 2}], 30),
GzipFile(os.path.join(self.testdir, 'account.ring.gz'), 'wb'))
self.devices_dir = os.path.join(self.testdir, 'devices')
os.mkdir(self.devices_dir)
self.sda1 = os.path.join(self.devices_dir, 'sda1')
os.mkdir(self.sda1)
def tearDown(self):
rmtree(os.path.dirname(self.testdir), ignore_errors=1)
def test_creation(self):
cu = container_updater.ContainerUpdater({
'devices': self.devices_dir,
'mount_check': 'false',
'swift_dir': self.testdir,
'interval': '1',
'concurrency': '2',
'node_timeout': '5',
})
self.assert_(hasattr(cu, 'logger'))
self.assert_(cu.logger is not None)
self.assertEquals(cu.devices, self.devices_dir)
self.assertEquals(cu.interval, 1)
self.assertEquals(cu.concurrency, 2)
self.assertEquals(cu.node_timeout, 5)
self.assert_(cu.get_account_ring() is not None)
def test_run_once(self):
cu = container_updater.ContainerUpdater({
'devices': self.devices_dir,
'mount_check': 'false',
'swift_dir': self.testdir,
'interval': '1',
'concurrency': '1',
'node_timeout': '15',
'account_suppression_time': 0
})
cu.run_once()
containers_dir = os.path.join(self.sda1, container_server.DATADIR)
os.mkdir(containers_dir)
cu.run_once()
self.assert_(os.path.exists(containers_dir))
subdir = os.path.join(containers_dir, 'subdir')
os.mkdir(subdir)
cb = ContainerBroker(os.path.join(subdir, 'hash.db'), account='a',
container='c')
cb.initialize(normalize_timestamp(1))
cu.run_once()
info = cb.get_info()
self.assertEquals(info['object_count'], 0)
self.assertEquals(info['bytes_used'], 0)
self.assertEquals(info['reported_object_count'], 0)
self.assertEquals(info['reported_bytes_used'], 0)
cb.put_object('o', normalize_timestamp(2), 3, 'text/plain',
'68b329da9893e34099c7d8ad5cb9c940')
cu.run_once()
info = cb.get_info()
self.assertEquals(info['object_count'], 1)
self.assertEquals(info['bytes_used'], 3)
self.assertEquals(info['reported_object_count'], 0)
self.assertEquals(info['reported_bytes_used'], 0)
def accept(sock, addr, return_code):
try:
with Timeout(3):
inc = sock.makefile('rb')
out = sock.makefile('wb')
out.write('HTTP/1.1 %d OK\r\nContent-Length: 0\r\n\r\n' %
return_code)
out.flush()
self.assertEquals(inc.readline(),
'PUT /sda1/0/a/c HTTP/1.1\r\n')
headers = {}
line = inc.readline()
while line and line != '\r\n':
headers[line.split(':')[0].lower()] = \
line.split(':')[1].strip()
line = inc.readline()
self.assert_('x-put-timestamp' in headers)
self.assert_('x-delete-timestamp' in headers)
self.assert_('x-object-count' in headers)
self.assert_('x-bytes-used' in headers)
except BaseException, err:
import traceback
traceback.print_exc()
return err
return None
bindsock = listen(('127.0.0.1', 0))
def spawn_accepts():
events = []
for _junk in xrange(2):
sock, addr = bindsock.accept()
events.append(spawn(accept, sock, addr, 201))
return events
spawned = spawn(spawn_accepts)
for dev in cu.get_account_ring().devs:
if dev is not None:
dev['port'] = bindsock.getsockname()[1]
cu.run_once()
for event in spawned.wait():
err = event.wait()
if err:
raise err
info = cb.get_info()
self.assertEquals(info['object_count'], 1)
self.assertEquals(info['bytes_used'], 3)
        self.assertEquals(info['reported_object_count'], 1)
self.assertEquals(info['reported_bytes_used'], 3)
def test_unicode(self):
cu = container_updater.ContainerUpdater({
'devices': self.devices_dir,
'mount_check': 'false',
'swift_dir': self.testdir,
'interval': '1',
'concurrency': '1',
'node_timeout': '15',
})
containers_dir = os.path.join(self.sda1, container_server.DATADIR)
os.mkdir(containers_dir)
subdir = os.path.join(containers_dir, 'subdir')
os.mkdir(subdir)
cb = ContainerBroker(os.path.join(subdir, 'hash.db'), account='a',
container='\xce\xa9')
cb.initialize(normalize_timestamp(1))
cb.put_object('\xce\xa9', normalize_timestamp(2), 3, 'text/plain',
'68b329da9893e34099c7d8ad5cb9c940')
def accept(sock, addr):
try:
with Timeout(3):
inc = sock.makefile('rb')
out = sock.makefile('wb')
out.write('HTTP/1.1 201 OK\r\nContent-Length: 0\r\n\r\n')
out.flush()
inc.read()
except BaseException, err:
import traceback
traceback.print_exc()
return err
return None
bindsock = listen(('127.0.0.1', 0))
def spawn_accepts():
events = []
for _junk in xrange(2):
with Timeout(3):
sock, addr = bindsock.accept()
events.append(spawn(accept, sock, addr))
return events
spawned = spawn(spawn_accepts)
for dev in cu.get_account_ring().devs:
if dev is not None:
dev['port'] = bindsock.getsockname()[1]
cu.run_once()
for event in spawned.wait():
err = event.wait()
if err:
raise err
info = cb.get_info()
self.assertEquals(info['object_count'], 1)
self.assertEquals(info['bytes_used'], 3)
self.assertEquals(info['reported_object_count'], 1)
        self.assertEquals(info['reported_bytes_used'], 3)
|
Patreon/cartographer
|
cartographer/exceptions/request_exceptions.py
|
Python
|
apache-2.0
| 2,455
| 0.001222
|
AUTHENTICATION_REQUIRED_401 = 401
NOT_AUTHORIZED_403 = 403
INVALID_REQUEST_400 = 400
REQUEST_ENTITY_TOO_LARGE_413 = 413
NOT_FOUND_404 = 404
INTERNAL_SERVER_ERROR_500 = 500
RESOURCE_CONFLICT_409 = 409
PAYMENT_REQUIRED_402 = 402
HEADER_PRECONDITIONS_FAILED = 412
LOCKED_423 = 423
TOO_MANY_REQUESTS_429 = 429
class JSONAPIException(Exception):
status_code = INTERNAL_SERVER_ERROR_500
error_title = "Internal Error."
error_type_ = None
error_description = None
@property
def error_type(self):
return self.error_type_ or type(self).__name__
def __init__(self, status_code=None, error_type=None, error_title=None,
error_description=None):
self.status_code = status_code or self.status_code
self.error_title = error_title or self.error_title
self.error_description = error_description or self.error_description or self.error_title
self.error_type_ = error_type
class ParameterMissing(JSONAPIException):
status_code = INVALID_REQUEST_400
parameter_name = None
def __init__(self, parameter_name=None):
self.error_title = \
"Parameter '{}'
|
is missing.".format(parameter_name or self.parameter_name)
class ParameterInvalid(JSONAPIException):
status_code = INVALID_REQUEST_400
    def __init__(self, parameter_name, parameter_value):
self.error_title = \
"Invalid value for parameter '{}'.".format(parameter_name)
self.error_description = \
"Invalid parameter for '{0}': {1}.".format(parameter_name, parameter_value)
class BadPageCountParameter(ParameterInvalid):
def __init__(self, parameter_value):
super().__init__(parameter_name='page[count]',
parameter_value=parameter_value)
self.error_description = "Page sizes must be integers."
class BadPageCursorParameter(ParameterInvalid):
def __init__(self, parameter_value):
super().__init__(parameter_name='page[cursor]',
parameter_value=parameter_value)
self.error_description = "Provided cursor was not parsable."
class BadPageOffsetParameter(ParameterInvalid):
def __init__(self, parameter_value):
super().__init__(parameter_name='page[offset]',
parameter_value=parameter_value)
self.error_description = "Page offsets must be integers."
class DataMissing(ParameterMissing):
parameter_name = 'data'
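A hedged usage sketch: request-handling code would raise one of these and a framework layer would serialize it into a JSON:API error response; the parameter name is illustrative.

try:
    raise ParameterMissing('page[cursor]')
except JSONAPIException as e:
    print(e.status_code)   # 400
    print(e.error_type)    # 'ParameterMissing'
    print(e.error_title)   # Parameter 'page[cursor]' is missing.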
|
shapiromatron/tblBuilder
|
src/private/scripts/ftpScraper.py
|
Python
|
mit
| 3,966
| 0.000756
|
from io import BytesIO
import base64
import json
import os
import urllib.parse
import six
import sys
from ftptool import FTPHost
import xlsxwriter
# from https://docs.djangoproject.com/en/1.10/_modules/django/utils/encoding/
def smart_text(s, encoding="utf-8", strings_only=False, errors="strict"):
"""
Returns a text object representing 's' -- unicode on Python 2 and str on
Python 3. Treats bytestrings using the 'encoding' codec.
If strings_only is True, don't convert (some) non-string-like objects.
"""
return force_text(s, encoding, strings_only, errors)
# from https://docs.djangoproject.com/en/1.10/_modules/django/utils/encoding/
def force_text(s, encoding="utf-8", strings_only=False, errors="strict"):
"""
Similar to smart_text, except that lazy instances are resolved to
strings, rather than kept as lazy objects.
If strings_only is True, don't convert (some) non-string-like objects.
"""
# Handle the common case first for performance reasons.
if issubclass(type(s), six.text_type):
return s
try:
if not issubclass(type(s), six.string_types):
if six.PY3:
if isinstance(s, bytes):
s = six.text_type(s, encoding, errors)
else:
s = six.text_type(s)
elif hasattr(s, "__unicode__"):
s = six.text_type(s)
else:
s = six.text_type(bytes(s), encoding, errors)
else:
# Note: We use .decode() here, instead of six.text_type(s, encoding,
# errors), so that if s is a SafeBytes, it ends up being a
# SafeText at the end.
s = s.decode(encoding, errors)
except UnicodeDecodeError as e:
# If we get to here, the caller has passed in an Exception
# subclass populated with non-ASCII bytestring data without a
# working unicode method. Try to handle this without raising a
# further exception by individually forcing the exception args
# to unicode.
s = " ".join(force_text(arg, encoding, strings_only, errors) for arg in s)
return s
def _get_ftp_data(data):
outputs = []
conn = FTPHost.connect(
data["address"], user=data["user"], password=data["password"]
)
for (dirname, subdirs, files) in conn.walk(data.get("path", "/")):
outputs.append((dirname, files))
conn.try_quit()
return outputs
def _get_root_url(data):
return f'ftp://{data["user"]}:{data["password"]}@{data["address"]}'
def _populate_workbook(wb, root_url, data):
ws = wb.add_worksheet()
# write header rows
ws.write(0, 0, "Folder")
ws.write(0, 1, "Filename")
ws.write(0, 2, "URL")
parser = urllib.parse.quote
# write data rows
row = 0
for path, files in data:
for fn in files:
row += 1
path_url = parser(
                os.path.join(path.decode("utf8"), fn.decode("utf8")).encode("utf8")
)
url = root_url + path_url
ws.write(row, 0, smart_text(path))
ws.write(row, 1, smart_text(fn))
ws.write(row, 2, smart_text(url))
# setup header and autofilter
bold = wb.add_format({"bold": True})
ws.set_row(0, None, bold)
ws.autofilter(f"A1:C{row + 1}")
# set widths
ws.set_column("A:A", 30)
ws.set_column("B:B", 65)
ws.set_column("C:C", 100)
def _generate_xlsx(data):
# create workbook
output = BytesIO()
wb = xlsxwriter.Workbook(output, {"constant_memory": True})
# add stuff to workbook
ftp_data = _get_ftp_data(data)
root_url = _get_root_url(data)
_populate_workbook(wb, root_url, ftp_data)
# return base64 encoded workbook
wb.close()
output.seek(0)
    return base64.b64encode(output.read()).decode("ascii")
if __name__ == "__main__":
for data in sys.stdin:
b64 = _generate_xlsx(json.loads(data))
print(json.dumps({"xlsx": b64}))
|