| code | repo_name | path | language | license | size |
|---|---|---|---|---|---|
# This file is part of Shoop.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
import decimal
from ._price import TaxfulPrice, TaxlessPrice
from ._priceful_properties import TaxfulFrom, TaxlessFrom
class Priceful(object):
"""
Mixin to define price properties based on other price properties.
You must provide at least
* ``quantity`` (`~decimal.Decimal`)
and both
* ``base_unit_price`` (`~shoop.core.pricing.Price`) and
* ``discount_amount`` (`~shoop.core.pricing.Price`)
or both
* ``price`` (`~shoop.core.pricing.Price`) and
* ``base_price`` (`~shoop.core.pricing.Price`).
You may also provide
* ``tax_amount`` (`~shoop.utils.money.Money`)
to get various tax related properties.
Provided ``base_unit_price``, ``discount_amount``, ``price``,
``base_price``, and ``tax_amount`` must have compatible units
(i.e. same taxness and currency).
Invariants:
* ``price = base_unit_price * quantity - discount_amount``
* ``discount_amount = base_price - price``
* ``discount_rate = 1 - (price / base_price)``
* ``discount_percentage = 100 * discount_rate``
* ``unit_discount_amount = discount_amount / quantity``
* ``taxful_price = taxless_price + tax_amount``
* ``tax_rate = (taxful_price.amount / taxless_price.amount) - 1``
* ``tax_percentage = 100 * tax_rate``
"""
@property
def price(self):
"""
Total price for the specified quantity with discount.
:rtype: shoop.core.pricing.Price
"""
return self.base_unit_price * self.quantity - self.discount_amount
@property
def base_price(self):
"""
Total price for the specified quantity excluding discount.
:rtype: shoop.core.pricing.Price
"""
return self.price + self.discount_amount
@property
def base_unit_price(self):
"""
Undiscounted unit price.
Note: If quantity is 0, will return ``base_price``.
:rtype: shoop.core.pricing.Price
"""
return self.base_price / (self.quantity or 1)
@property
def discount_amount(self):
"""
Amount of discount for the total quantity.
:rtype: shoop.core.pricing.Price
"""
return (self.base_price - self.price)
@property
def discount_rate(self):
"""
Discount rate, 1 meaning totally discounted.
Note: Could be negative, when base price is smaller than
effective price. Could also be greater than 1, when effective
price is negative.
If base price is 0, will return 0.
:rtype: decimal.Decimal
"""
if not self.base_price:
return decimal.Decimal(0)
return 1 - (self.price / self.base_price)
@property
def discount_percentage(self):
"""
Discount percentage, 100 meaning totally discounted.
See `discount_rate`.
:rtype: decimal.Decimal
"""
return self.discount_rate * 100
@property
def is_discounted(self):
"""
Check if there is a discount in effect.
:return: True, iff price < base price.
"""
return (self.price < self.base_price)
@property
def discounted_unit_price(self):
"""
Unit price with discount.
If quantity is 0, will return ``base_unit_price - discount_amount``.
:rtype: shoop.core.pricing.Price
"""
return self.base_unit_price - (self.discount_amount / (self.quantity or 1))
@property
def unit_discount_amount(self):
"""
Discount amount per unit.
If quantity is 0, will return ``discount_amount``.
:rtype: shoop.core.pricing.Price
"""
return self.discount_amount / (self.quantity or 1)
@property
def tax_rate(self):
"""
:rtype: decimal.Decimal
"""
taxless = self.taxless_price
taxful = self.taxful_price
if not taxless.amount:
return decimal.Decimal(0)
return (taxful.amount / taxless.amount) - 1
@property
def tax_percentage(self):
"""
:rtype: decimal.Decimal
"""
return self.tax_rate * 100
@property
def taxful_price(self):
"""
:rtype: TaxfulPrice
"""
price = self.price
if price.includes_tax:
return price
else:
return TaxfulPrice(price.amount + self.tax_amount)
@property
def taxless_price(self):
"""
:rtype: TaxlessPrice
"""
price = self.price
if price.includes_tax:
return TaxlessPrice(price.amount - self.tax_amount)
else:
return price
taxful_base_price = TaxfulFrom('base_price')
taxless_base_price = TaxlessFrom('base_price')
taxful_discount_amount = TaxfulFrom('discount_amount')
taxless_discount_amount = TaxlessFrom('discount_amount')
taxful_base_unit_price = TaxfulFrom('base_unit_price')
taxless_base_unit_price = TaxlessFrom('base_unit_price')
taxful_discounted_unit_price = TaxfulFrom('discounted_unit_price')
taxless_discounted_unit_price = TaxlessFrom('discounted_unit_price')
taxful_unit_discount_amount = TaxfulFrom('unit_discount_amount')
taxless_unit_discount_amount = TaxlessFrom('unit_discount_amount')
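# --- Illustrative sketch (not part of Shoop) --------------------------------
# Plain decimals stand in for shoop.core.pricing.Price objects here, so only
# the quantity/discount invariants documented above are exercised; the
# taxful/taxless properties need real Price objects with an ``amount`` and
# ``includes_tax``.
class _ExampleLine(Priceful):
    quantity = decimal.Decimal("3")
    base_unit_price = decimal.Decimal("10.00")
    discount_amount = decimal.Decimal("5.00")

_line = _ExampleLine()
assert _line.price == decimal.Decimal("25.00")       # 10.00 * 3 - 5.00
assert _line.base_price == decimal.Decimal("30.00")  # price + discount_amount
assert _line.unit_discount_amount == decimal.Decimal("5.00") / 3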
| akx/shoop | shoop/core/pricing/_priceful.py | Python | agpl-3.0 | 5,549 |
# -*- coding: utf-8 -*-
import os
import sys
import cPickle
import numpy as np
from sklearn.cluster import KMeans
from sklearn.cluster.k_means_ import _init_centroids
from sklearn.metrics.pairwise import chi2_kernel
from sklearn.metrics.pairwise import pairwise_distances
from antispoofing.spectralcubes.utils import N_JOBS
class MidLevelFeatures(object):
cs_dict = {"random": 0, "kmeans": 1}
cp_dict = ["hardsum", "hardmax", "softmax"]
sdd_dict = {"unified": 0, "class_based": 1}
seed = 42
debug = True
def __init__(self, meta, input_path, output_path,
codebook_path=None,
codebook_selection="random",
codebook_build="unified",
codebook_size=80,
coding_poling="hardsum",
file_type="npy",
n_jobs=N_JOBS):
# -- private attributes
self.__input_path = ""
self.__output_path = ""
self.__codebook_selection = {}
self.__codebook_build = {}
self.__coding_poling = {}
# -- public attributes
self._meta = meta
self.input_path = input_path
self.output_path = output_path
self.codebook_path = codebook_path
self.codebook_selection = codebook_selection
self.codebook_build = codebook_build
self.coding_poling = coding_poling
self.codebook_size = codebook_size
self.file_type = file_type
self.variance = 0.04
self.n_jobs = n_jobs
if codebook_path is None:
self._fname_codebook_pos = "{0}/codebook/positive_class.codebook".format(self.output_path)
self._fname_codebook_neg = "{0}/codebook/negative_class.codebook".format(self.output_path)
self._fname_codebook_unified = "{0}/codebook/unified.codebook".format(self.output_path)
else:
self._fname_codebook_pos = "{0}/codebook/positive_class.codebook".format(codebook_path)
self._fname_codebook_neg = "{0}/codebook/negative_class.codebook".format(codebook_path)
self._fname_codebook_unified = "{0}/codebook/unified.codebook".format(codebook_path)
@property
def input_path(self):
return self.__input_path
@input_path.setter
def input_path(self, path):
self.__input_path = os.path.abspath(path)
@property
def output_path(self):
return self.__output_path
@output_path.setter
def output_path(self, path):
path = os.path.abspath(path)
self.__output_path = path
@property
def codebook_selection(self):
return self.__codebook_selection
@codebook_selection.setter
def codebook_selection(self, value):
try:
assert value in self.cs_dict
self.__codebook_selection = self.cs_dict[value]
except AssertionError:
raise AssertionError("Value not found: choose 'random' or 'kmeans'")
@property
def coding_poling(self):
return self.__coding_poling
@coding_poling.setter
def coding_poling(self, value):
try:
assert value in self.cp_dict
self.__coding_poling = value
except AssertionError:
raise AssertionError("Value not found: choose 'hardsum', 'hardmax', or 'softmax'")
@property
def codebook_build(self):
return self.__codebook_build
@codebook_build.setter
def codebook_build(self, value):
try:
self.__codebook_build = self.sdd_dict[value]
except KeyError:
raise KeyError("Value not found: choose 'unified' or 'class_based'")
def __load_features(self, fnames):
feats = []
for i, fname in enumerate(fnames):
if 'npy' in self.file_type:
feats += [np.load(fname)]
else:
values = np.loadtxt(fname, delimiter=',')
values = values[:, np.newaxis, :]
feats += [values]
return np.array(feats)
def __load_train_features(self):
if self.debug:
print '\t- loading low level features ...'
sys.stdout.flush()
all_labels = self._meta['all_labels']
all_fnames = self._meta['all_fnames']
train_idxs = self._meta['train_idxs']
return all_labels[train_idxs], self.__load_features(all_fnames[train_idxs])
def __load_all_features(self):
if self.debug:
print '\t- loading low level features ...'
sys.stdout.flush()
all_fnames = self._meta['all_fnames']
all_labels = self._meta['all_labels']
return all_labels, self.__load_features(all_fnames), all_fnames
def create_codebook(self, features, _class='label'):
if self.debug:
print '\t- creating visual codebook for {0} ...'.format(_class)
print '\t- features.shape', features.shape
sys.stdout.flush()
n_feats, n_cuboids, cuboid_depth = features.shape
features = features.reshape(-1, cuboid_depth)
if self.codebook_selection == self.cs_dict["kmeans"]:
codebook = KMeans(init='k-means++', n_clusters=self.codebook_size, n_init=50,
tol=1e-10, max_iter=1000, random_state=self.seed, n_jobs=self.n_jobs)
codebook.fit(features)
return codebook
else:
codebook = KMeans(init='random', n_clusters=self.codebook_size, n_init=1,
tol=1e-10, max_iter=1, random_state=self.seed, n_jobs=self.n_jobs)
codebook.cluster_centers_ = _init_centroids(features, k=self.codebook_size, init='random', random_state=self.seed)
return codebook
@staticmethod
def pickle(fname, data):
try:
os.makedirs(os.path.dirname(fname))
except OSError:
pass
fo = open(fname, 'wb')
cPickle.dump(data, fo)
fo.close()
@staticmethod
def unpickle(fname):
fo = open(fname, 'rb')
data = cPickle.load(fo)
fo.close()
return data
def build_unified_codebook(self, feats):
if not (os.path.exists(self._fname_codebook_unified)):
codebook_unified = self.create_codebook(feats)
self.pickle(self._fname_codebook_unified, codebook_unified)
def build_class_based_codebook(self, labels, feats):
if not (os.path.exists(self._fname_codebook_pos)):
train_idxs_pos = np.where(labels == 1)
feats_train_pos = feats[train_idxs_pos]
codebook_pos = self.create_codebook(feats_train_pos, _class='pos')
self.pickle(self._fname_codebook_pos, codebook_pos)
if not (os.path.exists(self._fname_codebook_neg)):
train_idxs_neg = np.where(labels == 0)
feats_train_neg = feats[train_idxs_neg]
codebook_neg = self.create_codebook(feats_train_neg, _class='neg')
self.pickle(self._fname_codebook_neg, codebook_neg)
def coding_class_based(self, codebook_pos_, codebook_neg_, feats_):
feats = feats_.copy()
codebook_pos = codebook_pos_.copy()
codebook_neg = codebook_neg_.copy()
if self.debug:
print '\t- coding features ...'
sys.stdout.flush()
if 'hard' in self.coding_poling:
print "\t- feats.shape", feats.shape
coded_feats = np.zeros((feats.shape[:2] + (self.codebook_size + self.codebook_size,)), dtype=np.int)
feats = feats.reshape(feats.shape[0], feats.shape[1], -1)
idxs_cuboid = np.arange(feats.shape[1])
codebook_pos -= codebook_pos.min(axis=1).reshape(-1, 1)
codebook_neg -= codebook_neg.min(axis=1).reshape(-1, 1)
for sample in range(feats.shape[0]):
feats[sample] -= feats[sample].min(axis=1).reshape(-1, 1)
dists_pos = pairwise_distances(feats[sample], codebook_pos, metric="cosine")
dists_neg = pairwise_distances(feats[sample], codebook_neg, metric="cosine")
dists = np.hstack((dists_neg, dists_pos))
idxs = np.argmin(dists, axis=1)
coded_feats[sample, idxs_cuboid, idxs] = 1
elif 'soft' in self.coding_poling:
print "\t- feats.shape", feats.shape
coded_feats = np.zeros((feats.shape[:2] + (self.codebook_size + self.codebook_size,)), dtype=np.float)
feats = feats.reshape(feats.shape[0], feats.shape[1], -1)
beta = 1.0 / (2.0 * self.variance)
codebook_pos -= codebook_pos.min(axis=1).reshape(-1, 1)
codebook_neg -= codebook_neg.min(axis=1).reshape(-1, 1)
for sample in range(feats.shape[0]):
feats[sample] -= feats[sample].min(axis=1).reshape(-1, 1)
dists_pos = chi2_kernel(feats[sample], codebook_pos, gamma=beta)
dists_neg = chi2_kernel(feats[sample], codebook_neg, gamma=beta)
cfnorm = dists_pos.sum(axis=1).reshape(-1, 1)
cfnorm[cfnorm == 0] = 1.
dists_pos /= cfnorm
cfnorm = dists_neg.sum(axis=1).reshape(-1, 1)
cfnorm[cfnorm == 0] = 1.
dists_neg /= cfnorm
coded_feats[sample] = np.hstack((dists_neg, dists_pos))
else:
raise ValueError('Coding method not implemented')
return coded_feats
def coding_unified(self, codebook_, feats_):
feats = feats_.copy()
codebook = codebook_.copy()
if self.debug:
print '\t- coding features ...'
sys.stdout.flush()
if 'hard' in self.coding_poling:
coded_feats = np.zeros((feats.shape[:2] + (self.codebook_size,)), dtype=np.int)
feats = feats.reshape(feats.shape[0], feats.shape[1], -1)
idxs_cuboid = np.arange(feats.shape[1])
codebook -= codebook.min(axis=1).reshape(-1, 1)
for sample in range(feats.shape[0]):
feats[sample] -= feats[sample].min(axis=1).reshape(-1, 1)
idxs = np.argmin(pairwise_distances(feats[sample], codebook, metric="cosine"), axis=1)
coded_feats[sample, idxs_cuboid, idxs] = 1
elif 'soft' in self.coding_poling:
coded_feats = np.zeros((feats.shape[:2] + (self.codebook_size,)), dtype=np.float)
beta = 1.0 / (2.0 * self.variance)
codebook -= codebook.min(axis=1).reshape(-1, 1)
for sample in range(feats.shape[0]):
feats[sample] -= feats[sample].min(axis=1).reshape(-1, 1)
coded_feats[sample] = chi2_kernel(feats[sample], codebook, gamma=beta)
cfnorm = coded_feats[sample].sum(axis=1).reshape(-1, 1)
cfnorm[cfnorm == 0] = 1.
coded_feats[sample] /= cfnorm
else:
raise ValueError('Coding method not implemented')
return coded_feats
def pooling(self, coded_feats):
if self.debug:
print '\t- pooling features ...'
sys.stdout.flush()
if 'sum' in self.coding_poling:
pooled_feats = []
for sample in range(coded_feats.shape[0]):
pooled_feats += [coded_feats[sample].sum(axis=0)]
pooled_feats = np.array(pooled_feats)
elif 'max' in self.coding_poling:
pooled_feats = []
for sample in range(coded_feats.shape[0]):
pooled_feats += [coded_feats[sample].max(axis=0)]
pooled_feats = np.array(pooled_feats)
else:
raise ValueError('Pooling method not implemented')
return pooled_feats
def feature_extraction_with_unified_codebook(self, feats):
codebook = self.unpickle(self._fname_codebook_unified)
coded_feats = self.coding_unified(codebook.cluster_centers_, feats)
pooled_feats = self.pooling(coded_feats)
return pooled_feats
def feature_extraction_with_class_based_dictionary(self, feats):
codebook_pos = self.unpickle(self._fname_codebook_pos)
codebook_neg = self.unpickle(self._fname_codebook_neg)
# coded_feats = self.coding_class_based(codebook_pos.cluster_centers_, codebook_neg.cluster_centers_, feats)
coded_feats_neg = self.coding_unified(codebook_neg.cluster_centers_, feats)
coded_feats_pos = self.coding_unified(codebook_pos.cluster_centers_, feats)
coded_feats = np.concatenate((coded_feats_neg, coded_feats_pos), axis=2)
del coded_feats_neg
del coded_feats_pos
pooled_feats = self.pooling(coded_feats)
return pooled_feats
def save_features(self, mid_level_feats, fnames):
print '\t- saving mid level features ...'
sys.stdout.flush()
for fname, feat_vector in zip(fnames, mid_level_feats):
relfname = os.path.relpath(fname, self.input_path)
output_fname = os.path.join(self.output_path, self.coding_poling, relfname)
# output_fname = output_fname.replace("llf", "mlf")
try:
os.makedirs(os.path.dirname(output_fname))
except OSError:
pass
np.save(output_fname, feat_vector[np.newaxis, :])
def build_codebook(self):
if self.codebook_build == self.sdd_dict["class_based"]:
labels, feats = self.__load_train_features()
self.build_class_based_codebook(labels, feats)
else:
labels, feats = self.__load_train_features()
self.build_unified_codebook(feats)
return True
def run(self):
if self.codebook_build == self.sdd_dict["class_based"]:
all_labels, all_feats, all_fnames = self.__load_all_features()
all_mid_level_feats = self.feature_extraction_with_class_based_dictionary(all_feats)
self.save_features(all_mid_level_feats, all_fnames)
else:
all_labels, all_feats, all_fnames = self.__load_all_features()
all_mid_level_feats = self.feature_extraction_with_unified_codebook(all_feats)
self.save_features(all_mid_level_feats, all_fnames)
return True
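# Usage sketch (not part of the original module). The ``meta`` layout and the
# paths below are assumptions inferred from __load_all_features(),
# __load_train_features() and build_class_based_codebook():
#
#   meta = {'all_fnames': np.array([...]),   # one low-level feature file per sample
#           'all_labels': np.array([...]),   # 1 = positive class, 0 = negative class
#           'train_idxs': np.array([...])}   # indices used to build the codebook
#   mlf = MidLevelFeatures(meta, 'features/llf', 'features/mlf',
#                          codebook_selection='kmeans',
#                          codebook_build='unified',
#                          coding_poling='softmax')
#   mlf.build_codebook()
#   mlf.run()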
| allansp84/spectralcubes | antispoofing/spectralcubes/midlevelfeatures/midlevelfeatures.py | Python | agpl-3.0 | 14,264 |
#!/usr/bin/env python
from distutils.core import setup
setup(
name='smon',
version='0.8',
description='Scrapy monitoring tools',
author='Jordi Burguet-Castell',
author_email='jordi@mp2p.net',
py_modules=['smon_utils'],
license='GNU General Public License, version 3',
scripts=['smon', 'smon-ls', 'smon-add', 'smon-rm'])
| torrents-com/content | scrapy/tools/setup.py | Python | agpl-3.0 | 358 |
##############################################################################
#
# Copyright (C) 2017 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from . import generate_communication_wizard
| CompassionCH/compassion-modules | thankyou_letters/wizards/__init__.py | Python | agpl-3.0 | 423 |
import os
activate_this = os.path.join('/home/ckan/.virtualenvs/ckan/bin/activate_this.py')
execfile(activate_this, dict(__file__=activate_this))
from paste.deploy import loadapp
config_filepath = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'prod.ini')
from paste.script.util.logging_config import fileConfig
fileConfig(config_filepath)
application = loadapp('config:%s' % config_filepath)
| DanePubliczneGovPl/ckanext-danepubliczne | config/ckan.py | Python | agpl-3.0 | 474 |
""" Password reset logic and views . """
import logging
from django import forms
from django.conf import settings
from django.contrib import messages
from django.contrib.auth.forms import PasswordResetForm, SetPasswordForm
from django.contrib.auth.hashers import UNUSABLE_PASSWORD_PREFIX
from django.contrib.auth.models import User
from django.contrib.auth.tokens import default_token_generator
from django.contrib.auth.views import INTERNAL_RESET_SESSION_TOKEN, PasswordResetConfirmView
from django.core.exceptions import ObjectDoesNotExist
from django.core.validators import ValidationError
from django.http import Http404, HttpResponse, HttpResponseBadRequest, HttpResponseForbidden, HttpResponseRedirect
from django.template.response import TemplateResponse
from django.urls import reverse
from django.utils.decorators import method_decorator
from django.utils.encoding import force_bytes, force_text
from django.utils.http import base36_to_int, int_to_base36, urlsafe_base64_encode
from django.utils.translation import ugettext_lazy as _
from django.views.decorators.csrf import csrf_exempt, ensure_csrf_cookie
from django.views.decorators.http import require_POST
from edx_ace import ace
from edx_ace.recipient import Recipient
from eventtracking import tracker
from rest_framework.views import APIView
from edxmako.shortcuts import render_to_string
from openedx.core.djangoapps.ace_common.template_context import get_base_template_context
from openedx.core.djangoapps.lang_pref import LANGUAGE_KEY
from openedx.core.djangoapps.oauth_dispatch.api import destroy_oauth_tokens
from openedx.core.djangoapps.site_configuration import helpers as configuration_helpers
from openedx.core.djangoapps.theming.helpers import get_current_request, get_current_site
from openedx.core.djangoapps.user_api import accounts, errors, helpers
from openedx.core.djangoapps.user_api.accounts.utils import is_secondary_email_feature_enabled
from openedx.core.djangoapps.user_api.helpers import FormDescription
from openedx.core.djangoapps.user_api.models import UserRetirementRequest
from openedx.core.djangoapps.user_api.preferences.api import get_user_preference
from openedx.core.djangoapps.user_authn.message_types import PasswordReset
from openedx.core.djangolib.markup import HTML
from student.forms import send_account_recovery_email_for_user
from student.models import AccountRecovery
from util.json_request import JsonResponse
from util.password_policy_validators import normalize_password, validate_password
from util.request_rate_limiter import PasswordResetEmailRateLimiter
SETTING_CHANGE_INITIATED = 'edx.user.settings.change_initiated'
# Maintaining this naming for backwards compatibility.
log = logging.getLogger("edx.student")
AUDIT_LOG = logging.getLogger("audit")
def get_password_reset_form():
"""Return a description of the password reset form.
This decouples clients from the API definition:
if the API decides to modify the form, clients won't need
to be updated.
See `user_api.helpers.FormDescription` for examples
of the JSON-encoded form description.
Returns:
FormDescription
"""
form_desc = FormDescription("post", reverse("password_change_request"))
# Translators: This label appears above a field on the password reset
# form meant to hold the user's email address.
email_label = _(u"Email")
# Translators: This example email address is used as a placeholder in
# a field on the password reset form meant to hold the user's email address.
email_placeholder = _(u"username@domain.com")
# Translators: These instructions appear on the password reset form,
# immediately below a field meant to hold the user's email address.
# pylint: disable=no-member
email_instructions = _(u"The email address you used to register with {platform_name}").format(
platform_name=configuration_helpers.get_value('PLATFORM_NAME', settings.PLATFORM_NAME)
)
form_desc.add_field(
"email",
field_type="email",
label=email_label,
placeholder=email_placeholder,
instructions=email_instructions,
restrictions={
"min_length": accounts.EMAIL_MIN_LENGTH,
"max_length": accounts.EMAIL_MAX_LENGTH,
}
)
return form_desc
def send_password_reset_email_for_user(user, request, preferred_email=None):
"""
Send out a password reset email for the given user.
Arguments:
user (User): Django User object
request (HttpRequest): Django request object
preferred_email (str): Send email to this address if present, otherwise fallback to user's email address.
"""
site = get_current_site()
message_context = get_base_template_context(site)
message_context.update({
'request': request, # Used by google_analytics_tracking_pixel
# TODO: This overrides `platform_name` from `get_base_template_context` to make the tests pass
'platform_name': configuration_helpers.get_value('PLATFORM_NAME', settings.PLATFORM_NAME),
'reset_link': '{protocol}://{site}{link}?track=pwreset'.format(
protocol='https' if request.is_secure() else 'http',
site=configuration_helpers.get_value('SITE_NAME', settings.SITE_NAME),
link=reverse('password_reset_confirm', kwargs={
'uidb36': int_to_base36(user.id),
'token': default_token_generator.make_token(user),
}),
)
})
msg = PasswordReset().personalize(
recipient=Recipient(user.username, preferred_email or user.email),
language=get_user_preference(user, LANGUAGE_KEY),
user_context=message_context,
)
ace.send(msg)
class PasswordResetFormNoActive(PasswordResetForm):
"""
A modified version of the default Django password reset form to handle
unknown or unusable email addresses without leaking data.
"""
error_messages = {
'unknown': _("That e-mail address doesn't have an associated "
"user account. Are you sure you've registered?"),
'unusable': _("The user account associated with this e-mail "
"address cannot reset the password."),
}
is_account_recovery = True
users_cache = []
def clean_email(self):
"""
This is a literal copy of Django 1.4.5's django.contrib.auth.forms.PasswordResetForm,
except that it removes the requirement that the user be active.
Validates that a user exists with the given email address.
"""
email = self.cleaned_data["email"]
# The line below contains the only change, removing is_active=True
self.users_cache = User.objects.filter(email__iexact=email)
if not self.users_cache and is_secondary_email_feature_enabled():
# Check if user has entered the secondary email.
self.users_cache = User.objects.filter(
id__in=AccountRecovery.objects.filter(secondary_email__iexact=email, is_active=True).values_list('user')
)
self.is_account_recovery = not bool(self.users_cache)
if not self.users_cache:
raise forms.ValidationError(self.error_messages['unknown'])
if any((user.password.startswith(UNUSABLE_PASSWORD_PREFIX))
for user in self.users_cache):
raise forms.ValidationError(self.error_messages['unusable'])
return email
def save(self, # pylint: disable=arguments-differ
use_https=False,
token_generator=default_token_generator,
request=None,
**_kwargs):
"""
Generates a one-use only link for resetting password and sends to the
user.
"""
for user in self.users_cache:
if self.is_account_recovery:
send_password_reset_email_for_user(user, request)
else:
send_account_recovery_email_for_user(user, request, user.account_recovery.secondary_email)
class PasswordResetView(APIView):
"""HTTP end-point for GETting a description of the password reset form. """
# This end-point is available to anonymous users,
# so do not require authentication.
authentication_classes = []
@method_decorator(ensure_csrf_cookie)
def get(self, request):
return HttpResponse(get_password_reset_form().to_json(), content_type="application/json")
@helpers.intercept_errors(errors.UserAPIInternalError, ignore_errors=[errors.UserAPIRequestError])
def request_password_change(email, is_secure):
"""Email a single-use link for performing a password reset.
Users must confirm the password change before we update their information.
Args:
email (str): An email address
is_secure (bool): Whether the request was made with HTTPS
Returns:
None
Raises:
errors.UserNotFound
AccountRequestError
errors.UserAPIInternalError: the operation failed due to an unexpected error.
"""
# Binding data to a form requires that the data be passed as a dictionary
# to the Form class constructor.
form = PasswordResetFormNoActive({'email': email})
# Validate that a user exists with the given email address.
if form.is_valid():
# Generate a single-use link for performing a password reset
# and email it to the user.
form.save(
from_email=configuration_helpers.get_value('email_from_address', settings.DEFAULT_FROM_EMAIL),
use_https=is_secure,
request=get_current_request(),
)
else:
# No user with the provided email address exists.
raise errors.UserNotFound
@csrf_exempt
@require_POST
def password_reset(request):
"""
Attempts to send a password reset e-mail.
"""
password_reset_email_limiter = PasswordResetEmailRateLimiter()
if password_reset_email_limiter.is_rate_limit_exceeded(request):
AUDIT_LOG.warning("Password reset rate limit exceeded")
return JsonResponse(
{
'success': False,
'value': _("Your previous request is in progress, please try again in a few moments.")
},
status=403
)
form = PasswordResetFormNoActive(request.POST)
if form.is_valid():
form.save(use_https=request.is_secure(),
from_email=configuration_helpers.get_value('email_from_address', settings.DEFAULT_FROM_EMAIL),
request=request)
# When password change is complete, a "edx.user.settings.changed" event will be emitted.
# But because changing the password is multi-step, we also emit an event here so that we can
# track where the request was initiated.
tracker.emit(
SETTING_CHANGE_INITIATED,
{
"setting": "password",
"old": None,
"new": None,
"user_id": request.user.id,
}
)
destroy_oauth_tokens(request.user)
else:
# bad user? tick the rate limiter counter
AUDIT_LOG.info("Bad password_reset user passed in.")
password_reset_email_limiter.tick_request_counter(request)
return JsonResponse({
'success': True,
'value': render_to_string('registration/password_reset_done.html', {}),
})
def _uidb36_to_uidb64(uidb36):
"""
Needed to support old password reset URLs that use base36-encoded user IDs
https://github.com/django/django/commit/1184d077893ff1bc947e45b00a4d565f3df81776#diff-c571286052438b2e3190f8db8331a92bR231
Args:
uidb36: base36-encoded user ID
Returns: base64-encoded user ID. Otherwise returns a dummy, invalid ID
"""
try:
uidb64 = force_text(urlsafe_base64_encode(force_bytes(base36_to_int(uidb36))))
except ValueError:
uidb64 = '1' # dummy invalid ID (incorrect padding for base64)
return uidb64
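# Illustrative only (not part of the original module): for user id 1 the old
# reset URLs carry uidb36 '1', and _uidb36_to_uidb64('1') returns 'MQ' -- the
# urlsafe base64 encoding of b'1' (padding stripped) that the newer
# PasswordResetConfirmView expects.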
class PasswordResetConfirmWrapper(PasswordResetConfirmView):
"""
A wrapper around django.contrib.auth.views.PasswordResetConfirmView.
Needed because we want to set the user as active at this step.
We also optionally do some additional password policy checks.
"""
def __init__(self):
self.platform_name = PasswordResetConfirmWrapper._get_platform_name()
self.validlink = False
self.user = None
self.uidb36 = ''
self.token = ''
self.uidb64 = ''
self.uid_int = -1
def _process_password_reset_success(self, request, token, uidb64, extra_context):
self.user = self.get_user(uidb64)
form = SetPasswordForm(self.user, request.POST)
if self.token_generator.check_token(self.user, token) and form.is_valid():
self.form_valid(form)
url = reverse('password_reset_complete')
return HttpResponseRedirect(url)
else:
context = self.get_context_data()
if extra_context is not None:
context.update(extra_context)
return self.render_to_response(context)
def _get_token_from_session(self, request):
"""
Internal method to get password reset token from session.
"""
return request.session[INTERNAL_RESET_SESSION_TOKEN]
@staticmethod
def _get_platform_name():
return {"platform_name": configuration_helpers.get_value('platform_name', settings.PLATFORM_NAME)}
def _set_user(self, request):
try:
self.uid_int = base36_to_int(self.uidb36)
if request.user.is_authenticated and request.user.id != self.uid_int:
raise Http404
self.user = User.objects.get(id=self.uid_int)
except (ValueError, User.DoesNotExist):
# if there's any error getting a user, just let django's
# password_reset_confirm function handle it.
return super(PasswordResetConfirmWrapper, self).dispatch(request, uidb64=self.uidb64, token=self.token,
extra_context=self.platform_name)
def _handle_retired_user(self, request):
"""
Method responsible for stopping the password reset when the user has requested retirement.
"""
context = {
'validlink': True,
'form': None,
'title': _('Password reset unsuccessful'),
'err_msg': _('Error in resetting your password.'),
}
context.update(self.platform_name)
return TemplateResponse(
request, 'registration/password_reset_confirm.html', context
)
def _validate_password(self, password, request):
try:
validate_password(password, user=self.user)
except ValidationError as err:
context = {
'validlink': True,
'form': None,
'title': _('Password reset unsuccessful'),
'err_msg': ' '.join(err.messages),
}
context.update(self.platform_name)
return TemplateResponse(
request, 'registration/password_reset_confirm.html', context
)
def _handle_password_reset_failure(self, response):
form_valid = response.context_data['form'].is_valid() if response.context_data['form'] else False
if not form_valid:
log.warning(
u'Unable to reset password for user [%s] because form is not valid. '
u'A possible cause is that the user had an invalid reset token',
self.user.username,
)
response.context_data['err_msg'] = _('Error in resetting your password. Please try again.')
return response
def _handle_primary_email_update(self, updated_user):
try:
updated_user.email = updated_user.account_recovery.secondary_email
updated_user.account_recovery.delete()
# emit an event that the user changed their secondary email to the primary email
tracker.emit(
SETTING_CHANGE_INITIATED,
{
"setting": "email",
"old": self.user.email,
"new": updated_user.email,
"user_id": updated_user.id,
}
)
except ObjectDoesNotExist:
log.error('Account recovery process initiated without AccountRecovery instance for user {username}'
.format(username=updated_user.username))
def _handle_password_creation(self, request, updated_user):
messages.success(
request,
HTML(_(
u'{html_start}Password Creation Complete{html_end}'
u'Your password has been created. {bold_start}{email}{bold_end} is now your primary login email.'
)).format(
support_url=configuration_helpers.get_value('SUPPORT_SITE_LINK', settings.SUPPORT_SITE_LINK),
html_start=HTML('<p class="message-title">'),
html_end=HTML('</p>'),
bold_start=HTML('<b>'),
bold_end=HTML('</b>'),
email=updated_user.email,
),
extra_tags='account-recovery aa-icon submission-success'
)
def post(self, request, *args, **kwargs):
# We have to make a copy of request.POST because it is a QueryDict object which is immutable until copied.
# We have to use request.POST because the password_reset_confirm method takes in the request and a user's
# password is set to the request.POST['new_password1'] field. We have to also normalize the new_password2
# field so it passes the equivalence check that new_password1 == new_password2
# In order to switch out of having to do this copy, we would want to move the normalize_password code into
# a custom User model's set_password method to ensure it is always happening upon calling set_password.
request.POST = request.POST.copy()
request.POST['new_password1'] = normalize_password(request.POST['new_password1'])
request.POST['new_password2'] = normalize_password(request.POST['new_password2'])
password = request.POST['new_password1']
response = self._validate_password(password, request)
if response:
return response
response = self._process_password_reset_success(request, self.token, self.uidb64,
extra_context=self.platform_name)
# If the password reset was unsuccessful, a template response is returned (status_code 200);
# check whether the form is invalid and, if so, show an error to the user.
# Note that if the password reset was successful we get a redirect response (status_code 302).
if response.status_code == 200:
return self._handle_password_reset_failure(response)
updated_user = User.objects.get(id=self.uid_int)
if 'is_account_recovery' in request.GET:
self._handle_primary_email_update(updated_user)
updated_user.save()
if response.status_code == 302 and 'is_account_recovery' in request.GET:
self._handle_password_creation(request, updated_user)
return response
def dispatch(self, *args, **kwargs):
self.uidb36 = kwargs.get('uidb36')
self.token = kwargs.get('token')
self.uidb64 = _uidb36_to_uidb64(self.uidb36)
# User can not get this link unless account recovery feature is enabled.
if 'is_account_recovery' in self.request.GET and not is_secondary_email_feature_enabled():
raise Http404
response = self._set_user(self.request)
if response:
return response
if UserRetirementRequest.has_user_requested_retirement(self.user):
return self._handle_retired_user(self.request)
if self.request.method == 'POST':
# Get actual token from session before processing the POST request.
# This is needed because django's post process is not called on password reset
# post request and the correct token needs to be extracted from session.
self.token = self._get_token_from_session(self.request)
return self.post(self.request, *args, **kwargs)
else:
response = super(PasswordResetConfirmWrapper, self).dispatch(
self.request,
uidb64=self.uidb64,
token=self.token,
extra_context=self.platform_name
)
if hasattr(response, 'context_data'):
response_was_successful = response.context_data.get('validlink')
if response_was_successful and not self.user.is_active:
self.user.is_active = True
self.user.save()
return response
def _get_user_from_email(email):
"""
Find a user using given email and return it.
Arguments:
email (str): primary or secondary email address of the user.
Raises:
(User.DoesNotExist): If no user is found with the given email.
(User.MultipleObjectsReturned): If more than one user is found with the given email.
Returns:
User: Django user object.
"""
try:
return User.objects.get(email=email)
except ObjectDoesNotExist:
return User.objects.filter(
id__in=AccountRecovery.objects.filter(secondary_email__iexact=email, is_active=True).values_list('user')
).get()
@require_POST
def password_change_request_handler(request):
"""Handle password change requests originating from the account page.
Uses the Account API to email the user a link to the password reset page.
Note:
The next step in the password reset process (confirmation) is currently handled
by student.views.password_reset_confirm_wrapper, a custom wrapper around Django's
password reset confirmation view.
Args:
request (HttpRequest)
Returns:
HttpResponse: 200 if the email was sent successfully
HttpResponse: 400 if there is no 'email' POST parameter
HttpResponse: 403 if the client has been rate limited
HttpResponse: 405 if using an unsupported HTTP method
Example usage:
POST /account/password
"""
password_reset_email_limiter = PasswordResetEmailRateLimiter()
if password_reset_email_limiter.is_rate_limit_exceeded(request):
AUDIT_LOG.warning("Password reset rate limit exceeded")
return HttpResponse(
_("Your previous request is in progress, please try again in a few moments."),
status=403
)
user = request.user
# Prefer logged-in user's email
email = user.email if user.is_authenticated else request.POST.get('email')
if email:
try:
request_password_change(email, request.is_secure())
user = user if user.is_authenticated else _get_user_from_email(email=email)
destroy_oauth_tokens(user)
except errors.UserNotFound:
AUDIT_LOG.info("Invalid password reset attempt")
# If enabled, send an email saying that a password reset was attempted, but that there is
# no user associated with the email
if configuration_helpers.get_value('ENABLE_PASSWORD_RESET_FAILURE_EMAIL',
settings.FEATURES['ENABLE_PASSWORD_RESET_FAILURE_EMAIL']):
site = get_current_site()
message_context = get_base_template_context(site)
message_context.update({
'failed': True,
'request': request, # Used by google_analytics_tracking_pixel
'email_address': email,
})
msg = PasswordReset().personalize(
recipient=Recipient(username='', email_address=email),
language=settings.LANGUAGE_CODE,
user_context=message_context,
)
ace.send(msg)
except errors.UserAPIInternalError as err:
log.exception(u'Error occurred during password change for user {email}: {error}'
.format(email=email, error=err))
return HttpResponse(_("Some error occurred during password change. Please try again"), status=500)
password_reset_email_limiter.tick_request_counter(request)
return HttpResponse(status=200)
else:
return HttpResponseBadRequest(_("No email address provided."))
| edx-solutions/edx-platform | openedx/core/djangoapps/user_authn/views/password_reset.py | Python | agpl-3.0 | 24,770 |
# Copyright (C) 2011 Alexey Agapitov
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from ktope import hw2,hw5
import sys
import fileinput
__name__='hw5'
def main(file):
lines=[]
'''with fileinput.input(files=(file)) as f:
for line in f:
lines.append(line)
'''
finp=fileinput.input(files=(sys.argv[2]))
for line in finp:
lines.append(line)
circuits=hw2.buildCircuits(lines)
elements=hw2.getElements(circuits)
connMatrix=hw2.buildConnMatrix(circuits,elements)
for line in hw5.hamiltonChain(connMatrix,elements):
print(line)
| marwinxxii/ktope | ktope/cli/hw5.py | Python | agpl-3.0 | 1,242 |
###########################################################################
# (C) Vrije Universiteit, Amsterdam (the Netherlands) #
# #
# This file is part of AmCAT - The Amsterdam Content Analysis Toolkit #
# #
# AmCAT is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Affero General Public License as published by the #
# Free Software Foundation, either version 3 of the License, or (at your #
# option) any later version. #
# #
# AmCAT is distributed in the hope that it will be useful, but WITHOUT #
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or #
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public #
# License for more details. #
# #
# You should have received a copy of the GNU Affero General Public #
# License along with AmCAT. If not, see <http://www.gnu.org/licenses/>. #
###########################################################################
from amcat.models import Language
from amcat.scripts.actions.export_codebook import ExportCodebook
from amcat.tools import amcattest
from amcat.tools.amcattest import AmCATTestCase
def head(seq):
"""Return the first element in seq"""
return next(iter(seq))
class TestExportCodebook(AmCATTestCase):
def setUp(self):
self.de = Language.objects.get(label="de")
self.nl = Language.objects.get(label="nl")
self.default = Language.objects.get(id=1)
self.codebook = amcattest.create_test_codebook_with_codes()[0]
self.codes_list = sorted(self.codebook.codes.all(), key=lambda c:c.id)
self.codes_list[0].add_label(self.de, "Ein")
self.codes_list[1].add_label(self.nl, "Een")
def export(self, codebook=None, language=None, structure="indented", labelcols=False):
"""Run ExportCodebook with some default arguments. Returns tableObj."""
codebook = codebook or self.codebook
language = language or self.default
return {c.code_id: c for c in ExportCodebook(
codebook=codebook.id, language=language.id,
structure=structure, labelcols=labelcols
).run().to_list()}
def test_indented(self):
"""Test indented format."""
codes = self.export()
# Depth of tree is 3, so we need exactly three columns
self.assertTrue(hasattr(head(codes.values()), "code1"))
self.assertTrue(hasattr(head(codes.values()), "code2"))
self.assertTrue(hasattr(head(codes.values()), "code3"))
self.assertFalse(hasattr(head(codes.values()), "code4"))
# Check other properties
self.assertTrue(hasattr(head(codes.values()), "uuid"))
self.assertTrue(hasattr(head(codes.values()), "code_id"))
self.assertFalse(hasattr(head(codes.values()), "parent"))
# 2 roots
self.assertEqual(2, len(list(filter(bool, [c.code1 for c in codes.values()]))))
# 3 'sub'roots
self.assertEqual(3, len(list(filter(bool, [c.code2 for c in codes.values()]))))
# 2 'subsub'roots
self.assertEqual(2, len(list(filter(bool, [c.code3 for c in codes.values()]))))
def test_parent(self):
"""Test parent format."""
codes = self.export(structure="parent")
self.assertTrue(hasattr(head(codes.values()), "parent_id"))
def test_language(self):
"""Test if exporter renders correct labels"""
codes = self.export(language=self.de)
# Exporting structure format, thus no parent column
self.assertFalse(hasattr(head(codes.values()), "parent_id"))
# Should export default label, e.g. "A"
de_code = codes[self.codes_list[0].id]
self.assertIn("A", map(str, self.codes_list))
# should not put 'languaged' labels in codeX columns
self.assertNotIn("Ein", (de_code.code1, de_code.code2, de_code.code3))
nl_code = codes[self.codes_list[1].id]
self.assertNotIn("Een", (nl_code.code1, nl_code.code2, nl_code.code3))
def test_labelcols(self):
"""Test whether extra labels are created """
codes = self.export(labelcols=True)
self.assertTrue(hasattr(head(codes.values()), "labelnl"))
self.assertTrue(hasattr(head(codes.values()), "labelde"))
self.assertFalse(hasattr(head(codes.values()), "label?"))
nl_labels = list(filter(bool, [c.labelnl for c in codes.values()]))
self.assertEqual(1, len(nl_labels))
self.assertEqual("Een", nl_labels[0])
de_labels = list(filter(bool, [c.labelde for c in codes.values()]))
self.assertEqual(1, len(de_labels))
self.assertEqual("Ein", de_labels[0])
# Exporting structure format, thus no parent column
self.assertFalse(hasattr(head(codes.values()), "parent"))
| amcat/amcat | amcat/scripts/actions/tests/test_export_codebook.py | Python | agpl-3.0 | 5,182 |
from django.db import migrations
from auth_helpers.migrations import (
get_migration_group_create,
get_migration_group_delete,
)
try:
from uk_results.models import (
TRUSTED_TO_CONFIRM_CONTROL_RESULTS_GROUP_NAME,
TRUSTED_TO_CONFIRM_VOTE_RESULTS_GROUP_NAME,
)
except:
TRUSTED_TO_CONFIRM_CONTROL_RESULTS_GROUP_NAME = "trusted_to_confirm_control"
TRUSTED_TO_CONFIRM_VOTE_RESULTS_GROUP_NAME = "trusted_to_confirm_votes"
class Migration(migrations.Migration):
dependencies = [("uk_results", "0005_auto_20160426_1058")]
operations = [
migrations.RunPython(
get_migration_group_create(
TRUSTED_TO_CONFIRM_CONTROL_RESULTS_GROUP_NAME, []
),
get_migration_group_delete(
TRUSTED_TO_CONFIRM_CONTROL_RESULTS_GROUP_NAME
),
),
migrations.RunPython(
get_migration_group_create(
TRUSTED_TO_CONFIRM_VOTE_RESULTS_GROUP_NAME, []
),
get_migration_group_delete(
TRUSTED_TO_CONFIRM_VOTE_RESULTS_GROUP_NAME
),
),
]
| DemocracyClub/yournextrepresentative | ynr/apps/uk_results/migrations/0006_add_admin_persmissions.py | Python | agpl-3.0 | 1,141 |
#!/usr/bin/python
import os
import sys
import re
url_cgi = {}
url_23andme = {}
phen_23andme_map = {}
phen_cgi_map = {}
fp = open("data/data_locations.csv")
for line in fp:
line = line.strip()
a = line.split(",")
if a[1] == "cgi":
url_cgi[a[0]] = a[2]
elif a[1] == "23andme":
url_23andme[a[0]] = a[2]
fp.close()
first=True
fp = open("dbgap_file_submission_txt/5a_dbGaP_SubjectSampleMappings.txt")
for line in fp:
line = line.strip()
a = line.split("\t")
if first:
first = False
continue
if re.search( r'23andme', a[1]):
phen_23andme_map[a[0]] = a[1]
else:
phen_cgi_map[a[0]] = a[1]
fp.close()
print "SUBJECT_ID\tSAMPLE_ID\tURL"
for x in url_cgi:
print x + "\t" + phen_cgi_map[x] + "\t" + url_cgi[x]
for x in url_23andme:
print x + "\t" + phen_23andme_map[x] + "\t" + url_23andme[x]
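# Illustrative input shapes (assumed from the parsing above, not real data):
#   data/data_locations.csv rows:            "<subject_id>,cgi,<url>" or
#                                            "<subject_id>,23andme,<url>"
#   5a_dbGaP_SubjectSampleMappings.txt rows:  "<subject_id>\t<sample_id>"
#     (tab separated, first line is a header; sample ids containing "23andme"
#      go to the 23andme map, everything else to the cgi map)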
| abeconnelly/hupgp-dbgap-submission | src/mk_data_url_location_csv.py | Python | agpl-3.0 | 837 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2009-2014:
# Gabes Jean, naparuba@gmail.com
# Gerhard Lausser, Gerhard.Lausser@consol.de
# Gregory Starck, g.starck@gmail.com
# Hartmut Goebel, h.goebel@goebel-consult.de
#
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
import time
import os
import cStringIO
import tempfile
import traceback
import cPickle
import threading
from Queue import Empty
from shinken.external_command import ExternalCommand
from shinken.check import Check
from shinken.notification import Notification
from shinken.eventhandler import EventHandler
from shinken.brok import Brok
from shinken.downtime import Downtime
from shinken.contactdowntime import ContactDowntime
from shinken.comment import Comment
from shinken.acknowledge import Acknowledge
from shinken.log import logger
from shinken.util import nighty_five_percent, get_memory
from shinken.load import Load
from shinken.http_client import HTTPClient, HTTPExceptions
from shinken.stats import statsmgr
from shinken.misc.common import DICT_MODATTR
class Scheduler(object):
"""Please Add a Docstring to describe the class here"""
def __init__(self, scheduler_daemon):
'''
:type scheduler_daemon: shinken.daemons.schedulerdaemon.Shinken
'''
self.sched_daemon = scheduler_daemon
# When set to False by us, we die and the arbiter launches a new Scheduler
self.must_run = True
# protect this unique list
self.waiting_results_lock = threading.RLock()
self.waiting_results = []  # satellites return us results;
# rather than waiting for them, we put them here
# and use them later
# Every N seconds we call functions like consume, delete zombies, etc.
# All of these functions are listed in recurrent_works together with the
# tick (in seconds) at which they must run, which must be an integer > 0.
# The order is important, so make the key an int.
# TODO: at load, change value by configuration one (like reaper time, etc)
self.recurrent_works = {
0: ('update_downtimes_and_comments', self.update_downtimes_and_comments, 1),
1: ('schedule', self.schedule, 1), # just schedule
2: ('consume_results', self.consume_results, 1), # incorporate checks and dependencies
# now get the news actions (checks, notif) raised
3: ('get_new_actions', self.get_new_actions, 1),
4: ('get_new_broks', self.get_new_broks, 1), # and broks
5: ('scatter_master_notifications', self.scatter_master_notifications, 1),
6: ('delete_zombie_checks', self.delete_zombie_checks, 1),
7: ('delete_zombie_actions', self.delete_zombie_actions, 1),
# 3: (self.delete_unwanted_notifications, 1),
8: ('check_freshness', self.check_freshness, 10),
9: ('clean_caches', self.clean_caches, 1),
10: ('update_retention_file', self.update_retention_file, 3600),
11: ('check_orphaned', self.check_orphaned, 60),
# For NagVis like tools: update our status every 10s
12: ('get_and_register_update_program_status_brok',
self.get_and_register_update_program_status_brok, 10),
# Check for system time change. And AFTER get new checks
# so they are changed too.
13: ('check_for_system_time_change', self.sched_daemon.check_for_system_time_change, 1),
# launch if need all internal checks
14: ('manage_internal_checks', self.manage_internal_checks, 1),
# clean possibly oversized queues from time to time, so memory usage
# does not explode; every 1/4 of an hour
15: ('clean_queues', self.clean_queues, 1),
# Look for new business_impact change by modulation every minute
16: ('update_business_values', self.update_business_values, 60),
# Reset the topology change flag if needed
17: ('reset_topology_change_flag', self.reset_topology_change_flag, 1),
18: ('check_for_expire_acknowledge', self.check_for_expire_acknowledge, 1),
19: ('send_broks_to_modules', self.send_broks_to_modules, 1),
20: ('get_objects_from_from_queues', self.get_objects_from_from_queues, 1),
}
# stats part
self.nb_checks_send = 0
self.nb_actions_send = 0
self.nb_broks_send = 0
self.nb_check_received = 0
# Log init
logger.load_obj(self)
self.instance_id = 0  # Temporarily set; will be erased later
# Ours queues
self.checks = {}
self.actions = {}
self.downtimes = {}
self.contact_downtimes = {}
self.comments = {}
self.broks = []
# Some flags
self.has_full_broks = False # have a initial_broks in broks queue?
self.need_dump_memory = False # set by signal 1
self.need_objects_dump = False # set by signal 2
# And a dummy push flavor
self.push_flavor = 0
# Now fake initialize for our satellites
self.brokers = {}
self.pollers = {}
self.reactionners = {}
def reset(self):
self.must_run = True
with self.waiting_results_lock:
del self.waiting_results[:]
for o in self.checks, self.actions, self.downtimes,\
self.contact_downtimes, self.comments,\
self.brokers:
o.clear()
del self.broks[:]
def iter_hosts_and_services(self):
for what in (self.hosts, self.services):
for elt in what:
yield elt
# Load conf for future use.
# We are in_test if the data come from an arbiter-like object,
# so only for tests.
def load_conf(self, conf, in_test=False):
self.program_start = int(time.time())
self.conf = conf
self.hostgroups = conf.hostgroups
self.services = conf.services
# We need a reversed list for searching when the retention
# file is read
self.services.optimize_service_search(conf.hosts)
self.hosts = conf.hosts
self.notificationways = conf.notificationways
self.checkmodulations = conf.checkmodulations
self.macromodulations = conf.macromodulations
self.contacts = conf.contacts
self.contactgroups = conf.contactgroups
self.servicegroups = conf.servicegroups
self.timeperiods = conf.timeperiods
self.commands = conf.commands
self.triggers = conf.triggers
self.triggers.compile()
self.triggers.load_objects(self)
if not in_test:
# Commands in the hosts/services/contacts are not real ones;
# we must relink them
t0 = time.time()
self.conf.late_linkify()
logger.debug("Late command relink in %d", time.time() - t0)
# self.status_file = StatusFile(self)
# External status file
# From Arbiter. Use for Broker to differentiate schedulers
self.instance_id = conf.instance_id
# Tag our hosts with our instance_id
for h in self.hosts:
h.instance_id = conf.instance_id
for s in self.services:
s.instance_id = conf.instance_id
# self for instance_name
self.instance_name = conf.instance_name
# and push flavor
self.push_flavor = conf.push_flavor
# Now we can update our 'ticks' for special calls
# like the retention one, etc
self.update_recurrent_works_tick('update_retention_file',
self.conf.retention_update_interval * 60)
self.update_recurrent_works_tick('clean_queues', self.conf.cleaning_queues_interval)
# Update the 'tick' for a function call in our
# recurrent work
def update_recurrent_works_tick(self, f_name, new_tick):
for i in self.recurrent_works:
(name, f, old_tick) = self.recurrent_works[i]
if name == f_name:
logger.debug("Changing the tick to %d for the function %s", new_tick, name)
self.recurrent_works[i] = (name, f, new_tick)
# Load the pollers from our app master
def load_satellites(self, pollers, reactionners):
self.pollers = pollers
self.reactionners = reactionners
# Oh... Arbiter want us to die... To launch a new Scheduler
# "Mais qu'a-t-il de plus que je n'ais pas?"
# "But.. On which point it is better than me?"
def die(self):
self.must_run = False
def dump_objects(self):
d = tempfile.gettempdir()
p = os.path.join(d, 'scheduler-obj-dump-%d' % time.time())
logger.info('Opening the DUMP FILE %s', p)
try:
f = open(p, 'w')
f.write('Scheduler DUMP at %d\n' % time.time())
for c in self.checks.values():
s = 'CHECK: %s:%s:%s:%s:%s:%s\n' % \
(c.id, c.status, c.t_to_go, c.poller_tag, c.command, c.worker)
f.write(s)
for a in self.actions.values():
s = '%s: %s:%s:%s:%s:%s:%s\n' % \
(a.__class__.my_type.upper(), a.id, a.status,
a.t_to_go, a.reactionner_tag, a.command, a.worker)
f.write(s)
for b in self.broks:
s = 'BROK: %s:%s\n' % (b.id, b.type)
f.write(s)
f.close()
except Exception, exp:
logger.error("Error in writing the dump file %s : %s", p, str(exp))
def dump_config(self):
d = tempfile.gettempdir()
p = os.path.join(d, 'scheduler-conf-dump-%d' % time.time())
logger.info('Opening the DUMP FILE %s', p)
try:
f = open(p, 'w')
f.write('Scheduler config DUMP at %d\n' % time.time())
self.conf.dump(f)
f.close()
except Exception, exp:
logger.error("Error in writing the dump file %s : %s", p, str(exp))
# Load the external command
def load_external_command(self, e):
self.external_command = e
# We've got activity in the fifo, we get and run commands
def run_external_commands(self, cmds):
for command in cmds:
self.run_external_command(command)
def run_external_command(self, command):
logger.debug("scheduler resolves command '%s'", command)
ext_cmd = ExternalCommand(command)
self.external_command.resolve_command(ext_cmd)
# add_Brok is a bit more complex than the others because, on startup,
# broks are put in a global queue: self.broks. Then, when the first broker
# connects, it will generate initial_broks in its own queue (so bname != None),
# and in a "normal" run we just need to put the brok into all queues.
def add_Brok(self, brok, bname=None):
# For brok, we TAG brok with our instance_id
brok.instance_id = self.instance_id
# Maybe it's just for one broker
if bname:
self.brokers[bname]['broks'].append(brok)
else:
# If there are known brokers, give it to them
if len(self.brokers) > 0:
# Or maybe it's for all
for bname in self.brokers:
self.brokers[bname]['broks'].append(brok)
else: # no brokers? maybe at startup for logs
# we will put in global queue, that the first broker
# connection will get all
self.broks.append(brok)
def add_Notification(self, notif):
self.actions[notif.id] = notif
# A notification ask for a brok
if notif.contact is not None:
b = notif.get_initial_status_brok()
self.add(b)
def add_Check(self, c):
self.checks[c.id] = c
# A new check means the host/service changes its next_check
# need to be refreshed
b = c.ref.get_next_schedule_brok()
self.add(b)
def add_EventHandler(self, action):
# print "Add an event Handler", elt.id
self.actions[action.id] = action
def add_Downtime(self, dt):
self.downtimes[dt.id] = dt
if dt.extra_comment:
self.add_Comment(dt.extra_comment)
def add_ContactDowntime(self, contact_dt):
self.contact_downtimes[contact_dt.id] = contact_dt
def add_Comment(self, comment):
self.comments[comment.id] = comment
b = comment.ref.get_update_status_brok()
self.add(b)
# Ok one of our modules send us a command? just run it!
def add_ExternalCommand(self, ext_cmd):
self.external_command.resolve_command(ext_cmd)
# Schedulers have some queues. We can simplify call by adding
# elements into the proper queue just by looking at their type
# Brok -> self.broks
# Check -> self.checks
# Notification -> self.actions
# Downtime -> self.downtimes
# ContactDowntime -> self.contact_downtimes
def add(self, elt):
f = self.__add_actions.get(elt.__class__, None)
if f:
# print("found action for %s: %s" % (elt.__class__.__name__, f.__name__))
f(self, elt)
__add_actions = {
Check: add_Check,
Brok: add_Brok,
Notification: add_Notification,
EventHandler: add_EventHandler,
Downtime: add_Downtime,
ContactDowntime: add_ContactDowntime,
Comment: add_Comment,
ExternalCommand: add_ExternalCommand,
}
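    # Usage sketch for the dispatch table above: add() routes an element to the
    # proper queue by its class. This assumes a configured Scheduler instance
    # (here a hypothetical name `sched`) and uses only the Brok signature seen
    # elsewhere in this module:
    #
    #     b = Brok('log', {'log': 'something happened'})
    #     sched.add(b)          # dispatched to add_Brok -> global/broker queues
    #     sched.add(my_check)   # a Check instance (hypothetical `my_check`) ends
    #                           # up in self.checks via add_Check
    #
    # Unknown types are silently ignored because __add_actions.get() returns None.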
    # We call the hook function of every module that defines it
# TODO: find a way to merge this and the version in daemon.py
def hook_point(self, hook_name):
for inst in self.sched_daemon.modules_manager.instances:
full_hook_name = 'hook_' + hook_name
logger.debug("hook_point: %s: %s %s",
inst.get_name(), str(hasattr(inst, full_hook_name)), hook_name)
if hasattr(inst, full_hook_name):
f = getattr(inst, full_hook_name)
try:
f(self)
except Exception, exp:
logger.error("The instance %s raise an exception %s."
"I disable it and set it to restart it later",
inst.get_name(), str(exp))
output = cStringIO.StringIO()
traceback.print_exc(file=output)
logger.error("Exception trace follows: %s", output.getvalue())
output.close()
self.sched_daemon.modules_manager.set_to_restart(inst)
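    # A module exposes a hook simply by defining a method named
    # 'hook_' + <hook_name>; hook_point() then calls it with the scheduler as
    # its only argument. A minimal sketch (hypothetical module class, not an
    # actual Shinken module):
    #
    #     class MyRetentionModule(object):
    #         def get_name(self):
    #             return 'my-retention'
    #         def hook_save_retention(self, scheduler):
    #             data = scheduler.get_retention_data()
    #             # persist `data` somewhere (file, database, ...)
    #
    # update_retention_file() triggers it through self.hook_point('save_retention').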
    # Our queues may explode if no one asks us for elements.
    # It's very dangerous: you can crash your server... and that's a bad thing :)
    # So we 'just' keep the most recent elements: 5 times the max is a good overhead
def clean_queues(self):
# if we set the interval at 0, we bail out
if self.conf.cleaning_queues_interval == 0:
return
max_checks = 5 * (len(self.hosts) + len(self.services))
max_broks = 5 * (len(self.hosts) + len(self.services))
max_actions = 5 * len(self.contacts) * (len(self.hosts) + len(self.services))
        # For checks, it's not so simple:
        # checks may be referenced by their host/service,
        # so we do not just del them from the check list, but also from their service/host.
        # We want ids lower than max_id - max_checks
if len(self.checks) > max_checks:
# keys does not ensure sorted keys. Max is slow but we have no other way.
id_max = max(self.checks.keys())
to_del_checks = [c for c in self.checks.values() if c.id < id_max - max_checks]
nb_checks_drops = len(to_del_checks)
if nb_checks_drops > 0:
logger.info("I have to del some checks (%d)..., sorry", nb_checks_drops)
for c in to_del_checks:
i = c.id
elt = c.ref
# First remove the link in host/service
elt.remove_in_progress_check(c)
                # Then in dependent checks (checks I depend on,
                # or checks that depend on me)
for dependent_checks in c.depend_on_me:
dependent_checks.depend_on.remove(c.id)
for c_temp in c.depend_on:
                    c_temp.depend_on_me.remove(c)
del self.checks[i] # Final Bye bye ...
else:
nb_checks_drops = 0
        # For broks and actions, it's simpler.
        # For broks, manage the global queue but also every broker's queue
b_lists = [self.broks]
for (bname, e) in self.brokers.iteritems():
b_lists.append(e['broks'])
nb_broks_drops = 0
for broks in b_lists:
if len(broks) > max_broks:
count = len(broks) - max_broks
del broks[-count:]
nb_broks_drops += count
if len(self.actions) > max_actions:
id_max = max(self.actions.keys())
id_to_del_actions = [i for i in self.actions if i < id_max - max_actions]
nb_actions_drops = len(id_to_del_actions)
for i in id_to_del_actions:
# Remember to delete reference of notification in service/host
a = self.actions[i]
if a.is_a == 'notification':
a.ref.remove_in_progress_notification(a)
del self.actions[i]
else:
nb_actions_drops = 0
statsmgr.incr("scheduler.checks.dropped", nb_checks_drops, "queue")
statsmgr.incr("scheduler.broks.dropped", nb_broks_drops, "queue")
statsmgr.incr("scheduler.actions.dropped", nb_actions_drops, "queue")
if nb_checks_drops != 0 or nb_broks_drops != 0 or nb_actions_drops != 0:
logger.warning("We drop %d checks, %d broks and %d actions",
nb_checks_drops, nb_broks_drops, nb_actions_drops)
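    # Rough sizing sketch for the thresholds above (illustrative numbers):
    # with 100 hosts, 900 services and 10 contacts,
    # max_checks = max_broks = 5 * 1000 = 5000 and
    # max_actions = 5 * 10 * 1000 = 50000; anything with an id older than
    # (max_id - threshold) gets dropped.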
    # For tuning purposes we use caches, but we do not want them to explode,
    # so we clean them
def clean_caches(self):
for tp in self.timeperiods:
tp.clean_cache()
# Ask item (host or service) an update_status
# and add it to our broks queue
def get_and_register_status_brok(self, item):
b = item.get_update_status_brok()
self.add(b)
# Ask item (host or service) a check_result_brok
# and add it to our broks queue
def get_and_register_check_result_brok(self, item):
b = item.get_check_result_brok()
self.add(b)
# We do not want this downtime id
def del_downtime(self, dt_id):
if dt_id in self.downtimes:
self.downtimes[dt_id].ref.del_downtime(dt_id)
del self.downtimes[dt_id]
# We do not want this downtime id
def del_contact_downtime(self, dt_id):
if dt_id in self.contact_downtimes:
self.contact_downtimes[dt_id].ref.del_downtime(dt_id)
del self.contact_downtimes[dt_id]
# We do not want this comment id
def del_comment(self, c_id):
if c_id in self.comments:
self.comments[c_id].ref.del_comment(c_id)
del self.comments[c_id]
    # We look for expired acknowledgements and remove them
def check_for_expire_acknowledge(self):
for elt in self.iter_hosts_and_services():
elt.check_for_expire_acknowledge()
    # We update all business_impact values to take new modulations into account,
    # starting with impacts, and so update brok statuses and
    # problem values too
def update_business_values(self):
for elt in self.iter_hosts_and_services():
if not elt.is_problem:
was = elt.business_impact
elt.update_business_impact_value()
new = elt.business_impact
# Ok, the business_impact change, we can update the broks
if new != was:
# print "The elements", i.get_name(), "change it's business_impact value"
self.get_and_register_status_brok(elt)
# When all impacts and classic elements are updated,
# we can update problems (their value depend on impacts, so
# they must be done after)
for elt in self.iter_hosts_and_services():
# We first update impacts and classic elements
if elt.is_problem:
was = elt.business_impact
elt.update_business_impact_value()
new = elt.business_impact
                # Maybe one of the impacts changed its business_impact to a higher value
                # and so asks the problem to raise its value too
if new != was:
# print "The elements", i.get_name(),
# print "change it's business_impact value from", was, "to", new
self.get_and_register_status_brok(elt)
    # Each second we search for master notifications that can be scattered;
    # we take their children and put them into our actions queue
def scatter_master_notifications(self):
now = time.time()
for a in self.actions.values():
# We only want notifications
if a.is_a != 'notification':
continue
if a.status == 'scheduled' and a.is_launchable(now):
if not a.contact:
# This is a "master" notification created by create_notifications.
                    # It won't be sent itself because it has no contact.
# We use it to create "child" notifications (for the contacts and
# notification_commands) which are executed in the reactionner.
item = a.ref
childnotifications = []
if not item.notification_is_blocked_by_item(a.type, now):
# If it is possible to send notifications
# of this type at the current time, then create
# a single notification for each contact of this item.
childnotifications = item.scatter_notification(a)
for c in childnotifications:
c.status = 'scheduled'
self.add(c) # this will send a brok
# If we have notification_interval then schedule
# the next notification (problems only)
if a.type == 'PROBLEM':
# Update the ref notif number after raise the one of the notification
if len(childnotifications) != 0:
# notif_nb of the master notification
# was already current_notification_number+1.
# If notifications were sent,
# then host/service-counter will also be incremented
item.current_notification_number = a.notif_nb
if item.notification_interval != 0 and a.t_to_go is not None:
# We must continue to send notifications.
# Just leave it in the actions list and set it to "scheduled"
# and it will be found again later
# Ask the service/host to compute the next notif time. It can be just
# a.t_to_go + item.notification_interval*item.__class__.interval_length
# or maybe before because we have an
# escalation that need to raise up before
a.t_to_go = item.get_next_notification_time(a)
a.notif_nb = item.current_notification_number + 1
a.status = 'scheduled'
else:
# Wipe out this master notification. One problem notification is enough.
item.remove_in_progress_notification(a)
self.actions[a.id].status = 'zombie'
else:
# Wipe out this master notification.
# We don't repeat recover/downtime/flap/etc...
item.remove_in_progress_notification(a)
self.actions[a.id].status = 'zombie'
# Called by poller to get checks
# Can get checks and actions (notifications and co)
def get_to_run_checks(self, do_checks=False, do_actions=False,
poller_tags=['None'], reactionner_tags=['None'],
worker_name='none', module_types=['fork'],
max_actions=None
):
res = []
now = time.time()
# As priority attribute may not exist on objects loaded from retention
# backend, we ensure that filtering does not break
def get_prio(o):
return getattr(o, "priority", o.properties["priority"].default)
# If poller want to do checks
if do_checks:
for c in sorted(self.checks.itervalues(), key=get_prio):
if max_actions is not None and len(res) >= max_actions:
break
# If the command is untagged, and the poller too, or if both are tagged
# with same name, go for it
# if do_check, call for poller, and so poller_tags by default is ['None']
# by default poller_tag is 'None' and poller_tags is ['None']
# and same for module_type, the default is the 'fork' type
if c.poller_tag in poller_tags and c.module_type in module_types:
# must be ok to launch, and not an internal one (business rules based)
if c.status == 'scheduled' and c.is_launchable(now) and not c.internal:
c.status = 'inpoller'
c.worker = worker_name
                        # We do not send c itself, because it holds a link (c.ref) to the
                        # host/service and the poller does not need it. The poller only
                        # needs a shell with the id, command and default
                        # parameters. That's the goal of copy_shell
res.append(c.copy_shell())
# If reactionner want to notify too
if do_actions:
for a in sorted(self.actions.itervalues(), key=get_prio):
if max_actions is not None and len(res) >= max_actions:
break
is_master = (a.is_a == 'notification' and not a.contact)
if not is_master:
# if do_action, call the reactionner,
# and so reactionner_tags by default is ['None']
# by default reactionner_tag is 'None' and reactionner_tags is ['None'] too
# So if not the good one, loop for next :)
if a.reactionner_tag not in reactionner_tags:
continue
# same for module_type
if a.module_type not in module_types:
continue
# And now look for can launch or not :)
if a.status == 'scheduled' and a.is_launchable(now):
if not is_master:
# This is for child notifications and eventhandlers
a.status = 'inpoller'
a.worker = worker_name
new_a = a.copy_shell()
res.append(new_a)
return res
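    # Typical call from a poller-facing endpoint, using only the parameters
    # defined above (the instance name `sched` and the values are illustrative):
    #
    #     to_run = sched.get_to_run_checks(do_checks=True, do_actions=False,
    #                                      poller_tags=['None'],
    #                                      worker_name='poller-1',
    #                                      module_types=['fork'],
    #                                      max_actions=200)
    #
    # Each returned element is a copy_shell() of the original, so the poller
    # never receives the host/service reference.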
# Called by poller and reactionner to send result
def put_results(self, c):
if c.is_a == 'notification':
# We will only see childnotifications here
try:
timeout = False
if c.status == 'timeout':
# Unfortunately the remove_in_progress_notification
# sets the status to zombie, so we need to save it here.
timeout = True
execution_time = c.execution_time
# Add protection for strange charset
if isinstance(c.output, str):
c.output = c.output.decode('utf8', 'ignore')
self.actions[c.id].get_return_from(c)
item = self.actions[c.id].ref
item.remove_in_progress_notification(c)
self.actions[c.id].status = 'zombie'
item.last_notification = c.check_time
# And we ask the item to update it's state
self.get_and_register_status_brok(item)
# If we' ve got a problem with the notification, raise a Warning log
if timeout:
logger.warning("Contact %s %s notification command '%s ' "
"timed out after %d seconds",
self.actions[c.id].contact.contact_name,
self.actions[c.id].ref.__class__.my_type,
self.actions[c.id].command,
int(execution_time))
elif c.exit_status != 0:
logger.warning("The notification command '%s' raised an error "
"(exit code=%d): '%s'", c.command, c.exit_status, c.output)
except KeyError, exp: # bad number for notif, not that bad
logger.warning('put_results:: get unknown notification : %s ', str(exp))
except AttributeError, exp: # bad object, drop it
logger.warning('put_results:: get bad notification : %s ', str(exp))
elif c.is_a == 'check':
try:
if c.status == 'timeout':
c.output = "(%s Check Timed Out)" %\
self.checks[c.id].ref.__class__.my_type.capitalize()
c.long_output = c.output
c.exit_status = self.conf.timeout_exit_status
self.checks[c.id].get_return_from(c)
self.checks[c.id].status = 'waitconsume'
except KeyError, exp:
pass
elif c.is_a == 'eventhandler':
try:
old_action = self.actions[c.id]
old_action.status = 'zombie'
except KeyError: # cannot find old action
return
if c.status == 'timeout':
_type = 'event handler'
if c.is_snapshot:
_type = 'snapshot'
logger.warning("%s %s command '%s ' timed out after %d seconds" %
(self.actions[c.id].ref.__class__.my_type.capitalize(),
_type,
self.actions[c.id].command,
int(c.execution_time)))
            # If it's a snapshot we should get the output and export it
if c.is_snapshot:
old_action.get_return_from(c)
b = old_action.ref.get_snapshot_brok(old_action.output, old_action.exit_status)
self.add(b)
else:
logger.error("The received result type in unknown! %s", str(c.is_a))
    # Get the right links table for the given type. If unknown, return None
def get_links_from_type(self, type):
t = {'poller': self.pollers, 'reactionner': self.reactionners}
if type in t:
return t[type]
return None
    # Check that we do not connect too often to this satellite
def is_connection_try_too_close(self, elt):
now = time.time()
last_connection = elt['last_connection']
if now - last_connection < 5:
return True
return False
# initialize or re-initialize connection with a poller
# or a reactionner
def pynag_con_init(self, id, type='poller'):
# Get good links tab for looping..
links = self.get_links_from_type(type)
if links is None:
logger.debug("Unknown '%s' type for connection!", type)
return
# We want only to initiate connections to the passive
# pollers and reactionners
passive = links[id]['passive']
if not passive:
return
# If we try to connect too much, we slow down our tests
if self.is_connection_try_too_close(links[id]):
return
# Ok, we can now update it
links[id]['last_connection'] = time.time()
logger.debug("Init connection with %s", links[id]['uri'])
uri = links[id]['uri']
try:
links[id]['con'] = HTTPClient(uri=uri, strong_ssl=links[id]['hard_ssl_name_check'])
con = links[id]['con']
except HTTPExceptions, exp:
logger.warning("Connection problem to the %s %s: %s", type, links[id]['name'], str(exp))
links[id]['con'] = None
return
try:
# initial ping must be quick
con.get('ping')
except HTTPExceptions, exp:
logger.warning("Connection problem to the %s %s: %s", type, links[id]['name'], str(exp))
links[id]['con'] = None
return
except KeyError, exp:
logger.warning("The %s '%s' is not initialized: %s", type, links[id]['name'], str(exp))
links[id]['con'] = None
return
logger.info("Connection OK to the %s %s", type, links[id]['name'])
# We should push actions to our passives satellites
def push_actions_to_passives_satellites(self):
# We loop for our passive pollers or reactionners
for p in filter(lambda p: p['passive'], self.pollers.values()):
logger.debug("I will send actions to the poller %s", str(p))
con = p['con']
poller_tags = p['poller_tags']
if con is not None:
# get actions
lst = self.get_to_run_checks(True, False, poller_tags, worker_name=p['name'])
try:
# initial ping must be quick
logger.debug("Sending %s actions", len(lst))
con.post('push_actions', {'actions': lst, 'sched_id': self.instance_id})
self.nb_checks_send += len(lst)
                except HTTPExceptions, exp:
                    logger.warning("Connection problem to the %s %s: %s", 'poller', p['name'], str(exp))
                    p['con'] = None
                    return
                except KeyError, exp:
                    logger.warning("The %s '%s' is not initialized: %s", 'poller', p['name'], str(exp))
                    p['con'] = None
                    return
else: # no connection? try to reconnect
self.pynag_con_init(p['instance_id'], type='poller')
# TODO:factorize
# We loop for our passive reactionners
for p in filter(lambda p: p['passive'], self.reactionners.values()):
logger.debug("I will send actions to the reactionner %s", str(p))
con = p['con']
reactionner_tags = p['reactionner_tags']
if con is not None:
# get actions
lst = self.get_to_run_checks(False, True,
reactionner_tags=reactionner_tags,
worker_name=p['name'])
try:
# initial ping must be quick
logger.debug("Sending %d actions", len(lst))
con.post('push_actions', {'actions': lst, 'sched_id': self.instance_id})
self.nb_checks_send += len(lst)
                except HTTPExceptions, exp:
                    logger.warning("Connection problem to the %s %s: %s", 'reactionner', p['name'], str(exp))
                    p['con'] = None
                    return
                except KeyError, exp:
                    logger.warning("The %s '%s' is not initialized: %s", 'reactionner', p['name'], str(exp))
                    p['con'] = None
                    return
else: # no connection? try to reconnect
self.pynag_con_init(p['instance_id'], type='reactionner')
# We should get returns from satellites
def get_actions_from_passives_satellites(self):
# We loop for our passive pollers
for p in [p for p in self.pollers.values() if p['passive']]:
logger.debug("I will get actions from the poller %s", str(p))
con = p['con']
poller_tags = p['poller_tags']
if con is not None:
try:
# initial ping must be quick
                    # Before making a call that can be long, do a simple ping to be sure it is alive
con.get('ping')
results = con.get('get_returns', {'sched_id': self.instance_id}, wait='long')
try:
results = str(results)
                    except UnicodeEncodeError:  # ascii not working, switch to utf8;
                        # if it is not really utf8, it will be a real problem
                        results = results.encode("utf8", 'ignore')
                        # and the data will be invalid, so the pickle pass below catches it.
                    # now do the cPickle pass, and catch possible errors from it
try:
results = cPickle.loads(results)
except Exception, exp:
logger.error('Cannot load passive results from satellite %s : %s',
p['name'], str(exp))
continue
nb_received = len(results)
self.nb_check_received += nb_received
logger.debug("Received %d passive results", nb_received)
for result in results:
result.set_type_passive()
with self.waiting_results_lock:
self.waiting_results.extend(results)
                except HTTPExceptions, exp:
                    logger.warning("Connection problem to the %s %s: %s", 'poller', p['name'], str(exp))
                    p['con'] = None
                    continue
                except KeyError, exp:
                    logger.warning("The %s '%s' is not initialized: %s", 'poller', p['name'], str(exp))
                    p['con'] = None
                    continue
else: # no connection, try reinit
self.pynag_con_init(p['instance_id'], type='poller')
# We loop for our passive reactionners
for p in [p for p in self.reactionners.values() if p['passive']]:
logger.debug("I will get actions from the reactionner %s", str(p))
con = p['con']
reactionner_tags = p['reactionner_tags']
if con is not None:
try:
# initial ping must be quick
                    # Before making a call that can be long, do a simple ping to be sure it is alive
con.get('ping')
results = con.get('get_returns', {'sched_id': self.instance_id}, wait='long')
results = cPickle.loads(str(results))
nb_received = len(results)
self.nb_check_received += nb_received
logger.debug("Received %d passive results", nb_received)
for result in results:
result.set_type_passive()
with self.waiting_results_lock:
self.waiting_results.extend(results)
                except HTTPExceptions, exp:
                    logger.warning("Connection problem to the %s %s: %s", 'reactionner', p['name'], str(exp))
                    p['con'] = None
                    return
                except KeyError, exp:
                    logger.warning("The %s '%s' is not initialized: %s", 'reactionner', p['name'], str(exp))
                    p['con'] = None
                    return
else: # no connection, try reinit
self.pynag_con_init(p['instance_id'], type='reactionner')
    # Some checks are purely internal, like business-rule based ones;
    # simply ask their ref to manage them when it's OK to run
def manage_internal_checks(self):
now = time.time()
for c in self.checks.values():
            # must be OK to launch, and an internal one (business rules based)
if c.internal and c.status == 'scheduled' and c.is_launchable(now):
c.ref.manage_internal_check(self.hosts, self.services, c)
# it manage it, now just ask to consume it
# like for all checks
c.status = 'waitconsume'
    # Called by brokers to get broks.
    # We give them, and clean our queues!
def get_broks(self, bname, broks_batch=0):
res = []
        if broks_batch > 0:
            count = min(broks_batch, len(self.broks))
        else:
            count = len(self.broks)
res.extend(self.broks[:count])
del self.broks[:count]
# If we are here, we are sure the broker entry exists
        if broks_batch > 0:
            count = max(0, min(broks_batch - len(res), len(self.brokers[bname]['broks'])))
        else:
            count = len(self.brokers[bname]['broks'])
res.extend(self.brokers[bname]['broks'][:count])
del self.brokers[bname]['broks'][:count]
return res
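    # Sketch of a broker pulling broks in batches (the broker name is illustrative):
    #
    #     batch = sched.get_broks('broker-1', broks_batch=1000)
    #
    # With broks_batch=0 the whole global queue plus the broker's own queue is
    # returned; with a positive batch size at most that many broks are returned
    # and the rest stay queued for the next call.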
    # An element can have its topology changed by an external command;
    # if so, a brok will be generated with this flag set. Here we reset
    # the flag on all of them.
def reset_topology_change_flag(self):
for i in self.hosts:
i.topology_change = False
for i in self.services:
i.topology_change = False
    # Update the retention file and give all the data in
    # a dict so the read function can pick up what it wants.
    # For now compression is not used, but it can be added easily:
    # just uncomment :)
def update_retention_file(self, forced=False):
# If we set the update to 0, we do not want of this
# if we do not forced (like at stopping)
if self.conf.retention_update_interval == 0 and not forced:
return
self.hook_point('save_retention')
# Load the retention file and get status from it. It does not get all checks in progress
# for the moment, just the status and the notifications.
def retention_load(self):
self.hook_point('load_retention')
# Helper function for module, will give the host and service
# data
def get_retention_data(self):
# We create an all_data dict with list of useful retention data dicts
# of our hosts and services
all_data = {'hosts': {}, 'services': {}}
for h in self.hosts:
d = {}
running_properties = h.__class__.running_properties
for prop, entry in running_properties.items():
if entry.retention:
v = getattr(h, prop)
# Maybe we should "prepare" the data before saving it
# like get only names instead of the whole objects
f = entry.retention_preparation
if f:
v = f(h, v)
d[prop] = v
# and some properties are also like this, like
# active checks enabled or not
properties = h.__class__.properties
for prop, entry in properties.items():
if entry.retention:
v = getattr(h, prop)
# Maybe we should "prepare" the data before saving it
# like get only names instead of the whole objects
f = entry.retention_preparation
if f:
v = f(h, v)
d[prop] = v
all_data['hosts'][h.host_name] = d
# Same for services
for s in self.services:
d = {}
running_properties = s.__class__.running_properties
for prop, entry in running_properties.items():
if entry.retention:
v = getattr(s, prop)
# Maybe we should "prepare" the data before saving it
# like get only names instead of the whole objects
f = entry.retention_preparation
if f:
v = f(s, v)
d[prop] = v
# We consider the service ONLY if it has modified attributes.
# If not, then no non-running attributes will be saved for this service.
if s.modified_attributes > 0:
# Same for properties, like active checks enabled or not
properties = s.__class__.properties
for prop, entry in properties.items():
# We save the value only if the attribute
# is selected for retention AND has been modified.
if entry.retention and \
not (prop in DICT_MODATTR and
not DICT_MODATTR[prop].value & s.modified_attributes):
v = getattr(s, prop)
# Maybe we should "prepare" the data before saving it
# like get only names instead of the whole objects
f = entry.retention_preparation
if f:
v = f(s, v)
d[prop] = v
all_data['services'][(s.host.host_name, s.service_description)] = d
return all_data
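    # The returned structure looks like this (property names depend on each
    # class's retention flags; the names and values below are illustrative):
    #
    #     {'hosts': {'srv-web-1': {'state': 'UP', 'last_chk': 1453125000, ...}},
    #      'services': {('srv-web-1', 'HTTP'): {'state': 'OK', ...}}}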
# Get back our broks from a retention module :)
def restore_retention_data(self, data):
        # Now load the interesting properties into hosts/services.
        # Properties tagged retention=False are not directly loaded.
        # Items will keep their status, but will not be in checking, so
        # a new check will be launched like at a normal start (randomly
        # distributed scheduling)
ret_hosts = data['hosts']
for ret_h_name in ret_hosts:
# We take the dict of our value to load
d = data['hosts'][ret_h_name]
h = self.hosts.find_by_name(ret_h_name)
if h is not None:
# First manage all running properties
running_properties = h.__class__.running_properties
for prop, entry in running_properties.items():
if entry.retention:
# Maybe the saved one was not with this value, so
# we just bypass this
if prop in d:
setattr(h, prop, d[prop])
                # Ok, some are in properties too (like active checks enabled
                # or not). These WILL OVERRIDE THE CONFIGURATION VALUE!
properties = h.__class__.properties
for prop, entry in properties.items():
if entry.retention:
# Maybe the saved one was not with this value, so
# we just bypass this
if prop in d:
setattr(h, prop, d[prop])
# Now manage all linked objects load from previous run
for a in h.notifications_in_progress.values():
a.ref = h
self.add(a)
# Also raises the action id, so do not overlap ids
a.assume_at_least_id(a.id)
h.update_in_checking()
# And also add downtimes and comments
for dt in h.downtimes:
dt.ref = h
if hasattr(dt, 'extra_comment'):
dt.extra_comment.ref = h
else:
dt.extra_comment = None
# raises the downtime id to do not overlap
Downtime.id = max(Downtime.id, dt.id + 1)
self.add(dt)
for c in h.comments:
c.ref = h
self.add(c)
# raises comment id to do not overlap ids
Comment.id = max(Comment.id, c.id + 1)
if h.acknowledgement is not None:
h.acknowledgement.ref = h
# Raises the id of future ack so we don't overwrite
# these one
Acknowledge.id = max(Acknowledge.id, h.acknowledgement.id + 1)
                # Relink notified_contacts as a set() of true contact objects;
                # if it was loaded from the retention, it is now a list of
                # contact names
if 'notified_contacts' in d:
new_notified_contacts = set()
for cname in h.notified_contacts:
c = self.contacts.find_by_name(cname)
# Maybe the contact is gone. Skip it
if c:
new_notified_contacts.add(c)
h.notified_contacts = new_notified_contacts
        # Same for services
ret_services = data['services']
for (ret_s_h_name, ret_s_desc) in ret_services:
# We take our dict to load
d = data['services'][(ret_s_h_name, ret_s_desc)]
s = self.services.find_srv_by_name_and_hostname(ret_s_h_name, ret_s_desc)
if s is not None:
# Load the major values from running properties
running_properties = s.__class__.running_properties
for prop, entry in running_properties.items():
if entry.retention:
# Maybe the saved one was not with this value, so
# we just bypass this
if prop in d:
setattr(s, prop, d[prop])
# And some others from properties dict too
properties = s.__class__.properties
for prop, entry in properties.items():
if entry.retention:
# Maybe the saved one was not with this value, so
# we just bypass this
if prop in d:
setattr(s, prop, d[prop])
# Ok now manage all linked objects
for a in s.notifications_in_progress.values():
a.ref = s
self.add(a)
# Also raises the action id, so do not overlap id
a.assume_at_least_id(a.id)
s.update_in_checking()
# And also add downtimes and comments
for dt in s.downtimes:
dt.ref = s
if hasattr(dt, 'extra_comment'):
dt.extra_comment.ref = s
else:
dt.extra_comment = None
# raises the downtime id to do not overlap
Downtime.id = max(Downtime.id, dt.id + 1)
self.add(dt)
for c in s.comments:
c.ref = s
self.add(c)
# raises comment id to do not overlap ids
Comment.id = max(Comment.id, c.id + 1)
if s.acknowledgement is not None:
s.acknowledgement.ref = s
# Raises the id of future ack so we don't overwrite
# these one
Acknowledge.id = max(Acknowledge.id, s.acknowledgement.id + 1)
                # Relink notified_contacts as a set() of true contact objects;
                # if it was loaded from the retention, it is now a list of
                # contact names
if 'notified_contacts' in d:
new_notified_contacts = set()
for cname in s.notified_contacts:
c = self.contacts.find_by_name(cname)
# Maybe the contact is gone. Skip it
if c:
new_notified_contacts.add(c)
s.notified_contacts = new_notified_contacts
# Fill the self.broks with broks of self (process id, and co)
# broks of service and hosts (initial status)
def fill_initial_broks(self, bname, with_logs=False):
# First a Brok for delete all from my instance_id
b = Brok('clean_all_my_instance_id', {'instance_id': self.instance_id})
self.add_Brok(b, bname)
# first the program status
b = self.get_program_status_brok()
self.add_Brok(b, bname)
        # We can't just call initial_status on all these types in any order:
        # the order is important, services need hosts...
initial_status_types = (self.timeperiods, self.commands,
self.contacts, self.contactgroups,
self.hosts, self.hostgroups,
self.services, self.servicegroups)
self.conf.skip_initial_broks = getattr(self.conf, 'skip_initial_broks', False)
logger.debug("Skipping initial broks? %s", str(self.conf.skip_initial_broks))
if not self.conf.skip_initial_broks:
for tab in initial_status_types:
for i in tab:
b = i.get_initial_status_brok()
self.add_Brok(b, bname)
        # Only raise all the initial logs at scheduler startup
if with_logs:
# Ask for INITIAL logs for services and hosts
for i in self.hosts:
i.raise_initial_state()
for i in self.services:
i.raise_initial_state()
# Add a brok to say that we finished all initial_pass
b = Brok('initial_broks_done', {'instance_id': self.instance_id})
self.add_Brok(b, bname)
# We now have all full broks
self.has_full_broks = True
logger.info("[%s] Created %d initial Broks for broker %s",
self.instance_name, len(self.brokers[bname]['broks']), bname)
    # Create a brok with program status info
def get_and_register_program_status_brok(self):
b = self.get_program_status_brok()
self.add(b)
    # Create a brok with program status info
def get_and_register_update_program_status_brok(self):
b = self.get_program_status_brok()
b.type = 'update_program_status'
self.add(b)
# Get a brok with program status
# TODO: GET REAL VALUES
def get_program_status_brok(self):
now = int(time.time())
data = {"is_running": 1,
"instance_id": self.instance_id,
"instance_name": self.instance_name,
"last_alive": now,
"interval_length": self.conf.interval_length,
"program_start": self.program_start,
"pid": os.getpid(),
"daemon_mode": 1,
"last_command_check": now,
"last_log_rotation": now,
"notifications_enabled": self.conf.enable_notifications,
"active_service_checks_enabled": self.conf.execute_service_checks,
"passive_service_checks_enabled": self.conf.accept_passive_service_checks,
"active_host_checks_enabled": self.conf.execute_host_checks,
"passive_host_checks_enabled": self.conf.accept_passive_host_checks,
"event_handlers_enabled": self.conf.enable_event_handlers,
"flap_detection_enabled": self.conf.enable_flap_detection,
"failure_prediction_enabled": 0,
"process_performance_data": self.conf.process_performance_data,
"obsess_over_hosts": self.conf.obsess_over_hosts,
"obsess_over_services": self.conf.obsess_over_services,
"modified_host_attributes": 0,
"modified_service_attributes": 0,
"global_host_event_handler": self.conf.global_host_event_handler,
'global_service_event_handler': self.conf.global_service_event_handler,
'check_external_commands': self.conf.check_external_commands,
'check_service_freshness': self.conf.check_service_freshness,
'check_host_freshness': self.conf.check_host_freshness,
'command_file': self.conf.command_file
}
b = Brok('program_status', data)
return b
    # Called every 1sec to consume every result from services or hosts;
    # with these results, they become OK, CRITICAL, UP/DOWN, etc...
def consume_results(self):
# All results are in self.waiting_results
# We need to get them first
with self.waiting_results_lock:
waiting_results = self.waiting_results
self.waiting_results = []
for c in waiting_results:
self.put_results(c)
# Then we consume them
# print "**********Consume*********"
for c in self.checks.values():
if c.status == 'waitconsume':
item = c.ref
item.consume_result(c)
        # All 'finished' checks (no more deps) release the checks that depend on them
for c in self.checks.values():
if c.status == 'havetoresolvedep':
for dependent_checks in c.depend_on_me:
# Ok, now dependent will no more wait c
dependent_checks.depend_on.remove(c.id)
# REMOVE OLD DEP CHECK -> zombie
c.status = 'zombie'
        # Now, reintegrate dep checks
for c in self.checks.values():
if c.status == 'waitdep' and len(c.depend_on) == 0:
item = c.ref
item.consume_result(c)
# Called every 1sec to delete all checks in a zombie state
# zombie = not useful anymore
def delete_zombie_checks(self):
# print "**********Delete zombies checks****"
id_to_del = []
for c in self.checks.values():
if c.status == 'zombie':
id_to_del.append(c.id)
        # a little pat on the back and off you go, thanks...
        # *pat pat* now get out, thanks :)
for id in id_to_del:
del self.checks[id] # ZANKUSEN!
# Called every 1sec to delete all actions in a zombie state
# zombie = not useful anymore
def delete_zombie_actions(self):
# print "**********Delete zombies actions****"
id_to_del = []
for a in self.actions.values():
if a.status == 'zombie':
id_to_del.append(a.id)
        # a little pat on the back and off you go, thanks...
        # *pat pat* now get out, thanks :)
for id in id_to_del:
del self.actions[id] # ZANKUSEN!
# Check for downtimes start and stop, and register
# them if needed
def update_downtimes_and_comments(self):
broks = []
now = time.time()
        # Look for comments in objects, and check whether we already have them
for elt in self.iter_hosts_and_services():
for c in elt.comments:
if c.id not in self.comments:
self.comments[c.id] = c
# Check maintenance periods
for elt in self.iter_hosts_and_services():
if elt.maintenance_period is None:
continue
if elt.in_maintenance is None:
if elt.maintenance_period.is_time_valid(now):
start_dt = elt.maintenance_period.get_next_valid_time_from_t(now)
end_dt = elt.maintenance_period.get_next_invalid_time_from_t(start_dt + 1) - 1
dt = Downtime(elt, start_dt, end_dt, 1, 0, 0,
"system",
"this downtime was automatically scheduled "
"through a maintenance_period")
elt.add_downtime(dt)
self.add(dt)
self.get_and_register_status_brok(elt)
elt.in_maintenance = dt.id
else:
if elt.in_maintenance not in self.downtimes:
                    # the maintenance downtime has expired or was manually deleted
elt.in_maintenance = None
# Check the validity of contact downtimes
for elt in self.contacts:
for dt in elt.downtimes:
dt.check_activation()
# A loop where those downtimes are removed
# which were marked for deletion (mostly by dt.exit())
for dt in self.downtimes.values():
if dt.can_be_deleted is True:
ref = dt.ref
self.del_downtime(dt.id)
broks.append(ref.get_update_status_brok())
# Same for contact downtimes:
for dt in self.contact_downtimes.values():
if dt.can_be_deleted is True:
ref = dt.ref
self.del_contact_downtime(dt.id)
broks.append(ref.get_update_status_brok())
        # Downtimes are usually accompanied by a comment.
        # An exiting downtime also invalidates its comment.
for c in self.comments.values():
if c.can_be_deleted is True:
ref = c.ref
self.del_comment(c.id)
broks.append(ref.get_update_status_brok())
# Check start and stop times
for dt in self.downtimes.values():
if dt.real_end_time < now:
# this one has expired
broks.extend(dt.exit()) # returns downtimestop notifications
elif now >= dt.start_time and dt.fixed and not dt.is_in_effect:
# this one has to start now
broks.extend(dt.enter()) # returns downtimestart notifications
broks.append(dt.ref.get_update_status_brok())
for b in broks:
self.add(b)
# Main schedule function to make the regular scheduling
def schedule(self):
# ask for service and hosts their next check
for elt in self.iter_hosts_and_services():
elt.schedule()
    # Main actions reaper function: it gets all new checks,
    # notifications and event handlers from hosts and services
def get_new_actions(self):
self.hook_point('get_new_actions')
        # ask services and hosts for their new actions
for elt in self.iter_hosts_and_services():
for a in elt.actions:
self.add(a)
# We take all, we can clear it
elt.actions = []
# Similar as above, but for broks
def get_new_broks(self):
        # ask services and hosts for their broks waiting
        # to be consumed
for elt in self.iter_hosts_and_services():
for b in elt.broks:
self.add(b)
# We take all, we can clear it
elt.broks = []
    # Raise freshness checks for services and hosts whose state is not fresh
def check_freshness(self):
# print "********** Check freshness******"
for elt in self.iter_hosts_and_services():
c = elt.do_check_freshness()
if c is not None:
self.add(c)
    # Check for orphaned checks: checks that never come back.
    # So if 'inpoller' and t_to_go < now - time_to_orphanage: problem!
    # Warn only one time for each "worker".
    # XXX I think we should make "time_to_orphanage" configurable per
    # action type (one each for notification, event_handler & check);
    # I think it would be a little more useful that way, not sure though
def check_orphaned(self):
worker_names = {}
now = int(time.time())
for c in self.checks.values():
time_to_orphanage = c.ref.get_time_to_orphanage()
if time_to_orphanage:
if c.status == 'inpoller' and c.t_to_go < now - time_to_orphanage:
c.status = 'scheduled'
if c.worker not in worker_names:
worker_names[c.worker] = {"checks": 1}
continue
if "checks" not in worker_names[c.worker]:
worker_names[c.worker]["checks"] = 1
continue
worker_names[c.worker]["checks"] += 1
for a in self.actions.values():
time_to_orphanage = a.ref.get_time_to_orphanage()
if time_to_orphanage:
if a.status == 'inpoller' and a.t_to_go < now - time_to_orphanage:
a.status = 'scheduled'
if a.worker not in worker_names:
worker_names[a.worker] = {"actions": 1}
continue
if "actions" not in worker_names[a.worker]:
worker_names[a.worker]["actions"] = 1
continue
worker_names[a.worker]["actions"] += 1
reenabled = {"checks": 0, "actions": 0}
for w in worker_names:
for _type in worker_names[w]:
reenabled[_type] += worker_names[w][_type]
logger.warning("%d %s never came back for the satellite "
"'%s'. I reenable them for polling",
worker_names[w][_type], _type, w)
for _type in reenabled:
count = reenabled[_type]
if count:
statsmgr.incr("scheduler.%s.reenabled" % _type, count, "queue")
    # Each loop we are going to send our broks to our modules (if needed)
def send_broks_to_modules(self):
t0 = time.time()
nb_sent = 0
for mod in self.sched_daemon.modules_manager.get_external_instances():
logger.debug("Look for sending to module %s", mod.get_name())
q = mod.to_q
to_send = [b for b in self.broks
if not getattr(b, 'sent_to_sched_externals', False) and mod.want_brok(b)]
q.put(to_send)
nb_sent += len(to_send)
# No more need to send them
for b in self.broks:
b.sent_to_sched_externals = True
logger.debug("Time to send %s broks (after %d secs)", nb_sent, time.time() - t0)
# special one for scheduler ; see Daemon.get_objects_from_from_queues()
def get_objects_from_from_queues(self):
        ''' Same behavior as Daemon.get_objects_from_from_queues(). '''
return self.sched_daemon.get_objects_from_from_queues()
    # Gets internal metrics (queues, latency, object counts) for both statsd and the stats structure
def get_internal_metrics(self):
# Queues
metrics = [
('core.scheduler.mem', get_memory(), 'system'),
('core.scheduler.checks.queue', len(self.checks), 'queue'),
('core.scheduler.actions.queue', len(self.actions), 'queue'),
('core.scheduler.broks.queue', len(self.broks), 'queue'),
('core.scheduler.downtimes.queue', len(self.downtimes), 'queue'),
('core.scheduler.comments.queue', len(self.comments), 'queue'),
]
# Queues
for s in ("scheduled", "inpoller", "zombie", "timeout",
"waitconsume", "waitdep", "havetoresolvedep"):
count = len([c for c in self.checks.values() if c.status == s])
metrics.append(('core.scheduler.checks.%s' % s, count, 'queue'))
# Latency
latencies = [s.latency for s in self.services]
lat_avg, lat_min, lat_max = nighty_five_percent(latencies)
if lat_min:
metrics.append(('core.scheduler.latency.min', lat_min, 'queue'))
metrics.append(('core.scheduler.latency.avg', lat_avg, 'queue'))
metrics.append(('core.scheduler.latency.max', lat_max, 'queue'))
# Objects
for t in ("contacts", "contactgroups", "hosts", "hostgroups",
"services", "servicegroups", "commands"):
count = len(getattr(self, t))
metrics.append(('core.scheduler.%s' % t, count, 'object'))
return metrics
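    # Every metric above is a (name, value, type) tuple, e.g.
    # ('core.scheduler.checks.queue', 42, 'queue'); get_stats_struct() below
    # re-emits them with a timestamp as (name, value, now, mtype).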
    # The stats thread is asking us for a main stats structure
def get_stats_struct(self):
now = int(time.time())
res = self.sched_daemon.get_stats_struct()
instance_name = getattr(self, "instance_name", "")
res.update({'name': instance_name, 'type': 'scheduler'})
        # Get an overview of the latencies with just
        # a 95th percentile view, plus min/max values
latencies = [s.latency for s in self.services]
lat_avg, lat_min, lat_max = nighty_five_percent(latencies)
res['latency'] = (0.0, 0.0, 0.0)
if lat_avg:
res['latency'] = {'avg': lat_avg, 'min': lat_min, 'max': lat_max}
# Managed objects
res["objects"] = {}
for t in ("contacts", "contactgroups", "hosts", "hostgroups",
"services", "servicegroups", "commands"):
res["objects"][t] = len(getattr(self, t))
# metrics specific
metrics = res['metrics']
for metric in self.get_internal_metrics():
name, value, mtype = metric
            metrics.append((name, value, now, mtype))
all_commands = {}
# compute some stats
for elt in self.iter_hosts_and_services():
last_cmd = elt.last_check_command
if not last_cmd:
continue
interval = elt.check_interval
if interval == 0:
interval = 1
cmd = os.path.split(last_cmd.split(' ', 1)[0])[1]
u_time = elt.u_time
s_time = elt.s_time
old_u_time, old_s_time = all_commands.get(cmd, (0.0, 0.0))
old_u_time += u_time / interval
old_s_time += s_time / interval
all_commands[cmd] = (old_u_time, old_s_time)
# now sort it
p = []
for (c, e) in all_commands.iteritems():
u_time, s_time = e
p.append({'cmd': c, 'u_time': u_time, 's_time': s_time})
def p_sort(e1, e2):
if e1['u_time'] > e2['u_time']:
return 1
if e1['u_time'] < e2['u_time']:
return -1
return 0
p.sort(p_sort)
        # take the first 10 for the output
res['commands'] = p[:10]
return res
# Main function
def run(self):
# Then we see if we've got info in the retention file
self.retention_load()
# Finally start the external modules now we got our data
self.hook_point('pre_scheduler_mod_start')
self.sched_daemon.modules_manager.start_external_instances(late_start=True)
# Ok, now all is initialized, we can make the initial broks
logger.info("[%s] First scheduling launched", self.instance_name)
self.schedule()
logger.info("[%s] First scheduling done", self.instance_name)
# Now connect to the passive satellites if needed
for p_id in self.pollers:
self.pynag_con_init(p_id, type='poller')
for r_id in self.reactionners:
self.pynag_con_init(r_id, type='reactionner')
# Ticks are for recurrent function call like consume
# del zombies etc
ticks = 0
timeout = 1.0 # For the select
gogogo = time.time()
# We must reset it if we received a new conf from the Arbiter.
# Otherwise, the stat check average won't be correct
self.nb_check_received = 0
self.load_one_min = Load(initial_value=1)
logger.debug("First loop at %d", time.time())
while self.must_run:
# print "Loop"
# Before answer to brokers, we send our broks to modules
# Ok, go to send our broks to our external modules
# self.send_broks_to_modules()
elapsed, _, _ = self.sched_daemon.handleRequests(timeout)
if elapsed:
timeout -= elapsed
if timeout > 0:
continue
self.load_one_min.update_load(self.sched_daemon.sleep_time)
            # the load of the scheduler is the percentage of time it is not sleeping
l = min(100, 100.0 - self.load_one_min.get_load() * 100)
logger.debug("Load: (sleep) %.2f (average: %.2f) -> %d%%",
self.sched_daemon.sleep_time, self.load_one_min.get_load(), l)
self.sched_daemon.sleep_time = 0.0
# Timeout or time over
timeout = 1.0
ticks += 1
# Do recurrent works like schedule, consume
# delete_zombie_checks
for i in self.recurrent_works:
(name, f, nb_ticks) = self.recurrent_works[i]
# A 0 in the tick will just disable it
if nb_ticks != 0:
if ticks % nb_ticks == 0:
# Call it and save the time spend in it
_t = time.time()
f()
statsmgr.timing('loop.scheduler.%s' % name, time.time() - _t, 'perf')
            # Getting memory has a cost, do not collect it if not needed
# DBG: push actions to passives?
self.push_actions_to_passives_satellites()
self.get_actions_from_passives_satellites()
# stats
nb_scheduled = len([c for c in self.checks.values() if c.status == 'scheduled'])
nb_inpoller = len([c for c in self.checks.values() if c.status == 'inpoller'])
nb_zombies = len([c for c in self.checks.values() if c.status == 'zombie'])
nb_notifications = len(self.actions)
logger.debug("Checks: total %s, scheduled %s,"
"inpoller %s, zombies %s, notifications %s",
len(self.checks), nb_scheduled, nb_inpoller, nb_zombies, nb_notifications)
            # Get an overview of the latencies with just
            # a 95th percentile view, plus min/max values
latencies = [s.latency for s in self.services]
lat_avg, lat_min, lat_max = nighty_five_percent(latencies)
if lat_avg is not None:
logger.debug("Latency (avg/min/max): %.2f/%.2f/%.2f", lat_avg, lat_min, lat_max)
# print "Notifications:", nb_notifications
now = time.time()
if self.nb_checks_send != 0:
logger.debug("Nb checks/notifications/event send: %s", self.nb_checks_send)
self.nb_checks_send = 0
if self.nb_broks_send != 0:
logger.debug("Nb Broks send: %s", self.nb_broks_send)
self.nb_broks_send = 0
time_elapsed = now - gogogo
logger.debug("Check average = %d checks/s", int(self.nb_check_received / time_elapsed))
if self.need_dump_memory:
self.sched_daemon.dump_memory()
self.need_dump_memory = False
if self.need_objects_dump:
logger.debug('I need to dump my objects!')
self.dump_objects()
self.dump_config()
self.need_objects_dump = False
            # Check that memory consumption did not exceed the allowed threshold
self.sched_daemon.check_memory_usage()
        # We must save the retention at quit time BY OURSELVES,
        # because our daemon will not be able to do it for us
self.update_retention_file(True)
| staute/shinken_package | shinken/scheduler.py | Python | agpl-3.0 | 76,404 |
#!/usr/bin/python3
# -*- coding: utf-8 -*-
"""
Copyright 2012-2015 OpenBroadcaster, Inc.
This file is part of OpenBroadcaster Player.
OpenBroadcaster Player is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OpenBroadcaster Player is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with OpenBroadcaster Player. If not, see <http://www.gnu.org/licenses/>.
"""
import obplayer
import os
import os.path
import time
import datetime
import traceback
import gi
gi.require_version('Gst', '1.0')
from gi.repository import GObject, Gst
from .recorder import Recorder
AUDIOLOG_SAMPLE_RATE = '22050'
AUDIOLOG_CHANNELS = '1'
# class Recorder(threading.Thread):
# def __init__(self, fm_feq, sample_rate):
# obplayer.ObThread.__init__(self, 'Oboff_air_AudioLog-Recorder')
# self.daemon = True
# self.audio_data = []
# self.recording = False
# self.process = subprocess.Popen(['rtl_fm', '-f', fm_feq, '-m', 'wbfm', '-r', sample_rate], stdout=subprocess.PIPE)
# self._record_audio()
#
# def _record_audio(self):
# self.recording = True
# while self.recording:
# self.audio_data.append(self.process.read())
#
# def get_audio(self):
# return self.audio_data
#
# def stop(self):
# self.recording = False
class Oboff_air_AudioLog (object):
def __init__(self):
self.recording = False
self.purge_files = obplayer.Config.setting('audiolog_purge_files')
self.date = time.strftime('%Y-%m-%d-%H')
self.audio_data = []
self.recorder = None
self.start()
# try:
# self.sdr = RtlSdr()
# except Exception as OSError:
# obplayer.Log.log("Could not start off-air audio log.\n\
# Make sure your sdr is connected.", 'offair-audiolog')
# self.sdr = None
# if self.sdr != None:
# self.sample_rate = AUDIOLOG_SAMPLE_RATE
# self.fm_feq = obplayer.Config.setting('offair_audiolog_feq')
# self.start()
def start(self):
if self.recording == False:
self.outfile = obplayer.ObData.get_datadir() + '/offair-audiologs/' + time.strftime('%Y-%m-%d_%H:%M:%S') + '.wav'
self.recorder = Recorder(self.outfile)
self.recorder.start()
#self.recorder.join()
# log that a new recording is being started.
obplayer.Log.log("starting new off-air audio log", 'offair-audiolog')
self.log_rotate()
else:
# log if already recording.
obplayer.Log.log("can't start new off-air audio log because already recording log.", 'offair-audiolog')
def stop(self):
self.recording = False
self.recorder.stop()
def log_rotate(self):
if self.date != time.strftime('%Y-%m-%d-%H'):
self.date = time.strftime('%Y-%m-%d-%H')
self.stop()
self.start()
if self.purge_files:
self.log_purge()
GObject.timeout_add(10.0, self.log_rotate)
def log_purge(self):
basedir = obplayer.ObData.get_datadir() + "/offair-audiologs"
then = datetime.datetime.now() - datetime.timedelta(days=90)
for filename in os.listdir(basedir):
parts = filename[:10].split('-')
if len(parts) != 3:
continue
filedate = datetime.datetime(int(parts[0]), int(parts[1]), int(parts[2]))
if filedate < then:
obplayer.Log.log("deleting audiolog file " + filename, 'debug')
os.remove(os.path.join(basedir, filename))
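    # Purge sketch: recorded files are named 'YYYY-MM-DD_HH:MM:SS.wav', so the
    # first 10 characters split on '-' give (year, month, day); e.g. a file
    # named '2015-01-01_12:00:00.wav' (illustrative) is removed once it is more
    # than 90 days old.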
| openbroadcaster/obplayer | obplayer/offair_audiolog/audiolog.py | Python | agpl-3.0 | 4,073 |
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe, os
from frappe import _
from frappe.utils import cint
import frappe.defaults
from frappe.model.document import Document
class Company(Document):
def onload(self):
self.get("__onload").transactions_exist = self.check_if_transactions_exist()
def check_if_transactions_exist(self):
exists = False
for doctype in ["Sales Invoice", "Delivery Note", "Sales Order", "Quotation",
"Purchase Invoice", "Purchase Receipt", "Purchase Order", "Supplier Quotation"]:
if frappe.db.sql("""select name from `tab%s` where company=%s and docstatus=1
limit 1""" % (doctype, "%s"), self.name):
exists = True
break
return exists
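	# For each doctype above this builds a query like (illustrative):
	#   select name from `tabSales Invoice` where company=%s and docstatus=1 limit 1
	# i.e. one submitted document of any of these types is enough to mark
	# transactions as existing.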
def validate(self):
if self.get('__islocal') and len(self.abbr) > 5:
frappe.throw(_("Abbreviation cannot have more than 5 characters"))
self.previous_default_currency = frappe.db.get_value("Company", self.name, "default_currency")
if self.default_currency and self.previous_default_currency and \
self.default_currency != self.previous_default_currency and \
self.check_if_transactions_exist():
frappe.throw(_("Cannot change company's default currency, because there are existing transactions. Transactions must be cancelled to change the default currency."))
self.validate_default_accounts()
def validate_default_accounts(self):
for field in ["default_bank_account", "default_cash_account", "receivables_group", "payables_group",
"default_expense_account", "default_income_account", "stock_received_but_not_billed",
"stock_adjustment_account", "expenses_included_in_valuation"]:
if self.get(field):
for_company = frappe.db.get_value("Account", self.get(field), "company")
if for_company != self.name:
frappe.throw(_("Account {0} does not belong to company: {1}")
.format(self.get(field), self.name))
def on_update(self):
if not frappe.db.sql("""select name from tabAccount
where company=%s and docstatus<2 limit 1""", self.name):
self.create_default_accounts()
self.create_default_warehouses()
self.install_country_fixtures()
if not frappe.db.get_value("Cost Center", {"group_or_ledger": "Ledger",
"company": self.name}):
self.create_default_cost_center()
self.set_default_accounts()
if self.default_currency:
frappe.db.set_value("Currency", self.default_currency, "enabled", 1)
def install_country_fixtures(self):
if os.path.exists(os.path.join(os.path.dirname(__file__), "fixtures", self.country.lower())):
frappe.get_attr("erpnext.setup.doctype.company.fixtures.{0}.install".format(self.country.lower()))(self)
def create_default_warehouses(self):
for whname in (_("Stores"), _("Work In Progress"), _("Finished Goods")):
if not frappe.db.exists("Warehouse", whname + " - " + self.abbr):
stock_group = frappe.db.get_value("Account", {"account_type": "Stock",
"group_or_ledger": "Group", "company": self.name})
if stock_group:
frappe.get_doc({
"doctype":"Warehouse",
"warehouse_name": whname,
"company": self.name,
"create_account_under": stock_group
}).insert()
def create_default_accounts(self):
if self.chart_of_accounts:
self.import_chart_of_account()
else:
self.create_standard_accounts()
frappe.db.set(self, "receivables_group", _("Accounts Receivable") + " - " + self.abbr)
frappe.db.set(self, "payables_group", _("Accounts Payable") + " - " + self.abbr)
def import_chart_of_account(self):
chart = frappe.get_doc("Chart of Accounts", self.chart_of_accounts)
chart.create_accounts(self.name)
def add_acc(self, lst):
account = frappe.get_doc({
"doctype": "Account",
"freeze_account": "No",
"master_type": "",
"company": self.name
})
for d in self.fld_dict.keys():
account.set(d, (d == 'parent_account' and lst[self.fld_dict[d]]) and lst[self.fld_dict[d]] +' - '+ self.abbr or lst[self.fld_dict[d]])
if not account.parent_account:
account.ignore_mandatory = True
account.insert()
def set_default_accounts(self):
def _set_default_account(fieldname, account_type):
if self.get(fieldname):
return
account = frappe.db.get_value("Account", {"account_type": account_type,
"group_or_ledger": "Ledger", "company": self.name})
if account:
self.db_set(fieldname, account)
_set_default_account("default_cash_account", "Cash")
_set_default_account("default_bank_account", "Bank")
if cint(frappe.db.get_value("Accounts Settings", None, "auto_accounting_for_stock")):
_set_default_account("stock_received_but_not_billed", "Stock Received But Not Billed")
_set_default_account("stock_adjustment_account", "Stock Adjustment")
_set_default_account("expenses_included_in_valuation", "Expenses Included In Valuation")
if not self.default_income_account:
self.db_set("default_income_account", frappe.db.get_value("Account",
{"account_name": _("Sales"), "company": self.name}))
def create_default_cost_center(self):
cc_list = [
{
'cost_center_name': self.name,
'company':self.name,
'group_or_ledger':'Group',
'parent_cost_center':None
},
{
'cost_center_name':_('Main'),
'company':self.name,
'group_or_ledger':'Ledger',
'parent_cost_center':self.name + ' - ' + self.abbr
},
]
for cc in cc_list:
cc.update({"doctype": "Cost Center"})
cc_doc = frappe.get_doc(cc)
cc_doc.ignore_permissions = True
if cc.get("cost_center_name") == self.name:
cc_doc.ignore_mandatory = True
cc_doc.insert()
frappe.db.set(self, "cost_center", _("Main") + " - " + self.abbr)
def on_trash(self):
"""
Trash accounts and cost centers for this company if no gl entry exists
"""
rec = frappe.db.sql("SELECT name from tabGL_Entry where company = %s", self.name)
if not rec:
#delete tabAccount
frappe.db.sql("delete from tabAccount where company = %s order by lft desc, rgt desc", self.name)
#delete cost center child table - budget detail
frappe.db.sql("delete bd.* from tabBudget_Detail bd, tabCost_Center cc where bd.parent = cc.name and cc.company = %s", self.name)
#delete cost center
frappe.db.sql("delete from tabCost_Center WHERE company = %s order by lft desc, rgt desc", self.name)
if not frappe.db.get_value("Stock Ledger Entry", {"company": self.name}):
frappe.db.sql("""delete from tabWarehouse where company=%s""", self.name)
frappe.defaults.clear_default("company", value=self.name)
frappe.db.sql("""update tabSingles set value=""
where doctype='Global Defaults' and field='default_company'
and value=%s""", self.name)
def before_rename(self, olddn, newdn, merge=False):
if merge:
frappe.throw(_("Sorry, companies cannot be merged"))
def after_rename(self, olddn, newdn, merge=False):
frappe.db.set(self, "company_name", newdn)
frappe.db.sql("""update tabDefaultValue set defvalue=%s
where defkey='Company' and defvalue=%s""", (newdn, olddn))
frappe.defaults.clear_cache()
def create_standard_accounts(self):
self.fld_dict = {
'account_name': 0,
'parent_account': 1,
'group_or_ledger': 2,
'account_type': 3,
'report_type': 4,
'tax_rate': 5,
'root_type': 6
}
acc_list_common = [
[_('Application of Funds (Assets)'), None,'Group', None,'Balance Sheet', None, 'Asset'],
[_('Current Assets'),_('Application of Funds (Assets)'),'Group', None,'Balance Sheet', None, 'Asset'],
[_('Accounts Receivable'),_('Current Assets'),'Group', None,'Balance Sheet', None, 'Asset'],
[_('Bank Accounts'),_('Current Assets'),'Group','Bank','Balance Sheet', None, 'Asset'],
[_('Cash In Hand'),_('Current Assets'),'Group','Cash','Balance Sheet', None, 'Asset'],
[_('Cash'),_('Cash In Hand'),'Ledger','Cash','Balance Sheet', None, 'Asset'],
[_('Loans and Advances (Assets)'),_('Current Assets'),'Group', None,'Balance Sheet', None, 'Asset'],
[_('Securities and Deposits'),_('Current Assets'),'Group', None,'Balance Sheet', None, 'Asset'],
[_('Earnest Money'),_('Securities and Deposits'),'Ledger', None,'Balance Sheet', None, 'Asset'],
[_('Stock Assets'),_('Current Assets'),'Group','Stock','Balance Sheet', None, 'Asset'],
[_('Tax Assets'),_('Current Assets'),'Group', None,'Balance Sheet', None, 'Asset'],
[_('Fixed Assets'),_('Application of Funds (Assets)'),'Group', None,'Balance Sheet', None, 'Asset'],
[_('Capital Equipments'),_('Fixed Assets'),'Ledger','Fixed Asset','Balance Sheet', None, 'Asset'],
[_('Computers'),_('Fixed Assets'),'Ledger','Fixed Asset','Balance Sheet', None, 'Asset'],
[_('Furniture and Fixture'),_('Fixed Assets'),'Ledger','Fixed Asset','Balance Sheet', None, 'Asset'],
[_('Office Equipments'),_('Fixed Assets'),'Ledger','Fixed Asset','Balance Sheet', None, 'Asset'],
[_('Plant and Machinery'),_('Fixed Assets'),'Ledger','Fixed Asset','Balance Sheet', None, 'Asset'],
[_('Investments'),_('Application of Funds (Assets)'),'Group', None,'Balance Sheet', None, 'Asset'],
[_('Temporary Accounts (Assets)'),_('Application of Funds (Assets)'),'Group', None,'Balance Sheet', None, 'Asset'],
[_('Temporary Assets'),_('Temporary Accounts (Assets)'),'Ledger', None,'Balance Sheet', None, 'Asset'],
[_('Expenses'), None,'Group','Expense Account','Profit and Loss', None, 'Expense'],
[_('Direct Expenses'),_('Expenses'),'Group','Expense Account','Profit and Loss', None, 'Expense'],
[_('Stock Expenses'),_('Direct Expenses'),'Group','Expense Account','Profit and Loss', None, 'Expense'],
[_('Cost of Goods Sold'),_('Stock Expenses'),'Ledger','Expense Account','Profit and Loss', None, 'Expense'],
[_('Stock Adjustment'),_('Stock Expenses'),'Ledger','Stock Adjustment','Profit and Loss', None, 'Expense'],
[_('Expenses Included In Valuation'), _("Stock Expenses"), 'Ledger', 'Expenses Included In Valuation', 'Profit and Loss', None, 'Expense'],
[_('Indirect Expenses'), _('Expenses'),'Group','Expense Account','Profit and Loss', None, 'Expense'],
[_('Marketing Expenses'), _('Indirect Expenses'),'Ledger','Chargeable','Profit and Loss', None, 'Expense'],
[_('Sales Expenses'), _('Indirect Expenses'),'Ledger','Expense Account','Profit and Loss', None, 'Expense'],
[_('Administrative Expenses'), _('Indirect Expenses'),'Ledger','Expense Account','Profit and Loss', None, 'Expense'],
[_('Charity and Donations'), _('Indirect Expenses'),'Ledger','Expense Account','Profit and Loss', None, 'Expense'],
[_('Commission on Sales'), _('Indirect Expenses'),'Ledger','Expense Account','Profit and Loss', None, 'Expense'],
[_('Travel Expenses'), _('Indirect Expenses'),'Ledger','Expense Account','Profit and Loss', None, 'Expense'],
[_('Entertainment Expenses'), _('Indirect Expenses'),'Ledger','Expense Account','Profit and Loss', None, 'Expense'],
[_('Depreciation'), _('Indirect Expenses'),'Ledger','Expense Account','Profit and Loss', None, 'Expense'],
[_('Freight and Forwarding Charges'), _('Indirect Expenses'),'Ledger','Chargeable','Profit and Loss', None, 'Expense'],
[_('Legal Expenses'), _('Indirect Expenses'),'Ledger','Expense Account','Profit and Loss', None, 'Expense'],
[_('Miscellaneous Expenses'), _('Indirect Expenses'),'Ledger','Chargeable','Profit and Loss', None, 'Expense'],
[_('Office Maintenance Expenses'), _('Indirect Expenses'),'Ledger','Expense Account','Profit and Loss', None, 'Expense'],
[_('Office Rent'), _('Indirect Expenses'),'Ledger','Expense Account','Profit and Loss', None, 'Expense'],
[_('Postal Expenses'), _('Indirect Expenses'),'Ledger','Expense Account','Profit and Loss', None, 'Expense'],
[_('Print and Stationery'), _('Indirect Expenses'),'Ledger','Expense Account','Profit and Loss', None, 'Expense'],
[_('Rounded Off'), _('Indirect Expenses'),'Ledger','Expense Account','Profit and Loss', None, 'Expense'],
[_('Salary') ,_('Indirect Expenses'),'Ledger','Expense Account','Profit and Loss', None, 'Expense'],
[_('Telephone Expenses') ,_('Indirect Expenses'),'Ledger','Expense Account','Profit and Loss', None, 'Expense'],
[_('Utility Expenses') ,_('Indirect Expenses'),'Ledger','Expense Account','Profit and Loss', None, 'Expense'],
[_('Income'), None,'Group', None,'Profit and Loss', None, 'Income'],
[_('Direct Income'),_('Income'),'Group','Income Account','Profit and Loss', None, 'Income'],
[_('Sales'),_('Direct Income'),'Ledger','Income Account','Profit and Loss', None, 'Income'],
[_('Service'),_('Direct Income'),'Ledger','Income Account','Profit and Loss', None, 'Income'],
[_('Indirect Income'),_('Income'),'Group','Income Account','Profit and Loss', None, 'Income'],
[_('Source of Funds (Liabilities)'), None,'Group', None,'Balance Sheet', None, 'Liability'],
[_('Capital Account'),_('Source of Funds (Liabilities)'),'Group', None,'Balance Sheet', None, 'Liability'],
[_('Reserves and Surplus'),_('Capital Account'),'Ledger', None,'Balance Sheet', None, 'Liability'],
[_('Shareholders Funds'),_('Capital Account'),'Ledger', None,'Balance Sheet', None, 'Liability'],
[_('Current Liabilities'),_('Source of Funds (Liabilities)'),'Group', None,'Balance Sheet', None, 'Liability'],
[_('Accounts Payable'),_('Current Liabilities'),'Group', None,'Balance Sheet', None, 'Liability'],
[_('Stock Liabilities'),_('Current Liabilities'),'Group', None,'Balance Sheet', None, 'Liability'],
[_('Stock Received But Not Billed'), _('Stock Liabilities'), 'Ledger', 'Stock Received But Not Billed', 'Balance Sheet', None, 'Liability'],
[_('Duties and Taxes'),_('Current Liabilities'),'Group', None,'Balance Sheet', None, 'Liability'],
[_('Loans (Liabilities)'),_('Current Liabilities'),'Group', None,'Balance Sheet', None, 'Liability'],
[_('Secured Loans'),_('Loans (Liabilities)'),'Group', None,'Balance Sheet', None, 'Liability'],
[_('Unsecured Loans'),_('Loans (Liabilities)'),'Group', None,'Balance Sheet', None, 'Liability'],
[_('Bank Overdraft Account'),_('Loans (Liabilities)'),'Group', None,'Balance Sheet', None, 'Liability'],
[_('Temporary Accounts (Liabilities)'),_('Source of Funds (Liabilities)'),'Group', None,'Balance Sheet', None, 'Liability'],
[_('Temporary Liabilities'),_('Temporary Accounts (Liabilities)'),'Ledger', None,'Balance Sheet', None, 'Liability']
]
# load common account heads
for d in acc_list_common:
self.add_acc(d)
@frappe.whitelist()
def replace_abbr(company, old, new):
frappe.only_for("System Manager")
frappe.db.set_value("Company", company, "abbr", new)
def _rename_record(dt):
for d in frappe.db.sql("select name from `tab%s` where company=%s" % (dt, '%s'), company):
parts = d[0].split(" - ")
if parts[-1].lower() == old.lower():
name_without_abbr = " - ".join(parts[:-1])
frappe.rename_doc(dt, d[0], name_without_abbr + " - " + new)
for dt in ["Account", "Cost Center", "Warehouse"]:
_rename_record(dt)
frappe.db.commit()
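# For illustration (hypothetical values): if the abbreviation of company
# "Wind Power LLC" is "WP", get_name_with_abbr("Main", "Wind Power LLC")
# below returns "Main - WP"; a name that already ends with the company
# abbreviation is returned unchanged.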
def get_name_with_abbr(name, company):
company_abbr = frappe.db.get_value("Company", company, "abbr")
parts = name.split(" - ")
if parts[-1].lower() != company_abbr.lower():
parts.append(company_abbr)
return " - ".join(parts)
|
suyashphadtare/test
|
erpnext/setup/doctype/company/company.py
|
Python
|
agpl-3.0
| 15,327
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models, _
from odoo.exceptions import ValidationError
# YTI PLEASE SPLIT ME
class Project(models.Model):
_inherit = 'project.project'
@api.model
def default_get(self, fields):
""" Pre-fill the timesheet product with the "Time" data product when creating a new project that allows billable tasks by default. """
result = super(Project, self).default_get(fields)
if 'timesheet_product_id' in fields and result.get('allow_billable') and result.get('allow_timesheets') and not result.get('timesheet_product_id'):
default_product = self.env.ref('sale_timesheet.time_product', False)
if default_product:
result['timesheet_product_id'] = default_product.id
return result
def _default_timesheet_product_id(self):
return self.env.ref('sale_timesheet.time_product', False)
billable_type = fields.Selection([
('task_rate', 'At Task Rate'),
('employee_rate', 'At Employee Rate'),
('no', 'No Billable')
], string="Billable Type", compute='_compute_billable_type', compute_sudo=True, store=True,
help='At which rate timesheets will be billed:\n'
' - At task rate: each hour spent on a task is billed at the task rate.\n'
' - At employee rate: each employee\'s logged time is billed at their rate.\n'
' - No Billable: track time without invoicing it')
sale_line_employee_ids = fields.One2many('project.sale.line.employee.map', 'project_id', "Sale line/Employee map", copy=False,
help="Employee/Sale Order Item Mapping:\n Defines to which sales order item an employee's timesheet entry will be linked."
"By extension, it defines the rate at which an employee's time on the project is billed.")
allow_billable = fields.Boolean("Bill from Tasks")
display_create_order = fields.Boolean(compute='_compute_display_create_order')
timesheet_product_id = fields.Many2one(
'product.product', string='Timesheet Product',
domain="""[
('type', '=', 'service'),
('invoice_policy', '=', 'delivery'),
('service_type', '=', 'timesheet'),
'|', ('company_id', '=', False), ('company_id', '=', company_id)]""",
help='Select a Service product with which you would like to bill your time spent on tasks.',
default=_default_timesheet_product_id)
_sql_constraints = [
('timesheet_product_required_if_billable_and_timesheets', """
CHECK(
(allow_billable = 't' AND allow_timesheets = 't' AND timesheet_product_id IS NOT NULL)
OR (allow_billable = 'f')
OR (allow_timesheets = 'f')
OR (allow_billable IS NULL)
OR (allow_timesheets IS NULL)
)""", 'The timesheet product is required when the task can be billed and timesheets are allowed.'),
]
@api.depends('billable_type', 'allow_billable', 'sale_order_id', 'partner_id')
def _compute_display_create_order(self):
for project in self:
project._compute_billable_type()
show = True
if not project.partner_id or project.billable_type != 'no' or project.allow_billable or project.sale_order_id:
show = False
project.display_create_order = show
@api.depends('sale_order_id', 'sale_line_id', 'sale_line_employee_ids')
def _compute_billable_type(self):
for project in self:
billable_type = 'no'
if project.sale_order_id:
if project.sale_line_employee_ids:
billable_type = 'employee_rate'
else:
billable_type = 'task_rate'
project.billable_type = billable_type
@api.onchange('sale_line_employee_ids', 'billable_type')
def _onchange_sale_line_employee_ids(self):
if self.billable_type == 'task_rate':
if self.sale_line_employee_ids:
self.billable_type = 'employee_rate'
else:
if self.billable_type == 'no':
self.sale_line_employee_ids = False
@api.depends('allow_timesheets', 'allow_billable')
def _compute_timesheet_product_id(self):
default_product = self.env.ref('sale_timesheet.time_product', False)
for project in self:
if not project.allow_timesheets or not project.allow_billable:
project.timesheet_product_id = False
elif not project.timesheet_product_id:
project.timesheet_product_id = default_product
@api.constrains('sale_line_id', 'billable_type')
def _check_sale_line_type(self):
for project in self:
if project.billable_type == 'task_rate':
if project.sale_line_id and not project.sale_line_id.is_service:
raise ValidationError(_("A billable project should be linked to a Sales Order Item having a Service product."))
if project.sale_line_id and project.sale_line_id.is_expense:
raise ValidationError(_("A billable project should be linked to a Sales Order Item that does not come from an expense or a vendor bill."))
def action_view_timesheet(self):
self.ensure_one()
if self.allow_timesheets:
return self.action_view_timesheet_plan()
return {
'type': 'ir.actions.act_window',
'name': _('Timesheets of %s', self.name),
'domain': [('project_id', '!=', False)],
'res_model': 'account.analytic.line',
'view_id': False,
'view_mode': 'tree,form',
'help': _("""
<p class="o_view_nocontent_smiling_face">
Record timesheets
</p><p>
You can register and track your working hours by project every
day. All time spent on a project becomes a cost and can be re-invoiced to
customers if required.
</p>
"""),
'limit': 80,
'context': {
'default_project_id': self.id,
'search_default_project_id': [self.id]
}
}
def action_view_timesheet_plan(self):
action = self.env.ref('sale_timesheet.project_timesheet_action_client_timesheet_plan').read()[0]
action['params'] = {
'project_ids': self.ids,
}
action['context'] = {
'active_id': self.id,
'active_ids': self.ids,
'search_default_name': self.name,
}
return action
def action_make_billable(self):
return {
"name": _("Create Sales Order"),
"type": 'ir.actions.act_window',
"res_model": 'project.create.sale.order',
"views": [[False, "form"]],
"target": 'new',
"context": {
'active_id': self.id,
'active_model': 'project.project',
'default_product_id': self.timesheet_product_id.id,
},
}
class ProjectTask(models.Model):
_inherit = "project.task"
# override sale_order_id and make it computed stored field instead of regular field.
sale_order_id = fields.Many2one(compute='_compute_sale_order_id', store=True, readonly=False,
domain="['|', '|', ('partner_id', '=', partner_id), ('partner_id', 'child_of', commercial_partner_id), ('partner_id', 'parent_of', partner_id)]")
analytic_account_id = fields.Many2one('account.analytic.account', related='sale_order_id.analytic_account_id')
billable_type = fields.Selection([
('task_rate', 'At Task Rate'),
('employee_rate', 'At Employee Rate'),
('no', 'No Billable')
], string="Billable Type", compute='_compute_billable_type', compute_sudo=True, store=True)
is_project_map_empty = fields.Boolean("Is Project map empty", compute='_compute_is_project_map_empty')
has_multi_sol = fields.Boolean(compute='_compute_has_multi_sol', compute_sudo=True)
allow_billable = fields.Boolean(related="project_id.allow_billable")
display_create_order = fields.Boolean(compute='_compute_display_create_order')
@api.depends(
'allow_billable', 'allow_timesheets', 'sale_order_id')
def _compute_display_create_order(self):
for task in self:
show = True
if not task.allow_billable or not task.allow_timesheets or \
task.billable_type == 'employee_rate' or not task.partner_id or \
task.sale_order_id:
show = False
task.display_create_order = show
@api.onchange('sale_line_id')
def _onchange_sale_line_id(self):
if self._get_timesheet() and self.allow_timesheets:
if self.sale_line_id:
if self.sale_line_id.product_id.service_policy == 'delivered_timesheet' and self._origin.sale_line_id.product_id.service_policy == 'delivered_timesheet':
message = _("All timesheet hours that are not yet invoiced will be assigned to the selected Sales Order Item on save. Discard to avoid the change.")
else:
message = _("All timesheet hours will be assigned to the selected Sales Order Item on save. Discard to avoid the change.")
else:
message = _("All timesheet hours that are not yet invoiced will be removed from the selected Sales Order Item on save. Discard to avoid the change.")
return {'warning': {
'title': _("Warning"),
'message': message
}}
@api.onchange('project_id')
def _onchange_project_id(self):
if self._origin.allow_timesheets and self._get_timesheet():
message = _("All timesheet hours that are not yet invoiced will be assigned to the selected Project on save. Discard to avoid the change.")
return {'warning': {
'title': _("Warning"),
'message': message
}}
@api.depends('analytic_account_id.active')
def _compute_analytic_account_active(self):
super()._compute_analytic_account_active()
for task in self:
task.analytic_account_active = task.analytic_account_active or task.analytic_account_id.active
@api.depends('sale_line_id', 'project_id', 'billable_type')
def _compute_sale_order_id(self):
for task in self:
if task.billable_type == 'task_rate':
task.sale_order_id = task.sale_line_id.sudo().order_id or task.project_id.sale_order_id
elif task.billable_type == 'employee_rate':
task.sale_order_id = task.project_id.sale_order_id
elif task.billable_type == 'no':
task.sale_order_id = False
@api.depends('project_id.billable_type', 'sale_line_id')
def _compute_billable_type(self):
for task in self:
billable_type = 'no'
if task.project_id.billable_type == 'employee_rate':
billable_type = task.project_id.billable_type
elif (task.project_id.billable_type in ['task_rate', 'no'] and task.sale_line_id): # create a task in global project (non billable)
billable_type = 'task_rate'
task.billable_type = billable_type
@api.depends('project_id.sale_line_employee_ids')
def _compute_is_project_map_empty(self):
for task in self:
task.is_project_map_empty = not bool(task.sudo().project_id.sale_line_employee_ids)
@api.depends('timesheet_ids')
def _compute_has_multi_sol(self):
for task in self:
task.has_multi_sol = task.timesheet_ids and task.timesheet_ids.so_line != task.sale_line_id
@api.onchange('project_id')
def _onchange_project(self):
super(ProjectTask, self)._onchange_project()
if self.project_id:
if self.project_id.billable_type == 'employee_rate':
if not self.partner_id:
self.partner_id = self.project_id.sale_order_id.partner_id
elif self.project_id.billable_type == 'task_rate':
if not self.sale_line_id:
self.sale_line_id = self.project_id.sale_line_id
if not self.partner_id:
self.partner_id = self.sale_line_id.order_partner_id
# set domain on SO: on a non-billable project, all SOLs of the customer; otherwise only the ones from the SO
def write(self, values):
old_sale_line_id = dict([(t.id, t.sale_line_id.id) for t in self])
if values.get('project_id'):
project_dest = self.env['project.project'].browse(values['project_id'])
if project_dest.billable_type == 'employee_rate':
values['sale_line_id'] = False
res = super(ProjectTask, self).write(values)
if 'sale_line_id' in values and self.filtered('allow_timesheets').sudo().timesheet_ids:
so = self.env['sale.order.line'].browse(values['sale_line_id']).order_id
if so and not so.analytic_account_id:
so.analytic_account_id = self.project_id.analytic_account_id
timesheet_ids = self.filtered('allow_timesheets').timesheet_ids.filtered(
lambda t: (not t.timesheet_invoice_id or t.timesheet_invoice_id.state == 'cancel') and t.so_line.id == old_sale_line_id[t.task_id.id]
)
timesheet_ids.write({'so_line': values['sale_line_id']})
if 'project_id' in values:
# Special case when we edit the SOL and the project at the same time: since we edit the SOL of
# the timesheet lines, '_get_timesheet' will not find the right timesheets
# to edit, so we must edit them here.
project = self.env['project.project'].browse(values.get('project_id'))
if project.allow_timesheets:
timesheet_ids.write({'project_id': values.get('project_id')})
return res
def action_make_billable(self):
return {
"name": _("Create Sales Order"),
"type": 'ir.actions.act_window',
"res_model": 'project.task.create.sale.order',
"views": [[False, "form"]],
"target": 'new',
"context": {
'active_id': self.id,
'active_model': 'project.task',
'form_view_initial_mode': 'edit',
'default_product_id': self.project_id.timesheet_product_id.id,
},
}
def _get_timesheet(self):
# return timesheets that are not yet invoiced and that either have no so_line or have an so_line linked to this task
timesheet_ids = super(ProjectTask, self)._get_timesheet()
return timesheet_ids.filtered(lambda t: (not t.timesheet_invoice_id or t.timesheet_invoice_id.state == 'cancel') and (not t.so_line or t.so_line == t.task_id._origin.sale_line_id))
def _get_action_view_so_ids(self):
return list(set((self.sale_order_id + self.timesheet_ids.so_line.order_id).ids))
|
ddico/odoo
|
addons/sale_timesheet/models/project.py
|
Python
|
agpl-3.0
| 15,166
|
# -*- coding: utf-8 -*-
# Copyright 2017 Joaquin Gutierrez Pedrosa <joaquin@gutierrezweb.es>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': 'Hr Payroll Advance Account',
'version': '11.0.0.1.0',
'category': 'Human Resources',
'sequence': 36,
'summary': 'Payroll advances for employees with Account',
'author': 'Joaquin Gutierrez',
'website': 'http://www.gutierrezweb.es',
'description': """
Management advances payroll for employees with Account
""",
'depends': [
'hr',
'hr_payroll',
'hr_payroll_advance',
],
'data': [
'views/hr_view.xml',
],
'installable': True,
'auto_install': False,
}
|
percevaq/odoo_addons
|
hr_payroll_advance_account/__manifest__.py
|
Python
|
agpl-3.0
| 718
|
from pylm.servers import Master
class MyMaster(Master):
def __init__(self, *args, **kwargs):
self.counter = 0
super(MyMaster, self).__init__(*args, **kwargs)
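# scatter() below fans each incoming message out to the workers (three copies
# per message here); gather() counts the results coming back and, once 30 have
# been gathered, replaces the payload of the final message with b'final message'.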
def scatter(self, message):
for i in range(3):
yield message
def gather(self, message):
self.counter += 1
if self.counter == 30:
yield self.change_payload(message, b'final message')
else:
yield message
server = MyMaster(name='server',
pull_address='tcp://127.0.0.1:5555',
pub_address='tcp://127.0.0.1:5556',
worker_pull_address='tcp://127.0.0.1:5557',
worker_push_address='tcp://127.0.0.1:5558',
db_address='tcp://127.0.0.1:5559')
if __name__ == '__main__':
server.start()
|
nfqsolutions/pylm
|
examples/gather/master.py
|
Python
|
agpl-3.0
| 832
|
from datetime import datetime
from sqlalchemy import Column, DateTime
from inbox.sqlalchemy_ext.util import Base36UID, generate_public_id
class HasPublicID(object):
public_id = Column(Base36UID, nullable=False,
index=True, default=generate_public_id)
class AutoTimestampMixin(object):
# We do all default/update in Python not SQL for these because MySQL
# < 5.6 doesn't support multiple TIMESTAMP cols per table, and can't
# do function defaults or update triggers on DATETIME rows.
created_at = Column(DateTime, default=datetime.utcnow,
nullable=False, index=True)
updated_at = Column(DateTime, default=datetime.utcnow,
onupdate=datetime.utcnow, nullable=False, index=True)
deleted_at = Column(DateTime, nullable=True, index=True)
@property
def is_deleted(self):
return self.deleted_at is not None
def mark_deleted(self):
"""
Safer object deletion: mark as deleted and garbage collect later.
"""
self.deleted_at = datetime.utcnow()
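# Usage sketch (hypothetical declarative model, for illustration only): mixing
# these classes into a SQLAlchemy model gives it a base36 public_id plus
# created/updated/deleted timestamp bookkeeping, e.g.:
#
#   class Note(Base, HasPublicID, AutoTimestampMixin):
#       __tablename__ = 'note'
#       id = Column(Integer, primary_key=True)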
|
abhishekgahlot/inbox
|
inbox/models/mixins.py
|
Python
|
agpl-3.0
| 1,091
|
##############################################################################
#
# OSIS stands for Open Student Information System. It's an application
# designed to manage the core business of higher education institutions,
# such as universities, faculties, institutes and professional schools.
# The core business involves the administration of students, teachers,
# courses, programs and so on.
#
# Copyright (C) 2015-2018 Université catholique de Louvain (http://www.uclouvain.be)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# A copy of this license - GNU General Public License - is available
# at the root of the source code of this program. If not,
# see http://www.gnu.org/licenses/.
#
##############################################################################
import datetime
import factory
from osis_common.utils.datetime import get_tzinfo
def generate_email(user, domain=None):
if domain is None:
domain = factory.Faker('domain_name').generate({})
return '{0.first_name}.{0.last_name}@{1}'.format(user, domain).lower()
class UserFactory(factory.django.DjangoModelFactory):
class Meta:
model = 'auth.User'
django_get_or_create = ('username',)
username = factory.Sequence(lambda n: 'username_{}'.format(n))
first_name = factory.Faker('first_name')
last_name = factory.Faker('last_name')
email = factory.LazyAttribute(generate_email)
password = factory.PostGenerationMethodCall('set_password', 'password123')
is_active = True
is_staff = False
is_superuser = False
last_login = factory.LazyAttribute(lambda _o: datetime.datetime(2000, 1, 1, tzinfo=get_tzinfo()))
date_joined = factory.LazyAttribute(lambda _o: datetime.datetime(1999, 1, 1, tzinfo=get_tzinfo()))
class SuperUserFactory(UserFactory):
username = factory.Sequence(lambda n: 'superusername_{0}'.format(n))
is_superuser = True
is_staff = True
is_active = True
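# Usage sketch: UserFactory() creates an active 'auth.User' with a sequential
# username, Faker-generated first/last names, an email derived by
# generate_email() and the password "password123"; SuperUserFactory()
# additionally sets the is_staff and is_superuser flags.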
|
uclouvain/osis_louvain
|
base/tests/factories/user.py
|
Python
|
agpl-3.0
| 2,440
|
# Copyright (C) 2017-2021 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
"""
Django common settings for swh-web.
"""
import os
import sys
from typing import Any, Dict
from swh.web.auth.utils import OIDC_SWH_WEB_CLIENT_ID
from swh.web.config import get_config
swh_web_config = get_config()
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
PROJECT_DIR = os.path.dirname(os.path.abspath(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = swh_web_config["secret_key"]
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = swh_web_config["debug"]
DEBUG_PROPAGATE_EXCEPTIONS = swh_web_config["debug"]
ALLOWED_HOSTS = ["127.0.0.1", "localhost"] + swh_web_config["allowed_hosts"]
# Application definition
INSTALLED_APPS = [
"django.contrib.admin",
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sessions",
"django.contrib.messages",
"django.contrib.staticfiles",
"rest_framework",
"swh.web.common",
"swh.web.api",
"swh.web.auth",
"swh.web.browse",
"webpack_loader",
"django_js_reverse",
"corsheaders",
]
MIDDLEWARE = [
"django.middleware.security.SecurityMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
"corsheaders.middleware.CorsMiddleware",
"django.middleware.common.CommonMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"swh.auth.django.middlewares.OIDCSessionExpiredMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
"django.middleware.clickjacking.XFrameOptionsMiddleware",
"swh.web.common.middlewares.ThrottlingHeadersMiddleware",
"swh.web.common.middlewares.ExceptionMiddleware",
]
# Compress all assets (static ones and dynamically generated html)
# served by django in a local development environment context.
# In a production environment, assets compression will be directly
# handled by web servers like apache or nginx.
if swh_web_config["serve_assets"]:
MIDDLEWARE.insert(0, "django.middleware.gzip.GZipMiddleware")
ROOT_URLCONF = "swh.web.urls"
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [os.path.join(PROJECT_DIR, "../templates")],
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.debug",
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
"swh.web.common.utils.context_processor",
],
"libraries": {"swh_templatetags": "swh.web.common.swh_templatetags",},
},
},
]
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": swh_web_config.get("development_db", ""),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator", # noqa
},
{"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",},
{"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator",},
{"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = "en-us"
TIME_ZONE = "UTC"
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = "/static/"
# static folder location when swh-web has been installed with pip
STATIC_DIR = os.path.join(sys.prefix, "share/swh/web/static")
if not os.path.exists(STATIC_DIR):
# static folder location when developing swh-web
STATIC_DIR = os.path.join(PROJECT_DIR, "../../../static")
STATICFILES_DIRS = [STATIC_DIR]
INTERNAL_IPS = ["127.0.0.1"]
throttle_rates = {}
http_requests = ["GET", "HEAD", "POST", "PUT", "DELETE", "OPTIONS", "PATCH"]
throttling = swh_web_config["throttling"]
for limiter_scope, limiter_conf in throttling["scopes"].items():
if "default" in limiter_conf["limiter_rate"]:
throttle_rates[limiter_scope] = limiter_conf["limiter_rate"]["default"]
# for backward compatibility
else:
throttle_rates[limiter_scope] = limiter_conf["limiter_rate"]
# register sub scopes specific for HTTP request types
for http_request in http_requests:
if http_request in limiter_conf["limiter_rate"]:
throttle_rates[limiter_scope + "_" + http_request.lower()] = limiter_conf[
"limiter_rate"
][http_request]
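# For illustration (hypothetical configuration): a scope "swh_api" configured
# with limiter_rate {"default": "120/h", "POST": "10/h"} yields the throttle
# rates {"swh_api": "120/h", "swh_api_post": "10/h"}.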
REST_FRAMEWORK: Dict[str, Any] = {
"DEFAULT_RENDERER_CLASSES": (
"rest_framework.renderers.JSONRenderer",
"swh.web.api.renderers.YAMLRenderer",
"rest_framework.renderers.TemplateHTMLRenderer",
),
"DEFAULT_THROTTLE_CLASSES": (
"swh.web.api.throttling.SwhWebRateThrottle",
"swh.web.api.throttling.SwhWebUserRateThrottle",
),
"DEFAULT_THROTTLE_RATES": throttle_rates,
"DEFAULT_AUTHENTICATION_CLASSES": [
"rest_framework.authentication.SessionAuthentication",
"swh.auth.django.backends.OIDCBearerTokenAuthentication",
],
"EXCEPTION_HANDLER": "swh.web.api.apiresponse.error_response_handler",
}
LOGGING = {
"version": 1,
"disable_existing_loggers": False,
"filters": {
"require_debug_false": {"()": "django.utils.log.RequireDebugFalse",},
"require_debug_true": {"()": "django.utils.log.RequireDebugTrue",},
},
"formatters": {
"request": {
"format": "[%(asctime)s] [%(levelname)s] %(request)s %(status_code)s",
"datefmt": "%d/%b/%Y %H:%M:%S",
},
"simple": {
"format": "[%(asctime)s] [%(levelname)s] %(message)s",
"datefmt": "%d/%b/%Y %H:%M:%S",
},
"verbose": {
"format": (
"[%(asctime)s] [%(levelname)s] %(name)s.%(funcName)s:%(lineno)s "
"- %(message)s"
),
"datefmt": "%d/%b/%Y %H:%M:%S",
},
},
"handlers": {
"console": {
"level": "DEBUG",
"filters": ["require_debug_true"],
"class": "logging.StreamHandler",
"formatter": "simple",
},
"file": {
"level": "WARNING",
"filters": ["require_debug_false"],
"class": "logging.FileHandler",
"filename": os.path.join(swh_web_config["log_dir"], "swh-web.log"),
"formatter": "simple",
},
"file_request": {
"level": "WARNING",
"filters": ["require_debug_false"],
"class": "logging.FileHandler",
"filename": os.path.join(swh_web_config["log_dir"], "swh-web.log"),
"formatter": "request",
},
"console_verbose": {
"level": "DEBUG",
"filters": ["require_debug_true"],
"class": "logging.StreamHandler",
"formatter": "verbose",
},
"file_verbose": {
"level": "WARNING",
"filters": ["require_debug_false"],
"class": "logging.FileHandler",
"filename": os.path.join(swh_web_config["log_dir"], "swh-web.log"),
"formatter": "verbose",
},
"null": {"class": "logging.NullHandler",},
},
"loggers": {
"": {
"handlers": ["console_verbose", "file_verbose"],
"level": "DEBUG" if DEBUG else "WARNING",
},
"django": {
"handlers": ["console"],
"level": "DEBUG" if DEBUG else "WARNING",
"propagate": False,
},
"django.request": {
"handlers": ["file_request"],
"level": "DEBUG" if DEBUG else "WARNING",
"propagate": False,
},
"django.db.backends": {"handlers": ["null"], "propagate": False},
"django.utils.autoreload": {"level": "INFO",},
},
}
WEBPACK_LOADER = {
"DEFAULT": {
"CACHE": False,
"BUNDLE_DIR_NAME": "./",
"STATS_FILE": os.path.join(STATIC_DIR, "webpack-stats.json"),
"POLL_INTERVAL": 0.1,
"TIMEOUT": None,
"IGNORE": [".+\\.hot-update.js", ".+\\.map"],
}
}
LOGIN_URL = "/admin/login/"
LOGIN_REDIRECT_URL = "admin"
SESSION_ENGINE = "django.contrib.sessions.backends.cache"
CACHES = {
"default": {"BACKEND": "django.core.cache.backends.locmem.LocMemCache"},
}
JS_REVERSE_JS_MINIFY = False
CORS_ORIGIN_ALLOW_ALL = True
CORS_URLS_REGEX = r"^/(badge|api)/.*$"
AUTHENTICATION_BACKENDS = [
"django.contrib.auth.backends.ModelBackend",
"swh.auth.django.backends.OIDCAuthorizationCodePKCEBackend",
]
SWH_AUTH_SERVER_URL = swh_web_config["keycloak"]["server_url"]
SWH_AUTH_REALM_NAME = swh_web_config["keycloak"]["realm_name"]
SWH_AUTH_CLIENT_ID = OIDC_SWH_WEB_CLIENT_ID
SWH_AUTH_SESSION_EXPIRED_REDIRECT_VIEW = "logout"
|
SoftwareHeritage/swh-web-ui
|
swh/web/settings/common.py
|
Python
|
agpl-3.0
| 9,546
|
# from lino_xl.lib.contacts.fixtures.demo import site_company_objects as objects
from lino_xl.lib.contacts.fixtures.demo import objects
|
lsaffre/noi
|
lino_noi/lib/contacts/fixtures/demo.py
|
Python
|
agpl-3.0
| 136
|
"""
Custom settings for the text rendering app.
"""
from django.conf import settings
# Emoticons images directory path with trailing slash
EMOTICONS_IMG_DIR = getattr(settings, 'EMOTICONS_IMG_DIR', 'images/smileys/')
# Base URL for relative-to-absolute conversion
RELATIVE_URL_BASE = getattr(settings, 'RELATIVE_URL_BASE', 'https://www.carnetdumaker.net/')
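# For illustration: code using RELATIVE_URL_BASE would expand a relative link
# such as '/images/logo.png' to 'https://www.carnetdumaker.net/images/logo.png'.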
|
TamiaLab/carnetdumaker
|
apps/txtrender/settings.py
|
Python
|
agpl-3.0
| 361
|
# This file was translated from the original Java test from the Apache
# Cassandra source repository, as of commit 6ca34f81386dc8f6020cdf2ea4246bca2a0896c5
#
# The original Apache Cassandra license:
#
# SPDX-License-Identifier: Apache-2.0
from cassandra_tests.porting import *
import time
from cassandra.query import UNSET_VALUE
def testNegativeTimestamps(cql, test_keyspace):
with create_table(cql, test_keyspace, "(k int PRIMARY KEY, v int)") as table:
execute(cql, table, "INSERT INTO %s (k, v) VALUES (?, ?) USING TIMESTAMP ?", 1, 1, -42)
assert_rows(execute(cql, table, "SELECT writetime(v) FROM %s WHERE k = ?", 1), [-42])
assert_invalid(cql, table, "INSERT INTO %s (k, v) VALUES (?, ?) USING TIMESTAMP ?", 2, 2, -2**63)
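# Note: -2**63 (Java's Long.MIN_VALUE) is expected to be rejected because that
# timestamp value is reserved and cannot be used for writes.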
# Test timestamp and ttl
# migrated from cql_tests.py:TestCQL.timestamp_and_ttl_test()
def testTimestampTTL(cql, test_keyspace):
with create_table(cql, test_keyspace, "(k int PRIMARY KEY, c text, d text)") as table:
execute(cql, table, "INSERT INTO %s (k, c) VALUES (1, 'test')")
execute(cql, table, "INSERT INTO %s (k, c) VALUES (2, 'test') USING TTL 400")
res = list(execute(cql, table, "SELECT k, c, writetime(c), ttl(c) FROM %s"))
assert len(res) == 2
for r in res:
if r[0] == 1:
assert r[3] == None
else:
assert r[2] != None and r[3] != None
# wrap writetime(), ttl() in other functions (test for CASSANDRA-8451)
res = list(execute(cql, table, "SELECT k, c, blobAsBigint(bigintAsBlob(writetime(c))), ttl(c) FROM %s"))
assert len(res) == 2
for r in res:
assert r[2] != None
if r[0] == 1:
assert r[3] == None
else:
assert r[3] != None
res = list(execute(cql, table, "SELECT k, c, writetime(c), blobAsInt(intAsBlob(ttl(c))) FROM %s"))
assert len(res) == 2
for r in res:
assert r[2] != None
if r[0] == 1:
assert r[3] == None
else:
assert r[3] != None
assert_invalid(cql, table, "SELECT k, c, writetime(k) FROM %s")
assert_rows(execute(cql, table, "SELECT k, d, writetime(d) FROM %s WHERE k = 1"),
[1, None, None])
# Migrated from cql_tests.py:TestCQL.invalid_custom_timestamp_test()
def testInvalidCustomTimestamp(cql, test_keyspace):
# Conditional updates
with create_table(cql, test_keyspace, "(k int, v int, PRIMARY KEY (k, v))") as table:
execute(cql, table, "BEGIN BATCH " +
"INSERT INTO %s (k, v) VALUES(0, 0) IF NOT EXISTS; " +
"INSERT INTO %s (k, v) VALUES(0, 1) IF NOT EXISTS; " +
"APPLY BATCH")
assert_invalid(cql, table, "BEGIN BATCH " +
"INSERT INTO %s (k, v) VALUES(0, 2) IF NOT EXISTS USING TIMESTAMP 1; " +
"INSERT INTO %s (k, v) VALUES(0, 3) IF NOT EXISTS; " +
"APPLY BATCH")
assert_invalid(cql, table, "BEGIN BATCH " +
"USING TIMESTAMP 1 INSERT INTO %s (k, v) VALUES(0, 4) IF NOT EXISTS; " +
"INSERT INTO %s (k, v) VALUES(0, 1) IF NOT EXISTS; " +
"APPLY BATCH")
execute(cql, table, "INSERT INTO %s (k, v) VALUES(1, 0) IF NOT EXISTS")
assert_invalid(cql, table, "INSERT INTO %s (k, v) VALUES(1, 1) IF NOT EXISTS USING TIMESTAMP 5")
# Counters
with create_table(cql, test_keyspace, "(k int PRIMARY KEY, c counter)") as table:
execute(cql, table, "UPDATE %s SET c = c + 1 WHERE k = 0")
assert_invalid(cql, table, "UPDATE %s USING TIMESTAMP 10 SET c = c + 1 WHERE k = 0")
execute(cql, table, "BEGIN COUNTER BATCH " +
"UPDATE %s SET c = c + 1 WHERE k = 0; " +
"UPDATE %s SET c = c + 1 WHERE k = 0; " +
"APPLY BATCH")
assert_invalid(cql, table, "BEGIN COUNTER BATCH " +
"UPDATE %s USING TIMESTAMP 3 SET c = c + 1 WHERE k = 0; " +
"UPDATE %s SET c = c + 1 WHERE k = 0; " +
"APPLY BATCH")
assert_invalid(cql, table, "BEGIN COUNTER BATCH " +
"USING TIMESTAMP 3 UPDATE %s SET c = c + 1 WHERE k = 0; " +
"UPDATE %s SET c = c + 1 WHERE k = 0; " +
"APPLY BATCH")
def testInsertTimestampWithUnset(cql, test_keyspace):
with create_table(cql, test_keyspace, "(k int PRIMARY KEY, i int)") as table:
execute(cql, table, "INSERT INTO %s (k, i) VALUES (1, 1) USING TIMESTAMP ?", UNSET_VALUE) # treat as 'now'
def testTimestampsOnUnsetColumns(cql, test_keyspace):
with create_table(cql, test_keyspace, "(k int PRIMARY KEY, i int)") as table:
execute(cql, table, "INSERT INTO %s (k, i) VALUES (1, 1) USING TIMESTAMP 1;")
execute(cql, table, "INSERT INTO %s (k) VALUES (2) USING TIMESTAMP 2;")
execute(cql, table, "INSERT INTO %s (k, i) VALUES (3, 3) USING TIMESTAMP 1;")
assert_rows_ignoring_order(execute(cql, table, "SELECT k, i, writetime(i) FROM %s "),
[1, 1, 1],
[2, None, None],
[3, 3, 1])
def testTimestampsOnUnsetColumnsWide(cql, test_keyspace):
with create_table(cql, test_keyspace, "(k int, c int, i int, PRIMARY KEY (k, c))") as table:
execute(cql, table, "INSERT INTO %s (k, c, i) VALUES (1, 1, 1) USING TIMESTAMP 1;")
execute(cql, table, "INSERT INTO %s (k, c) VALUES (1, 2) USING TIMESTAMP 1;")
execute(cql, table, "INSERT INTO %s (k, c, i) VALUES (1, 3, 1) USING TIMESTAMP 1;")
execute(cql, table, "INSERT INTO %s (k, c) VALUES (2, 2) USING TIMESTAMP 2;")
execute(cql, table, "INSERT INTO %s (k, c, i) VALUES (3, 3, 3) USING TIMESTAMP 1;")
assert_rows_ignoring_order(execute(cql, table, "SELECT k, c, i, writetime(i) FROM %s "),
[1, 1, 1, 1],
[1, 2, None, None],
[1, 3, 1, 1],
[2, 2, None, None],
[3, 3, 3, 1])
@pytest.mark.skip(reason="a very slow test (6 seconds), skipping it")
def testTimestampAndTTLPrepared(cql, test_keyspace):
with create_table(cql, test_keyspace, "(k int, c int, i int, PRIMARY KEY (k, c))") as table:
execute(cql, table, "INSERT INTO %s (k, c, i) VALUES (1, 1, 1) USING TIMESTAMP ? AND TTL ?;", 1, 5)
execute(cql, table, "INSERT INTO %s (k, c) VALUES (1, 2) USING TIMESTAMP ? AND TTL ? ;", 1, 5)
execute(cql, table, "INSERT INTO %s (k, c, i) VALUES (1, 3, 1) USING TIMESTAMP ? AND TTL ?;", 1, 5)
execute(cql, table, "INSERT INTO %s (k, c) VALUES (2, 2) USING TIMESTAMP ? AND TTL ?;", 2, 5)
execute(cql, table, "INSERT INTO %s (k, c, i) VALUES (3, 3, 3) USING TIMESTAMP ? AND TTL ?;", 1, 5)
assert_rows_ignoring_order(execute(cql, table, "SELECT k, c, i, writetime(i) FROM %s "),
[1, 1, 1, 1],
[1, 2, None, None],
[1, 3, 1, 1],
[2, 2, None, None],
[3, 3, 3, 1])
time.sleep(6)
assert_empty(execute(cql, table, "SELECT k, c, i, writetime(i) FROM %s "))
@pytest.mark.skip(reason="a very slow test (6 seconds), skipping it")
def testTimestampAndTTLUpdatePrepared(cql, test_keyspace):
with create_table(cql, test_keyspace, "(k int, c int, i int, PRIMARY KEY (k, c))") as table:
execute(cql, table, "UPDATE %s USING TIMESTAMP ? AND TTL ? SET i=1 WHERE k=1 AND c = 1 ;", 1, 5)
execute(cql, table, "UPDATE %s USING TIMESTAMP ? AND TTL ? SET i=1 WHERE k=1 AND c = 3 ;", 1, 5)
execute(cql, table, "UPDATE %s USING TIMESTAMP ? AND TTL ? SET i=1 WHERE k=2 AND c = 2 ;", 2, 5)
execute(cql, table, "UPDATE %s USING TIMESTAMP ? AND TTL ? SET i=3 WHERE k=3 AND c = 3 ;", 1, 5)
assert_rows_ignoring_order(execute(cql, table, "SELECT k, c, i, writetime(i) FROM %s "),
[1, 1, 1, 1],
[1, 3, 1, 1],
[2, 2, 1, 2],
[3, 3, 3, 1])
time.sleep(6)
assert_empty(execute(cql, table, "SELECT k, c, i, writetime(i) FROM %s "))
|
scylladb/scylla
|
test/cql-pytest/cassandra_tests/validation/entities/timestamp_test.py
|
Python
|
agpl-3.0
| 8,212
|
#!/usr/bin/env python
# rebuild.py
#
# Copyright (C) 2008-2018 Veselin Penev, https://bitdust.io
#
# This file (rebuild.py) is part of BitDust Software.
#
# BitDust is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# BitDust Software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with BitDust Software. If not, see <http://www.gnu.org/licenses/>.
#
# Please contact us if you have any questions at bitdust.io@gmail.com
from __future__ import absolute_import
import os
import lib.packetid
import main.settings
import raid.read
from six.moves import range
def rebuild(backupID, blockNum, eccMap, availableSuppliers, remoteMatrix, localMatrix):
# try:
supplierCount = len(availableSuppliers)
missingData = [0] * supplierCount
missingParity = [0] * supplierCount
reconstructedData = [0] * supplierCount
reconstructedParity = [0] * supplierCount
remoteData = list(remoteMatrix['D'])
remoteParity = list(remoteMatrix['P'])
localData = list(localMatrix['D'])
localParity = list(localMatrix['P'])
customer, localPath = lib.packetid.SplitPacketID(backupID)
def _build_raid_file_name(supplierNumber, dataOrParity):
return os.path.join(
main.settings.getLocalBackupsDir(),
customer,
localPath,
str(blockNum) + '-' + str(supplierNumber) + '-' + dataOrParity)
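# For illustration (hypothetical values): blockNum=7, supplierNumber=2 and
# dataOrParity='Data' yield "<local backups dir>/<customer>/<localPath>/7-2-Data".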
# Build a list of missing pieces.
# A file is missing if the value in the corresponding cell
# of the "remote" matrix (see ``p2p.backup_matrix``) is -1 or 0
# while the supplier who must keep that file is online.
# In other words, if a supplier is online but does not have a piece, that piece is missing.
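# For illustration (hypothetical values): with availableSuppliers = [1, 1, 0, 1]
# and remoteData = [1, 0, -1, 1], the loop below marks missingData = [0, 1, 0, 0]
# (supplier 2 is offline, so its missing piece is not flagged here).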
for supplierNum in range(supplierCount):
if availableSuppliers[supplierNum] == 0:
continue
# if the remote Data file does not exist and the supplier is online
# we mark it as missing and will try to rebuild this file and send it to him
if remoteData[supplierNum] != 1:
# mark file as missing
missingData[supplierNum] = 1
# same for Parity file
if remoteParity[supplierNum] != 1:
missingParity[supplierNum] = 1
# This makes an attempt to rebuild the missing pieces
# from the pieces we have on hand.
# lg.out(14, 'block_rebuilder.AttemptRebuild %s %d BEGIN' % (self.backupID, self.blockNum))
newData = False
madeProgress = True
while madeProgress:
madeProgress = False
# will check all data packets we have
for supplierNum in range(supplierCount):
dataFileName = _build_raid_file_name(supplierNum, 'Data')
# if we do not have this item on hand - we will reconstruct it from other items
if localData[supplierNum] == 0:
parityNum, parityMap = eccMap.GetDataFixPath(localData, localParity, supplierNum)
if parityNum != -1:
rebuildFileList = []
rebuildFileList.append(_build_raid_file_name(parityNum, 'Parity'))
for supplierParity in parityMap:
if supplierParity != supplierNum:
filename = _build_raid_file_name(supplierParity, 'Data')
if os.path.isfile(filename):
rebuildFileList.append(filename)
# lg.out(10, ' rebuilding file %s from %d files' % (os.path.basename(dataFileName), len(rebuildFileList)))
raid.read.RebuildOne(rebuildFileList, len(rebuildFileList), dataFileName)
if os.path.exists(dataFileName):
localData[supplierNum] = 1
madeProgress = True
# lg.out(10, ' Data file %s found after rebuilding for supplier %d' % (os.path.basename(dataFileName), supplierNum))
# now we check again: if we have the data on hand after the rebuild and it is missing remotely - send it
# but also check not to duplicate sending to this supplier
# sending is now separated, see the file data_sender.py
if localData[supplierNum] == 1 and missingData[supplierNum] == 1: # and self.dataSent[supplierNum] == 0:
# lg.out(10, ' rebuilt a new Data for supplier %d' % supplierNum)
newData = True
reconstructedData[supplierNum] = 1
# self.outstandingFilesList.append((dataFileName, self.BuildFileName(supplierNum, 'Data'), supplierNum))
# self.dataSent[supplierNum] = 1
# now with parities ...
for supplierNum in range(supplierCount):
parityFileName = _build_raid_file_name(supplierNum, 'Parity')
if localParity[supplierNum] == 0:
parityMap = eccMap.ParityToData[supplierNum]
HaveAllData = True
for segment in parityMap:
if localData[segment] == 0:
HaveAllData = False
break
if HaveAllData:
rebuildFileList = []
for supplierParity in parityMap:
filename = _build_raid_file_name(supplierParity, 'Data') # ??? why not 'Parity'
if os.path.isfile(filename):
rebuildFileList.append(filename)
# lg.out(10, ' rebuilding file %s from %d files' % (os.path.basename(parityFileName), len(rebuildFileList)))
raid.read.RebuildOne(rebuildFileList, len(rebuildFileList), parityFileName)
if os.path.exists(parityFileName):
# lg.out(10, ' Parity file %s found after rebuilding for supplier %d' % (os.path.basename(parityFileName), supplierNum))
localParity[supplierNum] = 1
# so we have the parity on hand and it is missing - send it
if localParity[supplierNum] == 1 and missingParity[supplierNum] == 1: # and self.paritySent[supplierNum] == 0:
# lg.out(10, ' rebuilt a new Parity for supplier %d' % supplierNum)
newData = True
reconstructedParity[supplierNum] = 1
# self.outstandingFilesList.append((parityFileName, self.BuildFileName(supplierNum, 'Parity'), supplierNum))
# self.paritySent[supplierNum] = 1
# lg.out(14, 'block_rebuilder.AttemptRebuild END')
return (newData, localData, localParity, reconstructedData, reconstructedParity)
# except:
# return None
|
vesellov/bitdust.devel
|
raid/rebuild.py
|
Python
|
agpl-3.0
| 6,927
|
# -*- coding: utf-8 -*-
#
# SPDX-License-Identifier: AGPL-3.0-or-later
#
# snippy - software development and maintenance notes manager.
# Copyright 2017-2020 Heikki J. Laaksonen <laaksonen.heikki.j@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""reference: Default references for testing."""
from tests.lib.helper import Helper
class Reference(object): # pylint: disable=too-few-public-methods
"""Default references for testing."""
_GITLOG = 0
_REGEXP = 1
_PYTEST = 2
# Default time is same for the default content. See 'Test case layouts and
# data structures' for more information.
DEFAULT_TIME = '2018-06-22T13:11:13.678729+00:00'
# Default content must always be set so that it reflects the content stored
# in the database. For example, the tags must be sorted in the correct order.
# This forces each test case to define erroneous content explicitly, which
# improves the readability and maintainability of failure testing.
_DEFAULTS = ({
'category': 'reference',
'data': (),
'brief': 'How to write commit messages',
'description': '',
'name': '',
'groups': ('git',),
'tags': ('commit', 'git', 'howto'),
'links': ('https://chris.beams.io/posts/git-commit/', ),
'source': '',
'versions': (),
'languages': (),
'filename': '',
'created': DEFAULT_TIME,
'updated': DEFAULT_TIME,
'uuid': '31cd5827-b6ef-4067-b5ac-3ceac07dde9f',
'digest': '5c2071094dbfaa33787064a6669e1fdfe49a86d07e58f12fffa0780eecdb227f'
}, {
'category': 'reference',
'data': (),
'brief': 'Python regular expression',
'description': '',
'name': '',
'groups': ('python',),
'tags': ('howto', 'online', 'python', 'regexp'),
'links': ('https://www.cheatography.com/davechild/cheat-sheets/regular-expressions/',
'https://pythex.org/'),
'source': '',
'versions': (),
'languages': (),
'filename': '',
'created': DEFAULT_TIME,
'updated': DEFAULT_TIME,
'uuid': '32cd5827-b6ef-4067-b5ac-3ceac07dde9f',
'digest': 'cb9225a81eab8ced090649f795001509b85161246b46de7d12ab207698373832'
}, {
'category': 'reference',
'data': (),
'brief': 'Python pytest framework',
'description': '',
'name': '',
'groups': ('python',),
'tags': ('docs', 'pytest', 'python'),
'links': ('https://docs.pytest.org/en/latest/skipping.html', ),
'source': '',
'versions': (),
'languages': (),
'filename': '',
'created': '2016-04-21T12:10:11.678729+00:00',
'updated': '2016-04-21T12:10:11.678729+00:00',
'uuid': '33cd5827-b6ef-4067-b5ac-3ceac07dde9f',
'digest': '1f9d9496005736efe321d44a28c05ca9ed0e53f7170743df361ddcd7b884455e'
})
GITLOG_CREATED = _DEFAULTS[_GITLOG]['created']
GITLOG_UPDATED = _DEFAULTS[_GITLOG]['updated']
REGEXP_CREATED = _DEFAULTS[_REGEXP]['created']
REGEXP_UPDATED = _DEFAULTS[_REGEXP]['updated']
PYTEST_CREATED = _DEFAULTS[_PYTEST]['created']
PYTEST_UPDATED = _DEFAULTS[_PYTEST]['updated']
if not DEFAULT_TIME == GITLOG_CREATED == GITLOG_UPDATED == REGEXP_CREATED == REGEXP_UPDATED:
raise Exception('default content timestamps must be same - see \'Test case layouts and data structures\'')
GITLOG_DIGEST = _DEFAULTS[_GITLOG]['digest']
REGEXP_DIGEST = _DEFAULTS[_REGEXP]['digest']
PYTEST_DIGEST = _DEFAULTS[_PYTEST]['digest']
GITLOG_UUID = _DEFAULTS[_GITLOG]['uuid']
REGEXP_UUID = _DEFAULTS[_REGEXP]['uuid']
PYTEST_UUID = _DEFAULTS[_PYTEST]['uuid']
GITLOG = _DEFAULTS[_GITLOG]
REGEXP = _DEFAULTS[_REGEXP]
PYTEST = _DEFAULTS[_PYTEST]
DEFAULT_REFERENCES = (GITLOG, REGEXP)
TEMPLATE = Helper.read_template('reference.txt').split('\n')
TEMPLATE_DIGEST_EMPTY = 'bb4c2540fab3a12b051b77b6902f426812ec95f8a1fa9e07ca1b7dc3cca0cc0d'
TEMPLATE_TEXT = (
'# Commented lines will be ignored.',
'#',
'# Add mandatory links below one link per line.',
'',
'',
'# Add optional brief description below.',
'Add brief title for content',
'',
'# Add optional description below.',
'Add a description that defines the content in one chapter.',
'',
'# Add optional name below.',
'example content handle',
'',
'# Add optional comma separated list of groups below.',
'groups',
'',
'# Add optional comma separated list of tags below.',
'example,tags',
'',
'# Add optional source reference below.',
'https://www.example.com/source.md',
'',
'# Add optional comma separated list of key-value versions below.',
'example=3.9.0,python>=3',
'',
'# Add optional comma separated list of languages below.',
'example-language',
'',
'# Add optional filename below.',
'example-content.md',
'',
'# Meta',
'category : reference',
'created : 2018-02-02T02:02:02.000001+00:00',
'digest : f5693f66ec42e8f42687e8639df806e79aead1dad1a6c1b5448b6220995518bd',
'updated : 2018-02-02T02:02:02.000001+00:00',
'uuid : a1cd5827-b6ef-4067-b5ac-3ceac07dde9f',
''
)
TEMPLATE_MKDN = (
'# Add brief title for content @groups',
'',
'> Add a description that defines the content in one chapter.',
'',
'> [1] https://www.example.com/add-links-here.html',
'',
'## Meta',
'',
'> category : reference ',
'created : 2018-02-02T02:02:02.000001+00:00 ',
'digest : bb4c2540fab3a12b051b77b6902f426812ec95f8a1fa9e07ca1b7dc3cca0cc0d ',
'filename : example-content.md ',
'languages : example-language ',
'name : example content handle ',
'source : https://www.example.com/source.md ',
'tags : example,tags ',
'updated : 2018-02-02T02:02:02.000001+00:00 ',
'uuid : a1cd5827-b6ef-4067-b5ac-3ceac07dde9f ',
'versions : example=3.9.0,python>=3 ',
''
)
|
heilaaks/snippy
|
tests/lib/reference.py
|
Python
|
agpl-3.0
| 6,929
|
import logging
from base64 import b64decode
import pem
from OpenSSL.crypto import FILETYPE_PEM, load_certificate, verify, X509, \
X509Store, X509StoreContext, load_crl, X509StoreFlags
from django.conf import settings # type: ignore
from rest_framework.exceptions import ValidationError
logger = logging.getLogger(__name__)
class CertificateConfiguration:
def __init__(self) -> None:
self.digest = settings.CERTIFICATE_DIGEST
class InvalidSignatureException(ValidationError):
pass
class InvalidCertificateException(ValidationError):
pass
class CertificateAppIdMismatchException(ValidationError):
pass
class CertificateValidator:
"""
See https://pyopenssl.readthedocs.io/en/stable/api/crypto.html#signing
-and-verifying-signatures
"""
def __init__(self, config: CertificateConfiguration) -> None:
self.config = config
def validate_signature(self, certificate: str, signature: str,
data: bytes) -> None:
"""
Tests if a signature over the given data is valid for the certificate, using the configured digest
:param certificate: the certificate to use as string
:param signature: the signature base64 encoded string to test
:param data: the binary file content that was signed
:raises: InvalidSignatureException if the signature is invalid
:return: None
"""
cert = self._to_cert(certificate)
err_msg = 'Signature is invalid'
try:
result = verify(cert, b64decode(signature.encode()), data,
self.config.digest)
if result is not None:
raise InvalidSignatureException(err_msg)
except Exception as e:
raise InvalidSignatureException('%s: %s' % (err_msg, str(e)))
def validate_certificate(self, certificate: str, chain: str,
crl: str = None) -> None:
"""
Tests if a certificate has been signed by the chain, is not revoked
and has not yet expired.
:param certificate: the certificate to test as string
:param chain: the certificate chain file content as string
:param crl: the certificate revocation list file content as string
:raises: InvalidCertificateException if the certificate is invalid
:return: None
"""
# root and intermediary certificate need to be split
cas = pem.parse(chain.encode())
store = X509Store()
for ca in cas:
store.add_cert(self._to_cert(str(ca)))
cert = self._to_cert(certificate)
if crl:
parsed_crl = load_crl(FILETYPE_PEM, crl)
store.set_flags(X509StoreFlags.CRL_CHECK)
store.add_crl(parsed_crl)
ctx = X509StoreContext(store, cert)
err_msg = 'Certificate is invalid'
try:
result = ctx.verify_certificate()
if result is not None:
raise InvalidCertificateException(err_msg)
except Exception as e:
raise InvalidCertificateException('%s: %s' % (err_msg, str(e)))
def get_cn(self, certificate: str) -> str:
"""
Extracts the CN from a certificate and removes the leading
slash, e.g. /news should return news
:param certificate: certificate
:return: the certificate's subject without the leading slash
"""
cert = self._to_cert(certificate)
return cert.get_subject().CN
def validate_app_id(self, certificate: str, app_id: str) -> None:
"""
Validates if the CN matches the app id
:param certificate: app certificate
:param app_id: the app id
:raises CertificateAppIdMismatchException: if the app id and cert CN do
not match
:return: None
"""
cn = self.get_cn(certificate)
if cn != app_id:
msg = 'App id %s does not match cert CN %s' % (app_id, cn)
raise CertificateAppIdMismatchException(msg)
def _to_cert(self, certificate: str) -> X509:
try:
return load_certificate(FILETYPE_PEM, certificate.encode())
except Exception as e:
msg = '%s: %s' % ('Invalid certificate', str(e))
raise InvalidCertificateException(msg)
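# Minimal usage sketch (hypothetical PEM/base64 inputs), based on the methods above:
#
#   config = CertificateConfiguration()
#   validator = CertificateValidator(config)
#   validator.validate_certificate(cert_pem, chain_pem)
#   validator.validate_signature(cert_pem, signature_b64, data_bytes)
#   validator.validate_app_id(cert_pem, 'news')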
|
clone1612/appstore
|
nextcloudappstore/core/certificate/validator.py
|
Python
|
agpl-3.0
| 4,284
|
import pytest
from selenium.common.exceptions import NoSuchElementException
from ..base_test import SeleniumTestCase
@pytest.mark.batch3
class TestLocationUpdating(SeleniumTestCase):
@pytest.fixture
def records_org_prj(self, basic_org, records_prj):
self.org = basic_org
self.prj = records_prj
self.prj_dashboard_path = '/organizations/{}/projects/{}/'.format(
basic_org['slug'], records_prj['slug'])
# ------ Utility functions ------
def click_save_button(self):
button = self.wd.BY_XPATH(
'//*[@type="submit" and normalize-space()="Save"]')
self.scroll_element_into_view(button)
button.click()
# ------ Test cases ------
def test_user_can_update_tenure_relationship_type(
self, records_org_prj, basic_water_rights,
data_collector
):
"""Verifies Records test case #RU1."""
self.log_in(data_collector)
self.open(self.prj_dashboard_path + 'relationships/{}/'.format(
basic_water_rights['pk']))
self.wd.BY_CSS('[title="Edit relationship"]').click()
self.update_form_field('tenure_type', 'UC')
self.click_save_button()
self.wd.BY_XPATH(
'//tr['
' .//td[contains(.,"Type")] and '
' .//td[contains(.,"Undivided Co-ownership")]'
']')
self.wd.BY_XPATH(
'//tr['
' .//td[contains(.,"Party")] and '
' .//td//a[contains(.,"{}")]'
']'.format(basic_water_rights['party']['name']))
# [REVERSION]
self.wd.BY_CSS('[title="Edit relationship"]').click()
self.update_form_field('tenure_type', 'WR')
self.click_save_button()
def test_unauthorized_user_cannot_update_tenure_relationship(
self, records_org_prj, basic_water_rights, prj_user
):
"""Verifies Records test case #RU3."""
self.log_in(prj_user)
self.open(self.prj_dashboard_path + 'relationships/{}/'.format(
basic_water_rights['pk']))
try:
self.wd.BY_CSS('[title="Edit relationship"]')
raise AssertionError('Edit relationship button is present')
except NoSuchElementException:
pass
self.open(self.get_url_path() + 'edit')
self.wait_for_alert(
"You don't have permission to update this tenure relationship.")
|
Cadasta/cadasta-test
|
cadasta/test/record_tests/test_tenure_relationship_updating.py
|
Python
|
agpl-3.0
| 2,424
|
import ddt
from mock import patch
from xblock.core import XBlock
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from ..xblock_service import OptionalService
class PureBlock(XBlock):
pass
class BadBlock(object):
def __init__(self, location):
self.location = location
@ddt.ddt
class OptionalServiceTest(ModuleStoreTestCase):
def setUp(self):
super(OptionalServiceTest, self).setUp()
self.course = CourseFactory.create()
chapter = ItemFactory.create(parent=self.course, category='chapter')
section = ItemFactory.create(parent=chapter, category='sequential')
self.vertical = ItemFactory.create(parent=section, category='vertical')
@XBlock.register_temp_plugin(PureBlock, 'pure_block')
@ddt.data(True, False)
@patch('openedx.core.djangoapps.ga_optional.api.is_available')
def test_is_available(self, enabled, mock_is_available):
service = OptionalService()
mock_is_available.return_value = enabled
xblock = ItemFactory.create(parent=self.vertical, category='pure_block')
if enabled:
self.assertTrue(service.is_available(xblock, 'test-key'))
else:
self.assertFalse(service.is_available(xblock, 'test-key'))
mock_is_available.assert_called_with('test-key', course_key=self.course.id)
@patch('openedx.core.djangoapps.ga_optional.api.is_available')
def test_is_available_invalid_xblock(self, mock_is_available):
service = OptionalService()
mock_is_available.return_value = True
# no location
xblock = object()
self.assertFalse(service.is_available(xblock, 'test-key'))
# invalid location
xblock = BadBlock('test-location')
self.assertFalse(service.is_available(xblock, 'test-key'))
mock_is_available.assert_not_called()
|
nttks/edx-platform
|
openedx/core/djangoapps/ga_optional/tests/test_xblock_service.py
|
Python
|
agpl-3.0
| 1,959
|
#!/usr/bin/python
import sys, math
from borgy.graph.bulge_graph import BulgeGraph
def print_neato(bg):
stems = bg.get_bulged_stem_names()
# The different nodes for different types of bulges
node_defs = dict()
node_lines = dict()
# loops
node_defs[1] = '\t{node [style=filled,shape=circle,fillcolor=red,fontsize=12'
node_defs[2] = '\t{node [style=filled,shape=circle,fillcolor=yellow,fontsize=12'
node_defs[3] = '\t{node [style=filled,shape=hexagon,fillcolor=red,fontsize=12'
node_defs[4] = '\t{node [style=filled,shape=octagon,fillcolor=red,fontsize=12'
node_defs[5] = '\t{node [style=filled,shape=octagon,fillcolor=red,fontsize=12'
node_defs[6] = '\t{node [style=filled,shape=octagon,fillcolor=red,fontsize=12'
node_defs[7] = '\t{node [style=filled,shape=octagon,fillcolor=red,fontsize=12'
node_defs[8] = '\t{node [style=filled,shape=octagon,fillcolor=red,fontsize=12'
node_defs[9] = '\t{node [style=filled,shape=octagon,fillcolor=red,fontsize=12'
node_lines = ''
connection_lines = ''
fancy = True
print "graph G {"
print "\tgraph [overlap=scale];"
print "\tnode [shape=box];"
if fancy:
for key2 in bg.defines.keys():
if key2[0] == 's':
node_lines += '\t{node [style=filled,fillcolor=green,label=\"%s\\n(%d)\"] %s};\n' % (key2, bg.stem_length(key2), key2)
continue
if len(bg.edges[key2]) == 2:
if bg.weights[key2] == 2:
node_lines += '\t{node [style=filled,shape=circle,fillcolor=yellow,fontsize=12'
                elif bg.weights[key2] == 1:
node_lines += '\t{node [style=filled,shape=circle,fillcolor=red,fontsize=12'
else:
node_lines += '\t{node [style=filled,shape=circle,fillcolor=blue,fontsize=12'
#node_lines += node_defs[bg.weights[key2]]
node_lines += ',label=\"%s \\n(' % (key2)
total_bulge = 0
for j in range(0, len(bg.defines[key2]), 2):
if j != 0:
node_lines += ','
total_bulge += abs((int(bg.defines[key2][j+1]) - int(bg.defines[key2][j]) + 1))
node_lines += "%d" % (int(bg.defines[key2][j+1]) - int(bg.defines[key2][j]) + 1)
j = j / 2
while j < bg.weights[key2]-1:
node_lines += ",0"
j += 1
width = math.sqrt(1.5 * total_bulge / 10.0)
height = width
if bg.weights[key2] == 2:
node_lines += ")\",width=%f,heigh=%f] %s};\n" % (width, height, key2)
else:
node_lines += ")\"] %s};\n" % (key2)
else:
for key2 in bg.defines.keys():
if key2[0] == 's':
node_lines += '\t{node [style=filled,fillcolor=green,label=\"%s\"] %s};\n' % (key2, key2)
else:
node_lines += node_defs[bg.weights[key2]]
node_lines += ',label=\"%s\"] %s};\n' % (key2, key2)
for key1 in bg.edges:
if key1[0] == 's':
for key2 in bg.edges[key1]:
connection_lines += "\t%s -- %s;\n" % (key1, key2)
for key1 in bg.longrange.keys():
for key2 in bg.longrange[key1]:
connection_lines += "\t%s -- %s [style=dashed]" % (key1, key2)
print node_lines
print connection_lines
print "}"
#print bg.get_named_define
def main():
if len(sys.argv) < 2:
print "Usage: ./graph_to_angles.py struct.graph"
print
print "Traverse a structure and output the stems that are connected by a bulge"
sys.exit(1)
bg = BulgeGraph(sys.argv[1])
print_neato(bg)
if __name__=="__main__":
main()
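# Editor's note (illustrative only, not part of the original script): for a
# minimal structure with one stem s0 joined to two single-stranded regions b0
# and b1, print_neato() emits neato/DOT text roughly of this shape (labels and
# node sizes depend on the parsed BulgeGraph):
#
#   graph G {
#       graph [overlap=scale];
#       node [shape=box];
#       {node [style=filled,fillcolor=green,label="s0\n(5)"] s0};
#       s0 -- b0;
#       s0 -- b1;
#   }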
|
pkerpedjiev/ernwin
|
fess/scripts/graph_to_neato.py
|
Python
|
agpl-3.0
| 3,764
|
# coding=utf-8
from django.core.mail import send_mail
from django.db.models.signals import pre_delete
from django.dispatch import receiver
from . import models
@receiver(pre_delete, sender=models.Need)
def send_email_notifications(sender, instance, **kwargs):
"""
HACK ALERT
This needed to be done quickly. Please use a proper email template,
add some error handling, some sane max recipient handling, tests, etc.
"""
subject = u'Schicht gelöscht'
message = u'''
Hallo ihr,
leider mussten wir die folgende Schicht löschen:
{need}
Dies hier ist eine automatisch generierte Email. Im Helpdesk steht mit ein
bisschen Glück eine Erklärung, warum die Schicht entfernt wurde.
Liebe Grüße vom Volunteer Planner.
'''.format(need=instance)
from_email = "Volunteer-Planner.org <no-reply@volunteer-planner.org>"
addresses = instance.get_volunteers().values_list('user__email', flat=True)
send_mail(subject, message, from_email, addresses, fail_silently=True)
|
mei-li/volunteer_planner
|
scheduler/signals.py
|
Python
|
agpl-3.0
| 1,033
|
import numpy as np
from scipy.stats import norm, expon
from tikon.datos.datos import máximo, f_numpy
from tikon.ecs.aprioris import APrioriDist
from tikon.ecs.árb_mód import Parám
from ._plntll_ec import EcuaciónEdad
class PrTDevMínBNLT(Parám):
nombre = 't_dev_mín'
líms = (None, None)
unids = 'C'
apriori = APrioriDist(norm(20, 10))
class PrDeltaTLetalBNLT(Parám):
nombre = 'delta_t_letal'
líms = (0, None)
unids = 'C'
apriori = APrioriDist(expon(scale=10))
class PrMBNLT(Parám):
nombre = 'm'
líms = (0, None)
unids = None
apriori = APrioriDist(expon(scale=10))
class FuncBrièreNoLinearTemperatura(EcuaciónEdad):
"""
Edad calculada con la taza de desarrollo de la ecuación de temperatura no linear de Brière.
.. math::
f(T) = T(T-T_b)(T_l-T)^(1/m)
En esta ecuación, tal como en las otras con taza de desarrollo, quitamos el parámetro típicamente multiplicado por
toda la ecuación, porque eso duplicaría el propósito del parámetro de ubicación de las distribuciones
de probabilidad empleadas después.
References
----------
.. [1] Youngsoo Son et al. 2012. Estimation of developmental parameters for adult emergence of Gonatocerus
morgani, a novel egg parasitoid of the glassy-winged sharpshooter, and development of a degree-day
model. Biological Control 60(3): 233-260.
.. [2] J.-F. Briere, P. Pracros, A.-Y. Le Broux, J.-S. Pierre. A novel rate model of temperature-dependent
development for arthropods. Environmental Entomology, 28 (1999), pp. 22-29.
"""
nombre = 'Brière No Linear Temperatura'
cls_ramas = [PrTDevMínBNLT, PrDeltaTLetalBNLT, PrMBNLT]
def eval(símismo, paso, sim):
cf = símismo.cf
temp_prom = símismo.obt_valor_extern(sim, 'clima.temp_prom')
return máximo(
temp_prom * (temp_prom - cf['t_dev_mín']), 0
) * f_numpy(
np.power,
máximo(cf['t_dev_mín'] + cf['delta_t_letal'] - temp_prom, 0),
1 / cf['m']
) * paso
@classmethod
def requísitos(cls, controles=False):
if not controles:
return {'clima.temp_prom'}
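# Editor's sketch (not part of the original module): the same Brière-style
# development rate evaluated with plain numpy (already imported above as np).
# The parameter values below (t_dev_min=10 C, delta_t_letal=25 C, m=2) are
# made up purely for illustration and are not taken from the model above.
def _briere_rate_sketch(temp, t_dev_min=10.0, delta_t_letal=25.0, m=2.0, paso=1.0):
    """f(T) = T*(T - T_b) * (T_l - T)**(1/m), clipped at zero, times the step."""
    temp = np.asarray(temp, dtype=float)
    base = np.maximum(temp * (temp - t_dev_min), 0.0)
    tail = np.maximum(t_dev_min + delta_t_letal - temp, 0.0) ** (1.0 / m)
    return base * tail * paso
# _briere_rate_sketch([5., 20., 30., 34.9]) rises from 0, peaks, then falls
# back towards 0 as T approaches the lethal temperature T_l = T_b + delta.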
|
julienmalard/Tikon
|
tikon/móds/rae/orgs/ecs/edad/brr_no_lín_temp.py
|
Python
|
agpl-3.0
| 2,223
|
# -*- coding: utf-8 -*-
###############################################################################
#
# ODOO (ex OpenERP)
# Open Source Management Solution
# Copyright (C) 2001-2015 Micronaet S.r.l. (<http://www.micronaet.it>)
# Developer: Nicola Riolini @thebrush (<https://it.linkedin.com/in/thebrush>)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
{
'name': 'Custom price for manage pricelist generation',
'version': '0.0.1',
'category': 'Generic Modules/Customization',
'author': 'Micronaet s.r.l.',
'website': 'http://www.micronaet.it',
'depends': [
'base',
'product',
'product_dimension_fields',
'base_accounting_program',
'sale',
'report_aeroo',
],
'init_xml': [],
'data': [
'security/ir.model.access.csv',
'product_views.xml',
],
'demo_xml': [],
'active': False,
'installable': True,
}
|
Micronaet/micronaet-migration
|
product_extra_packaging/__openerp__.py
|
Python
|
agpl-3.0
| 1,621
|
################################################################################
#
# Copyright 2015-2020 Félix Brezo and Yaiza Rubio
#
# This program is part of OSRFramework. You can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
################################################################################
from configparser import ConfigParser
import os
import random
import requests
import osrframework.utils.configuration as configuration
class Browser():
"""Utility used to code a Browser and to wrap the requests methods.
Attributes:
auth (tuple): The username and password authentication.
proxies (list): A list of proxies.
timeout (int): The number of seconds to wait until timeout.
user_agents (list): The list of User Agents recognised for this browser.
"""
def __init__(self):
"""Recovering an instance of a new Browser"""
self.auth = None
self.user_agents = []
self.proxies = {}
self.timeout = 2
# Trying to read the configuration
# --------------------------------
# If a current.cfg has not been found, creating it by copying from default
config_path = os.path.join(configuration.get_config_path()["appPath"], "browser.cfg")
# Checking if the configuration file exists
if not os.path.exists(config_path):
try:
# Copy the data from the default folder
default_config_path = os.path.join(configuration.get_config_path()["appPathDefaults"], "browser.cfg")
with open(default_config_path) as file:
cont = file.read()
with open(config_path, "w") as output_file:
output_file.write(cont)
            except Exception as e:
print("WARNING. No configuration file could be found and the default file was not found either, so configuration will be set as default.")
print(str(e))
print()
# Storing configuration as default
self.user_agents = ['Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Ubuntu Chromium/55.0.2883.87 Chrome/55.0.2883.87 Safari/537.36']
self.proxies = {}
return None
# Reading the configuration file
config = ConfigParser()
config.read(config_path)
proxy = {}
# Iterating through all the sections, which contain the platforms
for conf in config.sections():
if conf == "Browser":
                # Iterating through parameters
for (param, value) in config.items(conf):
if param == "user_agent":
if value != '':
self.user_agents.append(value)
else:
self.user_agents = ['Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Ubuntu Chromium/55.0.2883.87 Chrome/55.0.2883.87 Safari/537.36']
if param == "timeout":
try:
self.timeout = int(value)
except:
self.timeout = 2
else:
proxy[conf] = {}
# Iterating through parameters
for (param, value) in config.items(conf):
if value != '':
proxy[conf][param] = value
# Configuring the proxy as it will be used by br.set_proxies
for p in proxy.keys():
# p ~= ProxyHTTP --> Protocol = p.lower()[5:]
#print p, p.lower()[5:], proxy[p]
try:
# Adding credentials if they exist
self.proxies[ p.lower()[5:] ] = proxy[p]["username"] + ":" + proxy[p]["password"] + "@" + proxy[p]["host"] + ":" + proxy[p]["port"]
except:
try:
self.proxies[ p.lower()[5:] ] = proxy[p]["host"] + ":" + proxy[p]["port"]
except:
# We are not adding this protocol to be proxied
pass
def recover_url(self, url):
"""Public method to recover a resource.
Args:
url (str): The URL to be collected.
        Returns:
            The text of the fetched resource as a string, or None if the
            request could not be completed.
        """
headers = {
"User-Agent": self.getUserAgent(),
}
# Opening the resource
try:
r = requests.get(
url,
headers=headers,
auth=self.auth
)
return r.text
except Exception:
# Something happened. Maybe the request was forbidden?
return None
def setNewPassword(self, username, password):
"""Public method to manually set the credentials for a url in the browser
Args:
username (str): The username of the session.
password (str): The password of the session.
"""
self.auth = (username, password)
def getUserAgent(self):
"""This method will be called whenever a new query will be executed
Returns:
Returns a string with the User Agent.
"""
if self.user_agents:
# User-Agent (this is cheating, ok?)
return random.choice(self.user_agents)
else:
return "Python3"
|
i3visio/osrframework
|
osrframework/utils/browser.py
|
Python
|
agpl-3.0
| 6,104
|
# -*- coding: utf-8 -*-
# © 2011 Guewen Baconnier,Camptocamp,Elico-Corp
# © 2016 Sodexis
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import models, fields, api
class ProductLink(models.Model):
_name = 'product.link'
_rec_name = 'linked_product_id'
@api.model
def get_link_type_selection(self):
# selection can be inherited and extended
return [('cross_sell', 'Cross-Sell'),
('up_sell', 'Up-Sell'),
('related', 'Related')]
product_id = fields.Many2one(
comodel_name='product.product',
string='Source Product',
required=True,
ondelete='cascade')
linked_product_id = fields.Many2one(
comodel_name='product.product',
string='Linked product',
required=True,
ondelete='cascade')
type = fields.Selection(
selection='get_link_type_selection',
string='Link type',
required=True)
is_active = fields.Boolean('Active', default=True)
class Product(models.Model):
_inherit = 'product.product'
product_link_ids = fields.One2many(
comodel_name='product.link',
inverse_name='product_id',
string='Product links'
)
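# Editor's sketch (illustrative only, not part of this module): the comment in
# get_link_type_selection() notes that the selection can be inherited and
# extended; a hypothetical extension module could do so like this:
#
#     class ProductLinkExtended(models.Model):
#         _inherit = 'product.link'
#
#         @api.model
#         def get_link_type_selection(self):
#             res = super(ProductLinkExtended, self).get_link_type_selection()
#             return res + [('accessory', 'Accessory')]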
|
JayVora-SerpentCS/e-commerce
|
product_multi_link/models/product.py
|
Python
|
agpl-3.0
| 1,248
|
# -*- coding: utf-8 -*-
import numpy as np
import pylab as pl
# https://math.stackexchange.com/questions/920351/selecting-at-least-one-ball-of-each-color
# https://en.wikipedia.org/wiki/Hypergeometric_distribution
# Example. From the example given on the web Wikipedia: The Free
# Encyclopedia [http://en.wikipedia.org/wiki/Hypergeometric_distribution].
# Suppose there are 5 black, 10 white, and 15 red marbles in an urn. You
# reach in and randomly select six marbles without replacement. What is
# the probability that you pick exactly two of each color?
#
# ele_set = [2,2,2]; comb = [5,10,15];
#
# Calling the function:
# x = multivariate_hypergeometric_like(comb, ele_set)
#
# Answer is: (in format long)
#
# x =
#
# 0.07957559681698
from pymc import multivariate_hypergeometric_like
def multichoose(n,k):
if k < 0 or n < 0: return "Error"
if not k: return []
if not n: return []
if n == 1: return [[k]]
return [[0]+val for val in multichoose(n-1,k)] + \
[[val[0]+1]+val[1:] for val in multichoose(n,k-1)]
def remove_comb(list_):
x2 = list_[:]
for m in list_:
for z in m:
if z == 0:
x2.remove(m)
break
return x2
#main
p = input('Tell me the probability you want to ensure (0.95?): \n')
x = input('Tell me the number of mutations (classes): \n')
n = input('Tell me the number of elements in total (1000?): \n')
for x in range(2,7):
num_ele = np.floor((n/x)) #number of elements x set
ele_set = np.ones(x)*num_ele #elements x set fixed
a=0 # unknown - starts in x
cum_prob = 0
vector_a= np.zeros(1)
vector_cum_prob= np.zeros(1)
while cum_prob<p:
a += 1
all_comb = multichoose(x,a)
our_comb=remove_comb(all_comb)
cum_prob = 0
for comb in our_comb:
if not comb:
break
log_like=multivariate_hypergeometric_like(comb, ele_set)
prob_one_comb=np.e**log_like
cum_prob += prob_one_comb
print "Number of experiments:", a ,"\n" ,"Prob:", cum_prob ,"\n"
vector_a = np.append(vector_a,[a])
vector_cum_prob = np.append(vector_cum_prob, [cum_prob])
print "Number of experiments:", a ,"\n" ,"Prob:", cum_prob ,"\n"
pl.plot(vector_a, vector_cum_prob)
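# ---------------------------------------------------------------------------
# Editor's sketch (not part of the original script): the urn example quoted in
# the comment block above, checked with plain factorial arithmetic instead of
# pymc's log-likelihood. P(2 black, 2 white, 2 red in 6 draws from 5/10/15
# marbles) = C(5,2)*C(10,2)*C(15,2) / C(30,6), which matches the value
# 0.07957559681698 quoted above.
from math import factorial
def _comb(n, k):
    return factorial(n) // (factorial(k) * factorial(n - k))
def _exact_multivariate_hypergeometric(counts, draws):
    numerator = 1
    for n_i, k_i in zip(counts, draws):
        numerator *= _comb(n_i, k_i)
    return float(numerator) / _comb(sum(counts), sum(draws))
# _exact_multivariate_hypergeometric([5, 10, 15], [2, 2, 2]) -> ~0.0795756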
|
erramuzpe/LAB_TOOLS
|
source/EXP_SAVER/saver.py
|
Python
|
agpl-3.0
| 2,401
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2016 - KMEE INFORMATICA LTDA (<http://kmee.com.br>).
# Luis Felipe Miléo - mileo@kmee.com.br
#
# All other contributions are (C) by their respective contributors
#
# All Rights Reserved
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields, api, _
class PaymentOrderCreate(models.TransientModel):
_inherit = 'payment.order.create'
schedule_date = fields.Date('Data Programada')
all_posted_moves = fields.Boolean(u'Títulos em aberto', default=True)
@api.model
def default_get(self, field_list):
res = super(PaymentOrderCreate, self).default_get(field_list)
context = self.env.context
if ('entries' in field_list and context.get('line_ids') and
context.get('populate_results')):
res.update({'entries': context['line_ids']})
return res
@api.multi
def extend_payment_order_domain(self, payment_order, domain):
self.ensure_one()
# Search for all posted moves
if self.all_posted_moves:
index = domain.index(('date_maturity', '<=', self.duedate))
domain[index + 1] = ('date_maturity', '>', self.duedate)
if payment_order.payment_order_type == 'payment':
# For payables, propose all unreconciled credit lines,
# including partially reconciled ones.
# If they are partially reconciled with a supplier refund,
# the residual will be added to the payment order.
#
# For receivables, propose all unreconciled credit lines.
# (ie customer refunds): they can be refunded with a payment.
# Do not propose partially reconciled credit lines,
# as they are deducted from a customer invoice, and
# will not be refunded with a payment.
domain += [('credit', '>', 0),
'|',
('account_id.type', '=', 'payable'),
'&',
('account_id.type', '=', 'receivable'),
('reconcile_partial_id', '=', False)]
@api.multi
def filter_lines(self, lines):
""" Filter move lines before proposing them for inclusion
in the payment order.
This implementation filters out move lines that are already
included in draft or open payment orders. This prevents the
user to include the same line in two different open payment
orders. When the payment order is sent, it is assumed that
the move will be reconciled soon (or immediately with
account_banking_payment_transfer), so it will not be
proposed anymore for payment.
See also https://github.com/OCA/bank-payment/issues/93.
:param lines: recordset of move lines
:returns: list of move line ids
"""
self.ensure_one()
payment_lines = self.env['payment.line'].\
search([('order_id.state', 'in', ('draft', 'open', 'done')),
('move_line_id', 'in', lines.ids)])
to_exclude = set([l.move_line_id.id for l in payment_lines])
return [l.id for l in lines if l.id not in to_exclude]
@api.multi
def search_entries(self):
"""This method taken from account_payment module.
We adapt the domain based on the payment_order_type
"""
line_obj = self.env['account.move.line']
model_data_obj = self.env['ir.model.data']
# -- start account_banking_payment --
payment = self.env['payment.order'].browse(
self.env.context['active_id'])
# Search for move line to pay:
domain = [('move_id.state', '=', 'posted'),
('reconcile_id', '=', False),
('company_id', '=', payment.mode.company_id.id),
'|',
('date_maturity', '<=', self.duedate),
('date_maturity', '=', False)]
self.extend_payment_order_domain(payment, domain)
# -- end account_direct_debit --
lines = line_obj.search(domain)
context = self.env.context.copy()
context['line_ids'] = self.filter_lines(lines)
context['populate_results'] = self.populate_results
if payment.payment_order_type == 'payment':
context['display_credit'] = True
context['display_debit'] = False
else:
context['display_credit'] = False
context['display_debit'] = True
model_datas = model_data_obj.search(
[('model', '=', 'ir.ui.view'),
('name', '=', 'view_create_payment_order_lines')])
return {'name': _('Entry Lines'),
'context': context,
'view_type': 'form',
'view_mode': 'form',
'res_model': 'payment.order.create',
'views': [(model_datas[0].res_id, 'form')],
'type': 'ir.actions.act_window',
'target': 'new',
}
@api.multi
def _prepare_payment_line(self, payment, line):
res = super(PaymentOrderCreate, self)._prepare_payment_line(
payment, line)
if line.invoice:
if line.invoice.type in ('in_invoice', 'in_refund'):
if line.invoice.reference_type == 'structured':
res['communication'] = line.invoice.reference
else:
if line.invoice.reference:
res['communication'] = line.invoice.reference
elif line.invoice.supplier_invoice_number:
res['communication'] = \
line.invoice.supplier_invoice_number
else:
# Make sure that the communication includes the
# customer invoice number (in the case of debit order)
res['communication'] = line.name
return res
@api.multi
def create_payment(self):
"""This method is a slightly modified version of the existing method on
this model in account_payment.
- pass the payment mode to line2bank()
- allow invoices to create influence on the payment process: not only
'Free' references are allowed, but others as well
- check date_to_pay is not in the past.
"""
if not self.entries:
return {'type': 'ir.actions.act_window_close'}
context = self.env.context
payment_line_obj = self.env['payment.line']
payment = self.env['payment.order'].browse(context['active_id'])
# Populate the current payment with new lines:
for line in self.entries:
vals = self._prepare_payment_line(payment, line)
payment_line_obj.create(vals)
# Force reload of payment order view as a workaround for lp:1155525
return {'name': _('Payment Orders'),
'context': context,
'view_type': 'form',
'view_mode': 'form,tree',
'res_model': 'payment.order',
'res_id': context['active_id'],
'type': 'ir.actions.act_window'}
|
kmee/odoo-brazil-banking
|
l10n_br_account_banking_payment/wizard/payment_order_create.py
|
Python
|
agpl-3.0
| 8,012
|
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# LICENSE
#
# Copyright (c) 2010-2013, GEM Foundation, G. Weatherill, M. Pagani,
# D. Monelli.
#
# The Hazard Modeller's Toolkit is free software: you can redistribute
# it and/or modify it under the terms of the GNU Affero General Public
# License as published by the Free Software Foundation, either version
# 3 of the License, or (at your option) any later version.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>
#
# DISCLAIMER
#
# The software Hazard Modeller's Toolkit (hmtk) provided herein
# is released as a prototype implementation on behalf of
# scientists and engineers working within the GEM Foundation (Global
# Earthquake Model).
#
# It is distributed for the purpose of open collaboration and in the
# hope that it will be useful to the scientific, engineering, disaster
# risk and software design communities.
#
# The software is NOT distributed as part of GEM’s OpenQuake suite
# (http://www.globalquakemodel.org/openquake) and must be considered as a
# separate entity. The software provided herein is designed and implemented
# by scientific staff. It is not developed to the design standards, nor
# subject to same level of critical review by professional software
# developers, as GEM’s OpenQuake software suite.
#
# Feedback and contribution to the software is welcome, and can be
# directed to the hazard scientific staff of the GEM Model Facility
# (hazard@globalquakemodel.org).
#
# The Hazard Modeller's Toolkit (hmtk) is therefore distributed WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
#
# The GEM Foundation, and the authors of the software, assume no
# liability for use of the software.
#/usr/bin/env/python
'''Tests set of seismicity utility functions including:
i) decimal_year
ii) decimal_time
iii) haversine
iv) greg2julian
v) piecewise_linear_scalar
'''
import unittest
import numpy as np
from hmtk.seismicity import utils
class TestSeismicityUtilities(unittest.TestCase):
'''Class for testing seismicity utilities'''
def setUp(self):
'''Sets up the test class'''
self.year = []
self.month = []
self.day = []
self.hour = []
self.minute = []
self.second = []
self.longitude = []
self.latitude = []
def test_leap_year_check(self):
'''Tests the leap year check'''
# 1900 - Not leap year
# 1995 - Not leap year
# 2000 - Leap year
# 2012 - Leap year
test_years = np.array([1900, 1995, 2000, 2012])
leap_values = utils.leap_check(test_years)
self.assertFalse(leap_values[0])
self.assertFalse(leap_values[1])
self.assertTrue(leap_values[2])
self.assertTrue(leap_values[3])
def test_decimal_year(self):
'''Tests the function utils.decimal_year'''
self.year = np.array([1990., 1995., 2000.])
self.month = np.array([1., 6., 12.])
self.day = np.array([1., 30., 31.])
self.assertTrue(np.allclose(
utils.decimal_year(self.year, self.month, self.day),
np.array([1990., 1995.49315068, 2000.99726027])))
def test_decimal_time(self):
'''Tests the function utils.decimal_time'''
self.year = np.array([1990, 1995, 2000])
self.month = np.array([1, 6, 12])
self.day = np.array([1, 30, 31])
self.hour = np.array([0, 12, 23])
self.minute = np.array([0, 30, 59])
self.second = np.array([0.0, 30.0, 59.0])
self.assertTrue(np.allclose(
utils.decimal_time(self.year, self.month, self.day, self.hour,
self.minute, self.second),
np.array([1990., 1995.49457858, 2000.99999997])))
def test_decimal_time1(self):
'''Tests the function utils.decimal_time'''
self.year = np.array([1990])
self.month = np.array([1])
self.day = np.array([1])
self.hour = np.array([0])
self.minute = np.array([0])
self.second = np.array([0.0, 30.0, 59.0])
self.assertTrue(np.allclose(
utils.decimal_time(self.year, self.month, self.day, self.hour,
self.minute, self.second),
np.array([1990.])))
def test_decimal_time2(self):
'''Tests the function utils.decimal_time'''
self.year = np.array([1990])
self.assertTrue(np.allclose(
utils.decimal_time(self.year, [], [], [], [], []),
np.array([1990.])))
def test_haversine(self):
'''Tests the function utils.haversine
Distances tested against i) Matlab implementation of the haversine
formula
ii) Matlab "distance" function (also based on
the haversine formula (assumes
Earth Radius = 6371.0 not 6371.227 as
assumed here!)
'''
# Simple test
self.longitude = np.arange(30., 40., 1.)
self.latitude = np.arange(30., 40., 1.)
distance = utils.haversine(self.longitude, self.latitude, 35.0, 35.0)
expected_distance = np.array([[727.09474718],
[580.39194024],
[434.3102452],
[288.87035021],
[144.09319874],
[0.],
[143.38776088],
[286.04831311],
[427.95959077],
[569.09922383]])
self.assertTrue(np.allclose(distance, expected_distance))
# 2-D test
self.longitude = np.array([30., 35., 40.])
self.latitude = np.array([30., 35., 40.])
distance = utils.haversine(self.longitude, self.latitude,
self.longitude, self.latitude)
expected_distance = np.array([[ 0., 727.09474718, 1435.38402047],
[727.09474718, 0., 709.44452948],
[1435.38402047, 709.44452948, 0.]])
self.assertTrue(np.allclose(distance, expected_distance))
# Crossing International Dateline
self.longitude = np.array([179.5, 180.0, -179.5])
self.latitude = np.array([45., 45., 45.])
distance = utils.haversine(self.longitude, self.latitude, 179.9, 45.)
expected_distance = np.array([[31.45176332],
[7.86294832],
[47.1775851]])
self.assertTrue(np.allclose(distance, expected_distance))
def test_piecewise_linear_function(self):
'''Test the piecewise linear calculator'''
# Good parameter set - 2 segments
params = [2.0, -1.0, 5.0, 0.0]
values = [0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0]
expected = [0.0, 2.0, 4.0, 6.0, 8.0, 10.0, 9.0, 8.0, 7.0, 6.0]
for iloc, xval in enumerate(values):
self.assertAlmostEqual(expected[iloc],
utils.piecewise_linear_scalar(params, xval))
# Odd-number of values in parameters - raise value error
params = [2.0, -1.0, 5.0, 0.0, 3.4]
with self.assertRaises(ValueError):
utils.piecewise_linear_scalar(params, 1.0)
# Single segment test
params1seg = [2.0, 0.0]
self.assertAlmostEqual(2.0,
utils.piecewise_linear_scalar(params1seg, 1.0))
# 3- segment test
params = np.array([2.0, -1.0, 3.0, 4.0, 8.0, 0.0])
expected = [0.0, 2.0, 4.0, 6.0, 8.0, 7.0, 6.0, 5.0, 4.0, 7.0]
for iloc, xval in enumerate(values):
self.assertAlmostEqual(expected[iloc],
utils.piecewise_linear_scalar(params, xval))
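    # Editor's note (inferred from the test cases above, not from the hmtk
    # docs): piecewise_linear_scalar appears to take its parameters as
    # [gradient_1, ..., gradient_n, turning_point_1, ..., turning_point_{n-1},
    # intercept], e.g. [2.0, -1.0, 5.0, 0.0] means slope 2 up to x = 5, then
    # slope -1, with y = 0 at x = 0. An odd-length parameter vector can never
    # fit this layout, which is why it raises ValueError.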
def _tester_for_truncated_gaussian(self, data, uncertainties, low_limit,
high_limit, number_samples=1000):
"""
Tests that for a given data set and uncertainties that no values
exceed the data limits
"""
xlow = []
xhigh = []
for iloc in range(0, number_samples):
xval = utils.sample_truncated_gaussian_vector(
data,
uncertainties,
(low_limit, high_limit))
xlow.append(np.min(xval))
xhigh.append(np.max(xval))
self.assertTrue(np.max(np.array(xhigh)) <= high_limit)
self.assertTrue(np.min(np.array(xlow)) >= low_limit)
def test_sample_truncated_gaussian_distribution_with_bounds(self):
"""
Tests the function to sample a truncated Gaussian distribution
"""
data = 10.0 * np.ones(100)
uncertainties = np.random.uniform(0., 2., 100)
# Add bounds between 5.0 and 15.0
self._tester_for_truncated_gaussian(data, uncertainties, 5., 15.)
# Test case with infinite bounds
self._tester_for_truncated_gaussian(data,
uncertainties,
-np.inf,
np.inf)
class TestBootstrapHistograms(unittest.TestCase):
"""
Class to test bootstrapped histogram functions
hmtk.seismicity.utils.bootstrap_histogram_1D
hmtk.seismicity.utils.bootstrap_histogram_2D
"""
def setUp(self):
"""
"""
[x, y] = np.meshgrid(np.arange(5., 50., 5.),
np.arange(5.5, 9.0, 0.5))
nx, ny = np.shape(x)
x.reshape([nx * ny, 1])
y.reshape([nx * ny, 1])
self.x = x.flatten()
self.y = y.flatten()
self.x_sigma = None
self.y_sigma = None
def test_hmtk_histogram_1D_general(self):
"""
Tests the 1D hmtk histogram with the general case (i.e. no edge-cases)
Should be exactly equivalent to numpy's histogram function
"""
xdata = np.random.uniform(0., 10., 100)
xbins = np.arange(0., 11., 1.)
np.testing.assert_array_almost_equal(
utils.hmtk_histogram_1D(xdata, xbins),
np.histogram(xdata, bins=xbins)[0].astype(float))
def test_hmtk_histogram_1D_edgecase(self):
"""
Tests the 1D hmtk histogram with edge cases
Should be exactly equivalent to numpy's histogram function
"""
xdata = np.array([3.1, 4.1, 4.56, 4.8, 5.2])
xbins = np.arange(3.0, 5.35, 0.1)
expected_counter = np.array([0., 1., 0., 0., 0., 0., 0., 0., 0., 0.,
0., 1., 0., 0., 0., 1., 0., 0., 1., 0.,
0., 0., 1.])
np.testing.assert_array_almost_equal(
utils.hmtk_histogram_1D(xdata, xbins),
expected_counter)
def test_hmtk_histogram_2D_general(self):
"""
Tests the 2D hmtk histogram with the general case (i.e. no edge-cases)
Should be exactly equivalent to numpy's histogram function
"""
xdata = np.random.uniform(0., 10., 100)
ydata = np.random.uniform(10., 20., 100)
xbins = np.arange(0., 11., 1.)
ybins = np.arange(10., 21., 1.)
np.testing.assert_array_almost_equal(
utils.hmtk_histogram_2D(xdata, ydata, (xbins, ybins)),
np.histogram2d(xdata, ydata, bins=(xbins, ybins))[0].astype(float))
def test_hmtk_histogram_2D_edgecase(self):
"""
Tests the 2D hmtk histogram with edge cases
Should be exactly equivalent to numpy's histogram function
"""
xdata = np.array([3.1, 4.1, 4.56, 4.8, 5.2])
ydata = np.array([1990., 1991.2, 1994., 1997., 1998.2])
xbins = np.arange(3.0, 5.35, 0.1)
ybins = np.arange(1990., 2001.5, 1.)
expected_counter = np.array(
# 90 91 92 93 94 95 96 97 98 99 00
[[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.], #3.0-3.1
[1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.], #3.1-3.2
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.], #3.2-3.3
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.], #3.3-3.4
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.], #3.4-3.5
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.], #3.5-3.6
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.], #3.6-3.7
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.], #3.7-3.8
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.], #3.8-3.9
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.], #3.9-4.0
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.], #4.0-4.1
[0., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0.], #4.1-4.2
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.], #4.2-4.3
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.], #4.3-4.4
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.], #4.4-4.5
[0., 0., 0., 0., 1., 0., 0., 0., 0., 0., 0.], #4.5-4.6
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.], #4.6-4.7
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.], #4.7-4.8
[0., 0., 0., 0., 0., 0., 0., 1., 0., 0., 0.], #4.8-4.9
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.], #4.9-5.0
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.], #5.0-5.1
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.], #5.1-5.2
[0., 0., 0., 0., 0., 0., 0., 0., 1., 0., 0.]])#5.2-5.3
np.testing.assert_array_almost_equal(
utils.hmtk_histogram_2D(xdata, ydata, bins=(xbins, ybins)),
expected_counter)
def test_1D_bootstrap_no_uncertainty(self):
"""
Tests the bootstrap 1D histrogram function with no uncertainties
"""
# Without normalisation
x_range = np.arange(0., 60., 10.)
expected_array = np.array([7., 14., 14., 14., 14.])
np.testing.assert_array_almost_equal(expected_array,
utils.bootstrap_histogram_1D(self.x, x_range))
# Now with normalisaton
expected_array = expected_array / np.sum(expected_array)
np.testing.assert_array_almost_equal(expected_array,
utils.bootstrap_histogram_1D(self.x, x_range, normalisation=True))
def test_1D_bootstrap_with_uncertainty(self):
"""
Tests the bootstrap 1D histrogram function with uncertainties
"""
self.x_sigma = 1.0 * np.ones(len(self.x), dtype=float)
expected_array = np.array([0.17, 0.22, 0.22, 0.22, 0.17])
x_range = np.arange(0., 60., 10.)
hist_values = utils.bootstrap_histogram_1D(
self.x,
x_range,
uncertainties=self.x_sigma,
number_bootstraps=1000,
normalisation=True)
np.testing.assert_array_almost_equal(np.round(hist_values, 2),
expected_array)
def test_2D_bootstrap_no_uncertainty(self):
"""
Tests the bootstrap 1D histrogram function with no uncertainties
"""
# Without normalisation
x_range = np.arange(0., 60., 10.)
y_range = np.arange(5., 10., 1.0)
expected_array = np.array([[1., 2., 2., 2.],
[2., 4., 4., 4.],
[2., 4., 4., 4.],
[2., 4., 4., 4.],
[2., 4., 4., 4.]])
np.testing.assert_array_almost_equal(expected_array,
utils.bootstrap_histogram_2D(self.x, self.y, x_range, y_range))
# With normalisation
expected_array = expected_array / np.sum(expected_array)
np.testing.assert_array_almost_equal(expected_array,
utils.bootstrap_histogram_2D(self.x, self.y, x_range, y_range,
normalisation=True))
def test_2D_bootstrap_with_uncertainty(self):
"""
Tests the bootstrap 1D histrogram function with uncertainties
"""
# Without normalisation
self.y_sigma = 0.1 * np.ones(len(self.y), dtype=float)
x_range = np.arange(0., 60., 10.)
y_range = np.arange(5., 10., 1.0)
expected_array = np.array([[1.5, 2.0, 2.0, 1.5],
[3.0, 4.0, 4.0, 3.0],
[3.0, 4.0, 4.0, 3.0],
[3.0, 4.0, 4.0, 3.0],
[3.0, 4.0, 4.0, 3.0]])
hist_values = utils.bootstrap_histogram_2D(
self.x,
self.y,
x_range,
y_range,
normalisation=False,
xsigma=self.x_sigma,
ysigma=self.y_sigma,
number_bootstraps=1000)
array_diff = expected_array - np.round(hist_values, 1)
print expected_array, hist_values, array_diff
self.assertTrue(np.all(np.fabs(array_diff) < 0.2))
# With normalisation
expected_array = np.array([[0.04, 0.05, 0.05, 0.04],
[0.05, 0.06, 0.06, 0.05],
[0.05, 0.06, 0.06, 0.05],
[0.05, 0.06, 0.06, 0.05],
[0.04, 0.05, 0.05, 0.04]])
hist_values = utils.bootstrap_histogram_2D(
self.x,
self.y,
x_range,
y_range,
normalisation=True,
xsigma=self.x_sigma,
ysigma=self.y_sigma,
number_bootstraps=1000)
array_diff = expected_array - hist_values
self.assertTrue(np.all(np.fabs(array_diff) < 0.02))
class Testlonlat2laea(unittest.TestCase):
"""
Tests the converter from longitude and latitude to lambert azimuthal
equal area coordinates
"""
def test_conversion(self):
"""
Tests lonlat to lambert equal area
Data from pyproj conversion taken from reference test case
"""
expected_x, expected_y = (-286700.13595616777, -325847.4698447622)
calc_x, calc_y = utils.lonlat_to_laea(5.0, 50.0, 9.0, 53.0)
self.assertAlmostEqual(calc_x, expected_x / 1000.0)
self.assertAlmostEqual(calc_y, expected_y / 1000.0)
|
g-weatherill/hmtk
|
tests/seismicity/test_seismicity_utils.py
|
Python
|
agpl-3.0
| 18,848
|
################################################################################
#
# Copyright 2015-2020 Félix Brezo and Yaiza Rubio
#
# This program is part of OSRFramework. You can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
################################################################################
__author__ = "Felix Brezo, Yaiza Rubio <contacto@i3visio.com>"
__version__ = "2.0"
import osrframework.utils.browser as browser
from osrframework.utils.platforms import Platform
class V7n(Platform):
"""A <Platform> object for V7n"""
def __init__(self):
self.platformName = "V7n"
self.tags = ["development"]
########################
# Defining valid modes #
########################
self.isValidMode = {}
self.isValidMode["phonefy"] = False
self.isValidMode["usufy"] = True
self.isValidMode["searchfy"] = False
######################################
# Search URL for the different modes #
######################################
# Strings with the URL for each and every mode
self.url = {}
#self.url["phonefy"] = "http://anyurl.com//phone/" + "<phonefy>"
self.url["usufy"] = "http://www.v7n.com/forums/members/" + "<usufy>" + ".html"
#self.url["searchfy"] = "http://anyurl.com/search/" + "<searchfy>"
######################################
# Whether the user needs credentials #
######################################
self.needsCredentials = {}
#self.needsCredentials["phonefy"] = False
self.needsCredentials["usufy"] = False
#self.needsCredentials["searchfy"] = False
#################
# Valid queries #
#################
# Strings that will imply that the query number is not appearing
self.validQuery = {}
# The regular expression '.+' will match any query
#self.validQuery["phonefy"] = ".*"
self.validQuery["usufy"] = ".+"
#self.validQuery["searchfy"] = ".*"
###################
# Not_found clues #
###################
# Strings that will imply that the query number is not appearing
self.notFoundText = {}
#self.notFoundText["phonefy"] = []
self.notFoundText["usufy"] = ["This user has not registered and therefore does not have a profile to view."]
#self.notFoundText["searchfy"] = []
#########################
# Fields to be searched #
#########################
self.fieldsRegExp = {}
# Definition of regular expressions to be searched in phonefy mode
#self.fieldsRegExp["phonefy"] = {}
# Example of fields:
#self.fieldsRegExp["phonefy"]["i3visio.location"] = ""
# Definition of regular expressions to be searched in usufy mode
self.fieldsRegExp["usufy"] = {}
# Example of fields:
#self.fieldsRegExp["usufy"]["i3visio.location"] = ""
# Definition of regular expressions to be searched in searchfy mode
#self.fieldsRegExp["searchfy"] = {}
# Example of fields:
#self.fieldsRegExp["searchfy"]["i3visio.location"] = ""
################
# Fields found #
################
# This attribute will be feeded when running the program.
|
i3visio/osrframework
|
osrframework/wrappers/pending/v7n.py
|
Python
|
agpl-3.0
| 3,955
|
# Copyright 2020 ForgeFlow S.L.
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
from odoo.tests.common import TransactionCase
class TestMrpMtoWithStock(TransactionCase):
def setUp(self, *args, **kwargs):
super(TestMrpMtoWithStock, self).setUp(*args, **kwargs)
self.repair_obj = self.env["repair.order"]
self.repair_line_obj = self.env["repair.line"]
self.product_obj = self.env["product.product"]
self.move_obj = self.env["stock.move"]
self.stock_location_stock = self.env.ref("stock.stock_location_stock")
self.customer_location = self.env.ref("stock.stock_location_customers")
self.scrap_location = self.env["stock.location"].search(
[("scrap_location", "=", True)], limit=1
)
self.refurbish_loc = self.env.ref("repair_refurbish.stock_location_refurbish")
self.refurbish_product = self.product_obj.create(
{"name": "Refurbished Awesome Screen", "type": "product"}
)
self.product = self.product_obj.create(
{
"name": "Awesome Screen",
"type": "product",
"refurbish_product_id": self.refurbish_product.id,
}
)
self.material = self.product_obj.create({"name": "Materials", "type": "consu"})
self.material2 = self.product_obj.create(
{"name": "Materials", "type": "product"}
)
self._update_product_qty(self.product, self.stock_location_stock, 10.0)
def _update_product_qty(self, product, location, quantity):
inventory = self.env["stock.inventory"].create(
{
"name": "Test Inventory",
"product_ids": [(6, 0, product.ids)],
"state": "confirm",
"line_ids": [
(
0,
0,
{
"product_qty": quantity,
"location_id": location.id,
"product_id": product.id,
"product_uom_id": product.uom_id.id,
},
)
],
}
)
inventory.action_start()
inventory.line_ids[0].write({"product_qty": quantity})
inventory.action_validate()
return quantity
def test_01_repair_refurbish(self):
"""Tests that locations are properly set with a product to
refurbish, then complete repair."""
repair = self.repair_obj.create(
{
"product_id": self.product.id,
"product_qty": 3.0,
"product_uom": self.product.uom_id.id,
"location_dest_id": self.customer_location.id,
"location_id": self.stock_location_stock.id,
}
)
repair.onchange_product_id()
self.assertTrue(repair.to_refurbish)
repair._onchange_to_refurbish()
self.assertEqual(repair.refurbish_location_dest_id, self.customer_location)
self.assertEqual(repair.location_dest_id, self.product.property_stock_refurbish)
line = self.repair_line_obj.with_context(
to_refurbish=repair.to_refurbish,
refurbish_location_dest_id=repair.refurbish_location_dest_id,
).new(
{
"name": "consume stuff to repair",
"repair_id": repair.id,
"type": "add",
"product_id": self.material.id,
"product_uom": self.material.uom_id.id,
"product_uom_qty": 1.0,
}
)
line.onchange_product_id()
line.onchange_operation_type()
self.assertEqual(line.location_id, repair.location_id)
self.assertEqual(line.location_dest_id, self.customer_location)
# Complete repair:
repair.action_validate()
repair.action_repair_start()
repair.action_repair_end()
moves = self.move_obj.search([("reference", "=", repair.name)])
self.assertEqual(len(moves), 2)
for m in moves:
self.assertEqual(m.state, "done")
if m.product_id == self.product:
self.assertEqual(m.location_id, self.stock_location_stock)
self.assertEqual(m.location_dest_id, self.refurbish_loc)
self.assertEqual(
m.mapped("move_line_ids.location_id"), self.stock_location_stock
)
self.assertEqual(
m.mapped("move_line_ids.location_dest_id"), self.refurbish_loc
)
elif m.product_id == self.refurbish_product:
self.assertEqual(m.location_id, self.refurbish_loc)
self.assertEqual(m.location_dest_id, self.customer_location)
self.assertEqual(
m.mapped("move_line_ids.location_id"), self.refurbish_loc
)
self.assertEqual(
m.mapped("move_line_ids.location_dest_id"), self.customer_location
)
else:
self.assertTrue(False, "Unexpected move.")
def test_02_repair_no_refurbish(self):
"""Tests normal repairs does not fail and normal location for consumed
material"""
repair = self.repair_obj.create(
{
"product_id": self.product.id,
"product_qty": 3.0,
"product_uom": self.product.uom_id.id,
"location_dest_id": self.customer_location.id,
"to_refurbish": False,
"location_id": self.stock_location_stock.id,
}
)
line = self.repair_line_obj.with_context(
to_refurbish=repair.to_refurbish,
refurbish_location_dest_id=repair.refurbish_location_dest_id,
).create(
{
"name": "consume stuff to repair",
"repair_id": repair.id,
"type": "add",
"product_id": self.material2.id,
"product_uom": self.material2.uom_id.id,
"product_uom_qty": 1.0,
"price_unit": 50.0,
"location_id": self.stock_location_stock.id,
"location_dest_id": self.customer_location.id,
}
)
line.onchange_product_id()
line.onchange_operation_type()
# Complete repair:
repair.action_validate()
repair.action_repair_start()
repair.action_repair_end()
move = self.move_obj.search(
[("product_id", "=", self.material2.id)], order="create_date desc", limit=1
)[0]
self.assertEqual(move.location_dest_id, self.scrap_location)
|
OCA/manufacture
|
repair_refurbish/tests/test_repair_refurbish.py
|
Python
|
agpl-3.0
| 6,809
|
#!/usr/bin/env python
"""
Iterates over file in blocks, for FFT processing and/or cross-correlation
example data: https://zenodo.org/record/848275
./PlotLoop.py ~/data/eclipse/wwv_rp0_2017-08-22T13-14-52_15.0MHz.bin 192e3
"""
from pathlib import Path
import numpy as np
from matplotlib.pyplot import figure,draw,pause,show
#
from piradar.delayseq import nextpow2
Lbyte = 8 # complex64 is 8 bytes per sample
wintype = np.hanning
def mainloop(fn,fs, tlim):
fn = Path(fn).expanduser()
blocksize = nextpow2(1*fs) # arbitrary, bigger->more gain
win = wintype(blocksize)
nfft = blocksize
f = np.arange(-fs/2, fs/2., fs/nfft) # shifted fft freq. bins
print('blocksize',blocksize, ' Nfft',nfft)
# %% setup plot
ax = figure().gca()
ax.set_xlabel('frequency [Hz]')
ax.set_ylabel('PSD [dB/Hz]')
ht = ax.set_title(fn)
hp, = ax.plot(f,0*f)
ax.set_ylim((-140,-90))
# %% loop
with fn.open('rb') as fid:
i = 0
while fid:
block = np.fromfile(fid, np.complex64, blocksize)
if block.size != blocksize: # EOF
break
X = np.fft.fftshift(np.fft.fft(win * block, nfft))
Pxx = 1./(fs*nfft) * abs(X)**2
            Pxx[1:-1] = 2*Pxx[1:-1]  # double all bins except the two edge bins
# %% live updating plot
hp.set_ydata(10*np.log10(Pxx))
ht.set_text(f'{fn.stem}: t= {i*blocksize/fs:0.2f} sec.')
i += 1
draw(); pause(1) # so that plots show while audio is playing
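# Editor's sketch (not part of the original script): the same windowed-FFT PSD
# estimate applied to a synthetic complex tone, handy for sanity-checking the
# scaling used in mainloop(). The parameter values are arbitrary.
def _demo_psd(fs=192e3, f0=10e3, n=4096):
    t = np.arange(n) / fs
    x = np.exp(2j * np.pi * f0 * t).astype(np.complex64)
    win = wintype(n)
    X = np.fft.fftshift(np.fft.fft(win * x, n))
    Pxx = 1. / (fs * n) * abs(X) ** 2
    return 10 * np.log10(Pxx)  # dB/Hz, peaked near f0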
if __name__ == '__main__':
from argparse import ArgumentParser
p = ArgumentParser()
p.add_argument('fn', help='giant .bin file to process SDR radar from')
p.add_argument('fs', help='sampling frequency [Hz]', type=float)
p.add_argument('-t','--tlim',help='time limits to work on (sec)',nargs=2,type=float)
p = p.parse_args()
fs = int(p.fs)
mainloop(p.fn, fs, p.tlim)
#show()
|
scienceopen/piradar
|
Looper.py
|
Python
|
agpl-3.0
| 1,922
|
#------------------------------------------------------------------------------
# Copyright (C) 2009 Richard Lincoln
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation; version 2 dated June, 1991.
#
# This software is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANDABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#------------------------------------------------------------------------------
# <<< imports
# @generated
from dynamics.dynamics.excitation_systems.excitation_system import ExcitationSystem
from google.appengine.ext import db
# >>> imports
class ExcCZ(ExcitationSystem):
""" Czech proportional/integral excitation system model.
"""
# <<< exc_cz.attributes
# @generated
# >>> exc_cz.attributes
# <<< exc_cz.references
# @generated
# >>> exc_cz.references
# <<< exc_cz.operations
# @generated
# >>> exc_cz.operations
# EOF -------------------------------------------------------------------------
|
rwl/openpowersystem
|
dynamics/dynamics/excitation_systems/exc_cz.py
|
Python
|
agpl-3.0
| 1,436
|
# -*- coding: utf-8 -*-
# Django
import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
# Third Party
import easy_thumbnails.fields
# MuckRock
import muckrock.core.fields
import muckrock.jurisdiction.models
class Migration(migrations.Migration):
dependencies = [
("jurisdiction", "0001_initial"),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name="Agency",
fields=[
(
"id",
models.AutoField(
verbose_name="ID",
serialize=False,
auto_created=True,
primary_key=True,
),
),
("name", models.CharField(max_length=255)),
("slug", models.SlugField(max_length=255)),
("approved", models.BooleanField()),
("can_email_appeals", models.BooleanField()),
(
"image",
easy_thumbnails.fields.ThumbnailerImageField(
null=True, upload_to="agency_images", blank=True
),
),
(
"image_attr_line",
models.CharField(
help_text="May use html", max_length=255, blank=True
),
),
(
"public_notes",
models.TextField(help_text="May use html", blank=True),
),
("stale", models.BooleanField(default=False)),
("address", models.TextField(blank=True)),
("email", models.EmailField(max_length=75, blank=True)),
(
"other_emails",
muckrock.core.fields.EmailsListField(max_length=255, blank=True),
),
("contact_salutation", models.CharField(max_length=30, blank=True)),
("contact_first_name", models.CharField(max_length=100, blank=True)),
("contact_last_name", models.CharField(max_length=100, blank=True)),
("contact_title", models.CharField(max_length=255, blank=True)),
(
"url",
models.URLField(
help_text="Begin with http://",
verbose_name="FOIA Web Page",
blank=True,
),
),
("expires", models.DateField(null=True, blank=True)),
("phone", models.CharField(max_length=30, blank=True)),
("fax", models.CharField(max_length=30, blank=True)),
("notes", models.TextField(blank=True)),
("aliases", models.TextField(blank=True)),
("website", models.CharField(max_length=255, blank=True)),
("twitter", models.CharField(max_length=255, blank=True)),
("twitter_handles", models.TextField(blank=True)),
(
"foia_logs",
models.URLField(
help_text="Begin with http://",
verbose_name="FOIA Logs",
blank=True,
),
),
(
"foia_guide",
models.URLField(
help_text="Begin with http://",
verbose_name="FOIA Processing Guide",
blank=True,
),
),
("exempt", models.BooleanField(default=False)),
(
"appeal_agency",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
blank=True,
to="agency.Agency",
null=True,
),
),
(
"jurisdiction",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="agencies",
to="jurisdiction.Jurisdiction",
),
),
(
"parent",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="children",
blank=True,
to="agency.Agency",
null=True,
),
),
],
options={"verbose_name_plural": "agencies"},
bases=(models.Model, muckrock.jurisdiction.models.RequestHelper),
),
migrations.CreateModel(
name="AgencyType",
fields=[
(
"id",
models.AutoField(
verbose_name="ID",
serialize=False,
auto_created=True,
primary_key=True,
),
),
("name", models.CharField(max_length=60)),
],
options={"ordering": ["name"]},
bases=(models.Model,),
),
migrations.AddField(
model_name="agency",
name="types",
field=models.ManyToManyField(to="agency.AgencyType", blank=True),
preserve_default=True,
),
migrations.AddField(
model_name="agency",
name="user",
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
blank=True,
to=settings.AUTH_USER_MODEL,
null=True,
),
preserve_default=True,
),
]
|
MuckRock/muckrock
|
muckrock/agency/migrations/0001_initial.py
|
Python
|
agpl-3.0
| 6,043
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('dashboard_app', '0012_auto_20150126_1644'),
]
operations = [
migrations.RemoveField(
model_name='imagereportchart',
name='is_delta',
),
migrations.AddField(
model_name='imagechartuser',
name='is_delta',
field=models.BooleanField(default=False, verbose_name='Delta reporting'),
preserve_default=True,
),
]
|
Linaro/lava-server
|
dashboard_app/migrations/0013_auto_20150127_1341.py
|
Python
|
agpl-3.0
| 601
|
# -*- coding: utf-8 -*-
{
'name': "duplicate sale lines color",
'summary': """coloring duplicate sale order lines""",
'description': """
coloring duplicate sale order lines
""",
'author': "DVIT.me",
'website': "http://dvit.me",
'category': 'Sale Order',
'version': '10.0.2.0',
'depends': ['sale'],
'data': [
'views/views.xml',
],
}
|
mohamedhagag/dvit-odoo
|
dvit_sale_duplicate_lines/__manifest__.py
|
Python
|
agpl-3.0
| 394
|
# 2016 Pedro M. Baeza <pedro.baeza@tecnativa.com>
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
{
'name': 'AEAT modelo 123',
'version': '11.0.1.1.0',
'category': "Localisation/Accounting",
'author': "Tecnativa, "
"Spanish Localization Team, "
"Odoo Community Association (OCA)",
'website': "https://github.com/OCA/l10n-spain",
'license': 'AGPL-3',
'depends': [
'l10n_es',
'l10n_es_aeat',
],
'data': [
'data/aeat_export_mod123_data.xml',
'data/tax_code_map_mod123.xml',
'views/mod123_view.xml',
'security/ir.model.access.csv',
],
'installable': True,
}
|
factorlibre/l10n-spain
|
l10n_es_aeat_mod123/__manifest__.py
|
Python
|
agpl-3.0
| 692
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('access_groups', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='groupmembership',
name='user',
field=models.ForeignKey(related_name='memberships', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='accessgroup',
name='groupdesc_content_type',
field=models.ForeignKey(to='contenttypes.ContentType'),
),
migrations.AddField(
model_name='accessgroup',
name='users',
field=models.ManyToManyField(related_name='access_groups', through='access_groups.GroupMembership', to=settings.AUTH_USER_MODEL),
),
migrations.AlterUniqueTogether(
name='groupmembership',
unique_together=set([('group', 'user')]),
),
migrations.AlterUniqueTogether(
name='accessgroup',
unique_together=set([('groupdesc_content_type', 'groupdesc_object_id')]),
),
]
|
asm-products/banyan-web
|
access_groups/migrations/0002_auto_20150721_0419.py
|
Python
|
agpl-3.0
| 1,349
|
# -*- coding: utf-8 -*-
# This file is part of wger Workout Manager.
#
# wger Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# wger Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# Third Party
from django.contrib.auth.models import (
Permission,
User
)
from django.db import models
from django.db.models import Q
class GymManager(models.Manager):
'''
Custom query manager for Gyms
'''
def get_members(self, gym_pk):
'''
        Returns all members for this gym (i.e. non-admin ones)
'''
perm_gym = Permission.objects.get(codename='manage_gym')
perm_gyms = Permission.objects.get(codename='manage_gyms')
perm_trainer = Permission.objects.get(codename='gym_trainer')
users = User.objects.filter(userprofile__gym_id=gym_pk)
return users.exclude(Q(groups__permissions=perm_gym) |
Q(groups__permissions=perm_gyms) |
Q(groups__permissions=perm_trainer)).distinct()
def get_admins(self, gym_pk):
'''
        Returns all admins for this gym (i.e. trainers, managers, etc.)
'''
perm_gym = Permission.objects.get(codename='manage_gym')
perm_gyms = Permission.objects.get(codename='manage_gyms')
perm_trainer = Permission.objects.get(codename='gym_trainer')
users = User.objects.filter(userprofile__gym_id=gym_pk)
return users.filter(Q(groups__permissions=perm_gym) |
Q(groups__permissions=perm_gyms) |
Q(groups__permissions=perm_trainer)).distinct()
|
petervanderdoes/wger
|
wger/gym/managers.py
|
Python
|
agpl-3.0
| 2,083
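The GymManager above keeps the member/admin split behind two query methods that filter on group permissions. As a rough illustration of how such a manager is typically attached and queried (the Gym model and the calls below are assumptions for this sketch, not wger source):

from django.db import models
from wger.gym.managers import GymManager

class Gym(models.Model):
    # hypothetical model attaching the custom manager shown above
    name = models.CharField(max_length=60)
    objects = GymManager()

# members = Gym.objects.get_members(gym_pk=1)  # users without gym-management permissions
# admins = Gym.objects.get_admins(gym_pk=1)    # trainers, gym managers, etc.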
|
#!/usr/bin/python
import os
import sys
import cgi
import time
import random
import string
import Cookie
import locale
import urlparse
import datetime
import traceback
try:
import simplejson as json
except:
import json
sys.path.append((os.path.dirname(__file__) or ".") + "/../db")
sys.path.append((os.path.dirname(__file__) or ".") + "/../")
import config
import db
import aespckfile
r"""
HTTP API
Runs as a CGI program.
Maps database calls to HTTP requests.
JSON output.
"""
def json_print(obj, header=None):
"""CGI-format JSON output.
Set Content-length and close stdout so client won't wait for further processing"""
print "Content-type: application/json"
if header:
print header
outstr = json.dumps(obj, indent=2)
print "Content-length: %s" % (len(outstr)+1)
print
print outstr
sys.stdout.close()
def exit_error(code, message):
"""CGI-format error message and exit"""
print "Status: %s" % (code)
print "Content-type: application/json"
print
print json.dumps({"error":message})
sys.exit(0)
form = cgi.FieldStorage()
query = urlparse.parse_qs(os.getenv("QUERY_STRING") or "")
action = form.getfirst("action")
username = form.getfirst("username")
password = form.getfirst("password")
# Unauthenticated actions - create a new user, and get list of supported banks
if action == "newuser":
try:
if username and password:
            json_print(db.create_db(username, password))
sys.exit(0)
else:
exit_error(400, "Incomplete username/password")
except Exception, e:
exit_error(500, "Couldn't create new user " + e)
elif action == "getbanks":
json_print(config.banks)
sys.exit(0)
# Get user's session from cookies if we can.
# If so, decrypt and load the session file, and pull out username/password
sessionfn = None
if os.getenv("HTTP_COOKIE"):
try:
cookies = Cookie.SimpleCookie()
cookies.load(os.getenv("HTTP_COOKIE"))
sessionfn = "%s/%s.pck" % (config.sessiondir, cookies["sessionid"].value)
# Time-out session after inactivity
if os.path.exists(sessionfn) and os.path.getmtime(sessionfn) < (time.time()-config.sessiontimeout):
os.remove(sessionfn)
if not os.path.exists(sessionfn):
if (not username or not password):
exit_error(403, "Session Expired")
else:
sessionfn = None
else:
try:
session = aespckfile.load(sessionfn, cookies["sessionkey"].value)
            except Exception, e:
                exit_error(403, "Bad Session Token: %s" % (e))
if not username:
username = session["username"]
if not password:
password = session["password"]
except (Cookie.CookieError, KeyError):
pass
if not username or not password:
if sessionfn:
os.remove(sessionfn)
exit_error(400,"incomplete username/password")
# Attempt to load the database with the given username/password
try:
mydb = db.DB(username, password)
except Exception, e:
if sessionfn:
os.remove(sessionfn)
exit_error(403,"Bad password: %s" % (e))
# Just check if our session is still good without updating the access time
if action == "checklogin":
json_print(True)
sys.exit(0)
# Touch the session file to show activity
if sessionfn:
os.utime(sessionfn, None)
# On login, create session file and set cookies.
if action == "login":
cookies = Cookie.SimpleCookie()
session = { "username": username, "password": password }
cookies["sessionid"] = ''.join(random.choice(string.ascii_letters + string.digits) for x in range(32))
cookies["sessionid"]["secure"] = True
cookies["sessionid"]["path"] = os.path.dirname(os.getenv("REQUEST_URI") or "/")
cookies["sessionkey"] = ''.join(random.choice(string.ascii_letters + string.digits) for x in range(32))
cookies["sessionkey"]["secure"] = True
cookies["sessionid"]["path"] = os.path.dirname(os.getenv("REQUEST_URI") or "/")
sessionfn = "%s/%s.pck" % (config.sessiondir, cookies["sessionid"].value)
aespckfile.dump(sessionfn, session, cookies["sessionkey"].value)
json_print(True, cookies)
mydb.backup()
# Clear session file, cookies on logout.
elif action == "logout":
if sessionfn and os.path.exists(sessionfn):
os.remove(sessionfn)
expire = (datetime.datetime.now() - datetime.timedelta(days=1)).strftime("%a, %d-%b-%Y %H:%M:%S PST")
cookies = Cookie.SimpleCookie()
cookies["sessionid"] = ""
cookies["sessionid"]["expires"] = expire
cookies["sessionid"]["path"] = os.path.dirname(os.getenv("REQUEST_URI") or "/")
cookies["sessionkey"] = ""
cookies["sessionkey"]["expires"] = expire
cookies["sessionid"]["path"] = os.path.dirname(os.getenv("REQUEST_URI") or "/")
json_print(True, cookies)
mydb.backup()
# The rest of the functions map to db calls
elif action == "newtransactions":
try:
data = json.loads(form.getfirst("data"))
except Exception, e:
exit_error(400, "Bad transactions: %s %s" % (e, form.getfirst("data")[:20]))
json_print(mydb.newtransactions(data))
elif action == "accountstodo":
json_print(mydb.accountstodo())
elif action == "accounts":
json_print(mydb.accounts())
elif action == "search":
try:
results = mydb.search(json.loads(form.getfirst("query") or "{}"),
form.getfirst("startdate") or "0",
form.getfirst("enddate") or "9",
int(form.getfirst("limit") or 100),
int(form.getfirst("skip") or 0),
form.getfirst("sort") or None)
if (form.getfirst("format")):
locale.setlocale(locale.LC_ALL, 'en_US.UTF8')
if form.getfirst("format") == "text":
print "Content-type: text/plain"
print
for res in results:
print "{0} {1:20} {2:40} {3:>12}".format(res["date"], (res.get("subcategory") or res.get("category",""))[:20], res["desc"][:40].encode("ascii","ignore"), locale.currency(float(res["amount"])/100, grouping=True))
if form.getfirst("details"):
print res.get("orig_desc")
print "%s Transactions, Total %s" % (len(results), locale.currency(float(sum([x["amount"] for x in results]))/100, grouping=True))
elif form.getfirst("format") == "csv":
print "Content-type: text/csv"
print
for res in results:
print "{0},{1},\"{2}\",\"{3}\",\"{4}\",{5},{6}".format(res["date"], (res.get("subcategory") or res.get("category","")), res["desc"].encode("ascii","ignore"), locale.currency(float(res["amount"])/100, grouping=True), locale.currency(float(res["orig_amount"])/100, grouping=True), res["account"], res["subaccount"])
else:
json_print(results)
except Exception, e:
exit_error(400, "Bad search: %s, %s" % (e, traceback.format_exc()))
elif action == "updatetransaction":
try:
data = json.loads(form.getfirst("data") or "{}")
if not data:
exit_error(400, "Bad transactions: no data")
except Exception, e:
exit_error(400, "Bad transactions: %s %s" % (e, form.getfirst("data")[:20]))
json_print(mydb.updatetransaction(form.getfirst("id"), data, save=True))
elif action == "image" or query.get("image"):
img = mydb.getimage(form.getfirst("id") or query["image"][0])
if img:
print "Content-type: image/png"
print "Content-length: %s" % (len(img))
print
print img
else:
exit_error(404, "Image not found")
elif action == "editaccount":
try:
json_print(mydb.editaccount(json.loads(form.getfirst("account"))))
except Exception, e:
exit_error(400, "Bad account %s" % (e))
elif action == "getcategories":
try:
json_print(mydb.getcategories())
except Exception, e:
exit_error(500, "Error %s" % (e))
elif action == "getcenters":
json_print(mydb.getcenters())
elif action == "gettags":
json_print(mydb.gettags())
elif action == "getquestions":
json_print(mydb.getquestions())
elif action == "balancehistory":
json_print(mydb.balancehistory())
elif action == "getcookies":
json_print(mydb.getcookies())
elif action == "setcookies":
try:
data = json.loads(form.getfirst("data") or "{}")
if not data:
exit_error(400, "Bad transactions: no data")
except Exception, e:
exit_error(400, "Bad transactions: %s %s" % (e, form.getfirst("data")[:20]))
json_print(mydb.setcookies(data, save=True))
elif action == "link":
json_print(mydb.link(form.getfirst("parent") or "null",
json.loads(form.getfirst("children") or "[]"),
form.getfirst("linktype") or "none"))
elif action == "summary":
json_print(mydb.summary(form.getfirst("startdate") or str((datetime.date.today().replace(day=1)-datetime.timedelta(days=1)).replace(day=1)),
form.getfirst("enddate") or str(datetime.date.today().replace(day=1)-datetime.timedelta(days=1)),
json.loads(form.getfirst("filter") or "{}"),
json.loads(form.getfirst("filterout") or "{}"),
form.getfirst("key") or "category",
form.getfirst("keydef") or "Uncategorized",
form.getfirst("keysort") or "amount",
json.loads(form.getfirst("keysortrev") or "true"),
form.getfirst("subkey") or "subcategory",
form.getfirst("subkeydef") or "None",
form.getfirst("subkekysort") or "amount",
json.loads(form.getfirst("subkeysortrev") or "true"),
form.getfirst("modify") or None))
else:
exit_error(404,"Method not found")
|
vincebusam/pyWebCash
|
web/api.py
|
Python
|
agpl-3.0
| 10,118
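Because the CGI script above maps form fields straight to database calls, a client only needs to POST an action (plus username/password once) and then reuse the returned session cookies. A minimal client sketch, assuming a hypothetical deployment URL and the third-party requests library:

import requests

BASE = "https://example.org/cgi-bin/api.py"  # hypothetical URL where api.py is served

session = requests.Session()
# "login" creates the encrypted session file and sets the sessionid/sessionkey cookies
session.post(BASE, data={"action": "login", "username": "alice", "password": "secret"})
# later calls are authenticated via those cookies
accounts = session.post(BASE, data={"action": "accounts"}).json()
results = session.post(BASE, data={"action": "search", "limit": "10"}).json()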
|
import ddt
from mock import patch, Mock
from cms.djangoapps.contentstore.signals.handlers import (
GRADING_POLICY_COUNTDOWN_SECONDS,
handle_grading_policy_changed
)
from student.models import CourseEnrollment
from student.tests.factories import UserFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
@ddt.ddt
class LockedTest(ModuleStoreTestCase):
def setUp(self):
super(LockedTest, self).setUp()
self.course = CourseFactory.create(
org='edx',
name='course',
run='run',
)
self.user = UserFactory.create()
CourseEnrollment.enroll(self.user, self.course.id)
@patch('cms.djangoapps.contentstore.signals.handlers.cache.add')
@patch('cms.djangoapps.contentstore.signals.handlers.cache.delete')
@patch('cms.djangoapps.contentstore.signals.handlers.task_compute_all_grades_for_course.apply_async')
@ddt.data(True, False)
def test_locked(self, lock_available, compute_grades_async_mock, delete_mock, add_mock):
add_mock.return_value = lock_available
sender = Mock()
handle_grading_policy_changed(sender, course_key=unicode(self.course.id))
cache_key = 'handle_grading_policy_changed-{}'.format(unicode(self.course.id))
self.assertEqual(lock_available, compute_grades_async_mock.called)
if lock_available:
add_mock.assert_called_once_with(cache_key, "true", GRADING_POLICY_COUNTDOWN_SECONDS)
|
jolyonb/edx-platform
|
cms/djangoapps/contentstore/tests/test_signals.py
|
Python
|
agpl-3.0
| 1,547
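The test exercises a cache-based lock: Django's cache.add only succeeds when the key is absent, so concurrent signal handlers schedule the grade recomputation at most once per countdown window. A minimal sketch of that pattern (the schedule_task callable is a stand-in, not the edx-platform handler itself):

from django.core.cache import cache

def schedule_once(course_key, countdown, schedule_task):
    """Run schedule_task(course_key) unless another process already did so recently."""
    key = 'handle_grading_policy_changed-{}'.format(course_key)
    if cache.add(key, "true", countdown):  # add() returns False if the key already exists
        schedule_task(course_key)
        return True
    return False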
|
# coding: utf-8
"""
Django settings for a boilerplate project.
For more information on this file, see
https://docs.djangoproject.com/en/dev/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/dev/ref/settings/
"""
import os
from django.utils.translation import ugettext_lazy as _
DEBUG = False
# PROJECT DIRECTORY AND GENERAL SETTINGS
PROJECT_ROOT = os.path.abspath(os.path.join(__file__, '..', '..'))
SITE_ROOT = os.path.dirname(PROJECT_ROOT)
SITE_ID = 1
SITE_NAME = os.path.basename(PROJECT_ROOT)
ROOT_URLCONF = '%s.urls' % SITE_NAME
# END PROJECT DIRECTORY AND GENERAL SETTINGS
# SECURITY
ACCOUNT_ACTIVATION_DAYS = 3
# APP CONFIGURATION
DJANGO_APPS = (
# Default Django apps:
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.flatpages',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.humanize',
'djangocms_admin_style',
# Admin panel and documentation:
'django.contrib.admin',
)
THIRD_PARTY_APPS = (
'ckeditor',
# A prettier theme
'accounts.apps.RegistrationConfig',
'rest_framework',
'django_ajax'
)
LOCAL_APPS = (
'api',
'google_tools',
'accounts.apps.AccountsConfig',
'organizations',
'common',
'scheduler',
'blueprint',
'shiftmailer',
'stats',
'places',
'non_logged_in_area',
'registration_history',
'scheduletemplates',
'news',
'notifications',
'content'
)
INSTALLED_APPS = DJANGO_APPS + LOCAL_APPS + THIRD_PARTY_APPS
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.locale.LocaleMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(SITE_ROOT, 'static')
STATICFILES_DIRS = (
os.path.join(SITE_ROOT, 'resources'),
)
MEDIA_URL = '/media/'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(SITE_ROOT, 'templates'),
os.path.join(PROJECT_ROOT, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'google_tools.context_processors.google_tools_config'
],
},
},
]
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
LOGIN_REDIRECT_URL = '/'
LOGIN_URL = '/auth/login/'
TIME_ZONE = 'Europe/Berlin'
LANGUAGE_CODE = 'en'
USE_L10N = True
LANGUAGES = (
('en', _('English')),
('de', _('German')),
('el', _('Greek')),
('hu', _('Hungarian')),
('sv', _('Swedish')),
)
LOCALE_PATHS = (
SITE_ROOT + '/locale',
)
WSGI_APPLICATION = '%s.wsgi.application' % SITE_NAME
FIXTURE_DIRS = (
os.path.join(PROJECT_ROOT, 'fixtures'),
)
CKEDITOR_JQUERY_URL = '//ajax.googleapis.com/ajax/libs/jquery/2.1.1/jquery.min.js'
CKEDITOR_UPLOAD_PATH = "uploads/"
DATE_FORMAT = "l, d.m.Y"
|
alper/volunteer_planner
|
volunteer_planner/settings/base.py
|
Python
|
agpl-3.0
| 4,145
|
# Copyright 2009-2013 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
"""Test BuildQueue features."""
from datetime import timedelta
from simplejson import dumps
from zope.component import getUtility
from zope.security.proxy import removeSecurityProxy
from lp.app.interfaces.launchpad import ILaunchpadCelebrities
from lp.buildmaster.enums import BuildStatus
from lp.buildmaster.interfaces.builder import IBuilderSet
from lp.registry.interfaces.pocket import PackagePublishingPocket
from lp.registry.interfaces.series import SeriesStatus
from lp.registry.interfaces.sourcepackage import SourcePackageUrgency
from lp.services.database.interfaces import IStore
from lp.services.log.logger import DevNullLogger
from lp.services.webapp.interfaces import OAuthPermission
from lp.soyuz.enums import (
ArchivePurpose,
PackagePublishingStatus,
)
from lp.soyuz.interfaces.binarypackagebuild import IBinaryPackageBuildSet
from lp.soyuz.interfaces.buildfarmbuildjob import IBuildFarmBuildJob
from lp.soyuz.interfaces.buildpackagejob import (
COPY_ARCHIVE_SCORE_PENALTY,
IBuildPackageJob,
PRIVATE_ARCHIVE_SCORE_BONUS,
SCORE_BY_COMPONENT,
SCORE_BY_POCKET,
SCORE_BY_URGENCY,
)
from lp.soyuz.interfaces.processor import IProcessorSet
from lp.soyuz.model.binarypackagebuild import BinaryPackageBuild
from lp.soyuz.model.buildpackagejob import BuildPackageJob
from lp.soyuz.tests.test_publishing import SoyuzTestPublisher
from lp.testing import (
anonymous_logged_in,
api_url,
person_logged_in,
TestCaseWithFactory,
)
from lp.testing.layers import (
DatabaseFunctionalLayer,
LaunchpadZopelessLayer,
)
from lp.testing.pages import webservice_for_person
def find_job(test, name, processor='386'):
"""Find build and queue instance for the given source and processor."""
for build in test.builds:
if (build.source_package_release.name == name
and build.processor.name == processor):
return (build, build.buildqueue_record)
return (None, None)
def builder_key(build):
"""Return processor and virtualization for the given build."""
return (build.processor.id, build.is_virtualized)
def assign_to_builder(test, job_name, builder_number, processor='386'):
"""Simulate assigning a build to a builder."""
def nth_builder(test, build, n):
"""Get builder #n for the given build processor and virtualization."""
builder = None
builders = test.builders.get(builder_key(build), [])
try:
builder = builders[n - 1]
except IndexError:
pass
return builder
build, bq = find_job(test, job_name, processor)
builder = nth_builder(test, build, builder_number)
bq.markAsBuilding(builder)
class TestBuildJobBase(TestCaseWithFactory):
"""Setup the test publisher and some builders."""
layer = LaunchpadZopelessLayer
def setUp(self):
super(TestBuildJobBase, self).setUp()
self.publisher = SoyuzTestPublisher()
self.publisher.prepareBreezyAutotest()
self.i8 = self.factory.makeBuilder(name='i386-n-8', virtualized=False)
self.i9 = self.factory.makeBuilder(name='i386-n-9', virtualized=False)
processor = getUtility(IProcessorSet).getByName('hppa')
self.h6 = self.factory.makeBuilder(
name='hppa-n-6', processor=processor, virtualized=False)
self.h7 = self.factory.makeBuilder(
name='hppa-n-7', processor=processor, virtualized=False)
self.builders = dict()
# x86 native
self.builders[(1, False)] = [self.i8, self.i9]
# hppa native
self.builders[(3, True)] = [self.h6, self.h7]
# Ensure all builders are operational.
for builders in self.builders.values():
for builder in builders:
builder.builderok = True
builder.manual = False
# Disable the sample data builders.
getUtility(IBuilderSet)['bob'].builderok = False
getUtility(IBuilderSet)['frog'].builderok = False
class TestBuildPackageJob(TestBuildJobBase):
"""Test dispatch time estimates for binary builds (i.e. single build
    farm job type) targeting a single processor architecture and the primary
archive.
"""
def setUp(self):
"""Set up some native x86 builds for the test archive."""
super(TestBuildPackageJob, self).setUp()
# The builds will be set up as follows:
#
# j: 3 gedit p: hppa v:False e:0:01:00 *** s: 1001
# j: 4 gedit p: 386 v:False e:0:02:00 *** s: 1002
# j: 5 firefox p: hppa v:False e:0:03:00 *** s: 1003
# j: 6 firefox p: 386 v:False e:0:04:00 *** s: 1004
# j: 7 cobblers p: hppa v:False e:0:05:00 *** s: 1005
# j: 8 cobblers p: 386 v:False e:0:06:00 *** s: 1006
# j: 9 thunderpants p: hppa v:False e:0:07:00 *** s: 1007
# j:10 thunderpants p: 386 v:False e:0:08:00 *** s: 1008
# j:11 apg p: hppa v:False e:0:09:00 *** s: 1009
# j:12 apg p: 386 v:False e:0:10:00 *** s: 1010
# j:13 vim p: hppa v:False e:0:11:00 *** s: 1011
# j:14 vim p: 386 v:False e:0:12:00 *** s: 1012
# j:15 gcc p: hppa v:False e:0:13:00 *** s: 1013
# j:16 gcc p: 386 v:False e:0:14:00 *** s: 1014
# j:17 bison p: hppa v:False e:0:15:00 *** s: 1015
# j:18 bison p: 386 v:False e:0:16:00 *** s: 1016
# j:19 flex p: hppa v:False e:0:17:00 *** s: 1017
# j:20 flex p: 386 v:False e:0:18:00 *** s: 1018
# j:21 postgres p: hppa v:False e:0:19:00 *** s: 1019
# j:22 postgres p: 386 v:False e:0:20:00 *** s: 1020
#
# j=job, p=processor, v=virtualized, e=estimated_duration, s=score
# First mark all builds in the sample data as already built.
store = IStore(BinaryPackageBuild)
sample_data = store.find(BinaryPackageBuild)
for build in sample_data:
build.buildstate = BuildStatus.FULLYBUILT
store.flush()
# We test builds that target a primary archive.
self.non_ppa = self.factory.makeArchive(
name="primary", purpose=ArchivePurpose.PRIMARY)
self.non_ppa.require_virtualized = False
self.builds = []
sourcenames = [
"gedit",
"firefox",
"cobblers",
"thunderpants",
"apg",
"vim",
"gcc",
"bison",
"flex",
"postgres",
]
for sourcename in sourcenames:
self.builds.extend(
self.publisher.getPubSource(
sourcename=sourcename,
status=PackagePublishingStatus.PUBLISHED,
archive=self.non_ppa,
architecturehintlist='any').createMissingBuilds())
# We want the builds to have a lot of variety when it comes to score
# and estimated duration etc. so that the queries under test get
# exercised properly.
score = 1000
duration = 0
for build in self.builds:
score += 1
duration += 60
bq = build.buildqueue_record
bq.lastscore = score
removeSecurityProxy(bq).estimated_duration = timedelta(
seconds=duration)
def test_processor(self):
# Test that BuildPackageJob returns the correct processor.
build, bq = find_job(self, 'gcc', '386')
bpj = bq.specific_job
self.assertEqual(bpj.processor.id, 1)
build, bq = find_job(self, 'bison', 'hppa')
bpj = bq.specific_job
self.assertEqual(bpj.processor.id, 3)
def test_virtualized(self):
# Test that BuildPackageJob returns the correct virtualized flag.
build, bq = find_job(self, 'apg', '386')
bpj = bq.specific_job
self.assertEqual(bpj.virtualized, False)
build, bq = find_job(self, 'flex', 'hppa')
bpj = bq.specific_job
self.assertEqual(bpj.virtualized, False)
def test_providesInterfaces(self):
# Ensure that a BuildPackageJob generates an appropriate cookie.
build, bq = find_job(self, 'gcc', '386')
build_farm_job = bq.specific_job
self.assertProvides(build_farm_job, IBuildPackageJob)
self.assertProvides(build_farm_job, IBuildFarmBuildJob)
def test_jobStarted(self):
# Starting a build updates the status.
build, bq = find_job(self, 'gcc', '386')
build_package_job = bq.specific_job
build_package_job.jobStarted()
self.assertEqual(
BuildStatus.BUILDING, build_package_job.build.status)
self.assertIsNot(None, build_package_job.build.date_started)
self.assertIsNot(None, build_package_job.build.date_first_dispatched)
self.assertIs(None, build_package_job.build.date_finished)
class TestBuildPackageJobScore(TestCaseWithFactory):
layer = DatabaseFunctionalLayer
def makeBuildJob(self, purpose=None, private=False, component="main",
urgency="high", pocket="RELEASE", section_name=None):
if purpose is not None or private:
archive = self.factory.makeArchive(
purpose=purpose, private=private)
else:
archive = None
spph = self.factory.makeSourcePackagePublishingHistory(
archive=archive, component=component, urgency=urgency,
section_name=section_name)
naked_spph = removeSecurityProxy(spph) # needed for private archives
build = self.factory.makeBinaryPackageBuild(
source_package_release=naked_spph.sourcepackagerelease,
pocket=pocket)
return removeSecurityProxy(build).makeJob()
# The defaults for pocket, component, and urgency here match those in
# makeBuildJob.
def assertCorrectScore(self, job, pocket="RELEASE", component="main",
urgency="high", other_bonus=0):
self.assertEqual(
(SCORE_BY_POCKET[PackagePublishingPocket.items[pocket.upper()]] +
SCORE_BY_COMPONENT[component] +
SCORE_BY_URGENCY[SourcePackageUrgency.items[urgency.upper()]] +
other_bonus), job.score())
def test_score_unusual_component(self):
spph = self.factory.makeSourcePackagePublishingHistory(
component="unusual")
build = self.factory.makeBinaryPackageBuild(
source_package_release=spph.sourcepackagerelease)
build.queueBuild()
job = build.buildqueue_record.specific_job
# For now just test that it doesn't raise an Exception
job.score()
def test_main_release_low_score(self):
# 1500 (RELEASE) + 1000 (main) + 5 (low) = 2505.
job = self.makeBuildJob(component="main", urgency="low")
self.assertCorrectScore(job, "RELEASE", "main", "low")
def test_copy_archive_main_release_low_score(self):
# 1500 (RELEASE) + 1000 (main) + 5 (low) - 2600 (copy archive) = -95.
# With this penalty, even language-packs and build retries will be
# built before copy archives.
job = self.makeBuildJob(
purpose="COPY", component="main", urgency="low")
self.assertCorrectScore(
job, "RELEASE", "main", "low", -COPY_ARCHIVE_SCORE_PENALTY)
def test_copy_archive_relative_score_is_applied(self):
# Per-archive relative build scores are applied, in this case
# exactly offsetting the copy-archive penalty.
job = self.makeBuildJob(
purpose="COPY", component="main", urgency="low")
removeSecurityProxy(job.build.archive).relative_build_score = 2600
self.assertCorrectScore(
job, "RELEASE", "main", "low", -COPY_ARCHIVE_SCORE_PENALTY + 2600)
def test_archive_negative_relative_score_is_applied(self):
# Negative per-archive relative build scores are allowed.
job = self.makeBuildJob(component="main", urgency="low")
removeSecurityProxy(job.build.archive).relative_build_score = -100
self.assertCorrectScore(job, "RELEASE", "main", "low", -100)
def test_private_archive_bonus_is_applied(self):
# Private archives get a bonus of 10000.
job = self.makeBuildJob(private=True, component="main", urgency="high")
self.assertCorrectScore(
job, "RELEASE", "main", "high", PRIVATE_ARCHIVE_SCORE_BONUS)
def test_main_release_low_recent_score(self):
# 1500 (RELEASE) + 1000 (main) + 5 (low) = 2505.
job = self.makeBuildJob(component="main", urgency="low")
self.assertCorrectScore(job, "RELEASE", "main", "low")
def test_universe_release_high_five_minutes_score(self):
# 1500 (RELEASE) + 250 (universe) + 15 (high) = 1765.
job = self.makeBuildJob(component="universe", urgency="high")
self.assertCorrectScore(job, "RELEASE", "universe", "high")
def test_multiverse_release_medium_fifteen_minutes_score(self):
# 1500 (RELEASE) + 0 (multiverse) + 10 (medium) = 1510.
job = self.makeBuildJob(component="multiverse", urgency="medium")
self.assertCorrectScore(job, "RELEASE", "multiverse", "medium")
def test_main_release_emergency_thirty_minutes_score(self):
# 1500 (RELEASE) + 1000 (main) + 20 (emergency) = 2520.
job = self.makeBuildJob(component="main", urgency="emergency")
self.assertCorrectScore(job, "RELEASE", "main", "emergency")
def test_restricted_release_low_one_hour_score(self):
# 1500 (RELEASE) + 750 (restricted) + 5 (low) = 2255.
job = self.makeBuildJob(component="restricted", urgency="low")
self.assertCorrectScore(job, "RELEASE", "restricted", "low")
def test_backports_score(self):
# BACKPORTS is the lowest-priority pocket.
job = self.makeBuildJob(pocket="BACKPORTS")
self.assertCorrectScore(job, "BACKPORTS")
def test_release_score(self):
# RELEASE ranks next above BACKPORTS.
job = self.makeBuildJob(pocket="RELEASE")
self.assertCorrectScore(job, "RELEASE")
def test_proposed_updates_score(self):
# PROPOSED and UPDATES both rank next above RELEASE. The reason why
# PROPOSED and UPDATES have the same priority is because sources in
# both pockets are submitted to the same policy and should reach
# their audience as soon as possible (see more information about
# this decision in bug #372491).
proposed_job = self.makeBuildJob(pocket="PROPOSED")
self.assertCorrectScore(proposed_job, "PROPOSED")
updates_job = self.makeBuildJob(pocket="UPDATES")
self.assertCorrectScore(updates_job, "UPDATES")
def test_security_updates_score(self):
# SECURITY is the top-ranked pocket.
job = self.makeBuildJob(pocket="SECURITY")
self.assertCorrectScore(job, "SECURITY")
def test_score_packageset(self):
# Package sets alter the score of official packages for their
# series.
job = self.makeBuildJob(
component="main", urgency="low", purpose=ArchivePurpose.PRIMARY)
packageset = self.factory.makePackageset(
distroseries=job.build.distro_series)
removeSecurityProxy(packageset).add(
[job.build.source_package_release.sourcepackagename])
removeSecurityProxy(packageset).relative_build_score = 100
self.assertCorrectScore(job, "RELEASE", "main", "low", 100)
def test_score_packageset_in_ppa(self):
# Package set score boosts don't affect PPA packages.
job = self.makeBuildJob(
component="main", urgency="low", purpose=ArchivePurpose.PPA)
packageset = self.factory.makePackageset(
distroseries=job.build.distro_series)
removeSecurityProxy(packageset).add(
[job.build.source_package_release.sourcepackagename])
removeSecurityProxy(packageset).relative_build_score = 100
self.assertCorrectScore(job, "RELEASE", "main", "low", 0)
def test_translations_score(self):
# Language packs (the translations section) don't get any
# package-specific score bumps. They always have the archive's
# base score.
job = self.makeBuildJob(section_name='translations')
removeSecurityProxy(job.build.archive).relative_build_score = 666
self.assertEqual(666, job.score())
def assertScoreReadableByAnyone(self, obj):
"""An object's build score is readable by anyone."""
with person_logged_in(obj.owner):
obj_url = api_url(obj)
removeSecurityProxy(obj).relative_build_score = 100
webservice = webservice_for_person(
self.factory.makePerson(), permission=OAuthPermission.WRITE_PUBLIC)
entry = webservice.get(obj_url, api_version="devel").jsonBody()
self.assertEqual(100, entry["relative_build_score"])
def assertScoreNotWriteableByOwner(self, obj):
"""Being an object's owner does not allow changing its build score.
This affects a site-wide resource, and is thus restricted to
launchpad-buildd-admins.
"""
with person_logged_in(obj.owner):
obj_url = api_url(obj)
webservice = webservice_for_person(
obj.owner, permission=OAuthPermission.WRITE_PUBLIC)
entry = webservice.get(obj_url, api_version="devel").jsonBody()
response = webservice.patch(
entry["self_link"], "application/json",
dumps(dict(relative_build_score=100)))
self.assertEqual(401, response.status)
new_entry = webservice.get(obj_url, api_version="devel").jsonBody()
self.assertEqual(0, new_entry["relative_build_score"])
def assertScoreWriteableByTeam(self, obj, team):
"""Members of TEAM can change an object's build score."""
with person_logged_in(obj.owner):
obj_url = api_url(obj)
person = self.factory.makePerson(member_of=[team])
webservice = webservice_for_person(
person, permission=OAuthPermission.WRITE_PUBLIC)
entry = webservice.get(obj_url, api_version="devel").jsonBody()
response = webservice.patch(
entry["self_link"], "application/json",
dumps(dict(relative_build_score=100)))
self.assertEqual(209, response.status)
self.assertEqual(100, response.jsonBody()["relative_build_score"])
def test_score_packageset_readable(self):
# A packageset's build score is readable by anyone.
packageset = self.factory.makePackageset()
self.assertScoreReadableByAnyone(packageset)
def test_score_packageset_forbids_non_buildd_admin(self):
# Being the owner of a packageset is not enough to allow changing
# its build score, since this affects a site-wide resource.
packageset = self.factory.makePackageset()
self.assertScoreNotWriteableByOwner(packageset)
def test_score_packageset_allows_buildd_admin(self):
# Buildd admins can change a packageset's build score.
packageset = self.factory.makePackageset()
self.assertScoreWriteableByTeam(
packageset, getUtility(ILaunchpadCelebrities).buildd_admin)
def test_score_archive_readable(self):
# An archive's build score is readable by anyone.
archive = self.factory.makeArchive()
self.assertScoreReadableByAnyone(archive)
def test_score_archive_forbids_non_buildd_admin(self):
# Being the owner of an archive is not enough to allow changing its
# build score, since this affects a site-wide resource.
archive = self.factory.makeArchive()
self.assertScoreNotWriteableByOwner(archive)
def test_score_archive_allows_buildd_and_commercial_admin(self):
# Buildd and commercial admins can change an archive's build score.
archive = self.factory.makeArchive()
self.assertScoreWriteableByTeam(
archive, getUtility(ILaunchpadCelebrities).buildd_admin)
with anonymous_logged_in():
self.assertScoreWriteableByTeam(
archive, getUtility(ILaunchpadCelebrities).commercial_admin)
class TestBuildPackageJobPostProcess(TestCaseWithFactory):
layer = DatabaseFunctionalLayer
def makeBuildJob(self, pocket="RELEASE"):
build = self.factory.makeBinaryPackageBuild(pocket=pocket)
return build.queueBuild()
def test_release_job(self):
job = self.makeBuildJob()
build = getUtility(IBinaryPackageBuildSet).getByQueueEntry(job)
self.assertTrue(BuildPackageJob.postprocessCandidate(job, None))
self.assertEqual(BuildStatus.NEEDSBUILD, build.status)
def test_security_job_is_failed(self):
job = self.makeBuildJob(pocket="SECURITY")
build = getUtility(IBinaryPackageBuildSet).getByQueueEntry(job)
BuildPackageJob.postprocessCandidate(job, DevNullLogger())
self.assertEqual(BuildStatus.FAILEDTOBUILD, build.status)
def test_obsolete_job_without_flag_is_failed(self):
job = self.makeBuildJob()
build = getUtility(IBinaryPackageBuildSet).getByQueueEntry(job)
distroseries = build.distro_arch_series.distroseries
removeSecurityProxy(distroseries).status = SeriesStatus.OBSOLETE
BuildPackageJob.postprocessCandidate(job, DevNullLogger())
self.assertEqual(BuildStatus.FAILEDTOBUILD, build.status)
def test_obsolete_job_with_flag_is_not_failed(self):
job = self.makeBuildJob()
build = getUtility(IBinaryPackageBuildSet).getByQueueEntry(job)
distroseries = build.distro_arch_series.distroseries
archive = build.archive
removeSecurityProxy(distroseries).status = SeriesStatus.OBSOLETE
removeSecurityProxy(archive).permit_obsolete_series_uploads = True
BuildPackageJob.postprocessCandidate(job, DevNullLogger())
self.assertEqual(BuildStatus.NEEDSBUILD, build.status)
|
abramhindle/UnnaturalCodeFork
|
python/testdata/launchpad/lib/lp/soyuz/tests/test_buildpackagejob.py
|
Python
|
agpl-3.0
| 22,248
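The scoring assertions above all reduce to one sum: a pocket base score plus component and urgency contributions plus any archive or packageset bonus or penalty. Written out as a plain helper (a sketch using the constants already imported in the test module, mirroring assertCorrectScore rather than Launchpad's own implementation):

def expected_score(pocket, component, urgency, other_bonus=0):
    # pocket + component + urgency + per-archive/packageset adjustments
    return (SCORE_BY_POCKET[PackagePublishingPocket.items[pocket.upper()]]
            + SCORE_BY_COMPONENT[component]
            + SCORE_BY_URGENCY[SourcePackageUrgency.items[urgency.upper()]]
            + other_bonus)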
|
# -*- coding: utf-8 -*-
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
from . import nuts_import
|
linkitspa/l10n-italy
|
l10n_it_location_nuts/wizard/__init__.py
|
Python
|
agpl-3.0
| 116
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016-2017 Ircam
# Copyright (c) 2016-2017 Guillaume Pellerin
# Copyright (c) 2016-2017 Emilie Zawadzki
# This file is part of mezzanine-organization.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.core.management.base import BaseCommand
from organization.network.models import Organization
class Command(BaseCommand):
help = "Create a default organization if it doesn't exist."
name = 'Default Organization'
def handle(self, *args, **options):
organizations = Organization.objects.filter(is_host=True)
if not organizations:
organization = Organization(name=self.name, is_host=True)
organization.save()
print('Default organization created')
|
Ircam-Web/mezzanine-organization
|
organization/network/management/commands/create-default-organization.py
|
Python
|
agpl-3.0
| 1,370
|
from glob import glob
import logging
import os.path
from askomics.libaskomics.ParamManager import ParamManager
from askomics.libaskomics.source_file.SourceFileGff import SourceFileGff
from askomics.libaskomics.source_file.SourceFileTsv import SourceFileTsv
from askomics.libaskomics.source_file.SourceFileTtl import SourceFileTtl
from askomics.libaskomics.source_file.SourceFileBed import SourceFileBed
class SourceFileConvertor(ParamManager):
"""
A SourceFileConvertor instance provides methods to:
        - display an overview of the tabulated files the user wants to convert in AskOmics.
        - convert the tabulated files into turtle files, taking care of:
* the format of the data already in the database
(detection of new and missing headers in the user files).
* the abstraction generation corresponding to the header of the user files.
            * the generation of the part of the domain code that can be automatically generated.
"""
def __init__(self, settings, session):
ParamManager.__init__(self, settings, session)
self.manage_rdf_format = ['application/rdf+xml','owl','rdf','n3','nt','json-ld']
self.log = logging.getLogger(__name__)
def get_source_files(self, selectedFiles, forced_type=None, uri_set=None):
"""Get all source files
:returns: a list of source file
:rtype: list
"""
src_dir = self.get_upload_directory()
paths = glob(src_dir + '/*')
files = []
for path in paths:
(filepath, filename) = os.path.split(path)
if filename not in selectedFiles:
continue
try:
file_type = self.guess_file_type(path)
if file_type == 'gff' or forced_type == 'gff':
files.append(SourceFileGff(self.settings, self.session, path, uri_set=uri_set))
elif file_type == 'ttl' or forced_type == 'ttl':
files.append(SourceFileTtl(self.settings, self.session, path))
elif file_type in self.manage_rdf_format or forced_type in self.manage_rdf_format:
                #### TODO: manage *** forced_type ***
files.append(SourceFileTtl(self.settings, self.session, path, file_type))
elif file_type == 'bed' or forced_type == 'bed':
files.append(SourceFileBed(self.settings, self.session, path, uri_set=uri_set))
elif file_type == 'csv' or forced_type == 'csv':
files.append(SourceFileTsv(self.settings, self.session, path, int(self.settings["askomics.overview_lines_limit"]), uri_set=uri_set))
else:
raise Exception("AskOmics manage only rdf format file: "+str(self.manage_rdf_format)+ " file_type:"+file_type)
except Exception as inst:
raise Exception(str(inst)+" file:"+filename)
return files
@staticmethod
def guess_file_type(filepath):
"""Guess the file type in function of their extention
:param filepath: path of file
:type filepath: string
:returns: file type
:rtype: string
"""
extension = os.path.splitext(filepath)[1]
if extension.lower() in ('.gff', '.gff2', '.gff3'):
return 'gff'
if extension.lower() == '.ttl':
return 'ttl'
if extension.lower() in ('.rdf-xml','.xml','.rdf'):
return 'application/rdf+xml'
if extension.lower() == '.owl':
return 'owl'
if extension.lower() in ('.jsonld','.json'):
return 'json-ld'
if extension.lower() == '.n3':
return 'n3'
if extension.lower() == '.nt':
return 'nt'
if extension.lower() == '.bed':
return 'bed'
return 'csv'
|
ofilangi/askomics
|
askomics/libaskomics/SourceFileConvertor.py
|
Python
|
agpl-3.0
| 3,882
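guess_file_type above keys purely off the file extension and falls back to 'csv' for anything it does not recognise. A short usage sketch (not part of the AskOmics test suite; the paths are illustrative):

from askomics.libaskomics.SourceFileConvertor import SourceFileConvertor

print(SourceFileConvertor.guess_file_type('/data/annotations.gff3'))  # -> 'gff'
print(SourceFileConvertor.guess_file_type('/data/ontology.owl'))      # -> 'owl'
print(SourceFileConvertor.guess_file_type('/data/expression.tsv'))    # -> 'csv' (default)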
|
"""
WSGI config for conceptserver project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "conceptserver.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
|
pmitros/concept-tag-server
|
conceptserver/conceptserver/wsgi.py
|
Python
|
agpl-3.0
| 1,148
|
"""
Install Python and Node prerequisites.
"""
import hashlib
import io
import os
import re
import subprocess
import sys
from distutils import sysconfig
import six
from paver.easy import BuildFailure, sh, task
from six.moves import range
from .utils.envs import Env
from .utils.timer import timed
PREREQS_STATE_DIR = os.getenv('PREREQ_CACHE_DIR', Env.REPO_ROOT / '.prereqs_cache')
NO_PREREQ_MESSAGE = "NO_PREREQ_INSTALL is set, not installing prereqs"
NO_PYTHON_UNINSTALL_MESSAGE = 'NO_PYTHON_UNINSTALL is set. No attempts will be made to uninstall old Python libs.'
COVERAGE_REQ_FILE = 'requirements/edx/coverage.txt'
# If you make any changes to this list you also need to make
# a corresponding change to circle.yml, which is how the python
# prerequisites are installed for builds on circleci.com
if 'TOXENV' in os.environ:
PYTHON_REQ_FILES = ['requirements/edx/testing.txt']
else:
PYTHON_REQ_FILES = ['requirements/edx/development.txt']
# Developers can have private requirements, for local copies of github repos,
# or favorite debugging tools, etc.
PRIVATE_REQS = 'requirements/private.txt'
if os.path.exists(PRIVATE_REQS):
PYTHON_REQ_FILES.append(PRIVATE_REQS)
def str2bool(s):
s = str(s)
return s.lower() in ('yes', 'true', 't', '1')
def no_prereq_install():
"""
Determine if NO_PREREQ_INSTALL should be truthy or falsy.
"""
return str2bool(os.environ.get('NO_PREREQ_INSTALL', 'False'))
def no_python_uninstall():
""" Determine if we should run the uninstall_python_packages task. """
return str2bool(os.environ.get('NO_PYTHON_UNINSTALL', 'False'))
def create_prereqs_cache_dir():
"""Create the directory for storing the hashes, if it doesn't exist already."""
try:
os.makedirs(PREREQS_STATE_DIR)
except OSError:
if not os.path.isdir(PREREQS_STATE_DIR):
raise
def compute_fingerprint(path_list):
"""
Hash the contents of all the files and directories in `path_list`.
Returns the hex digest.
"""
hasher = hashlib.sha1()
for path_item in path_list:
# For directories, create a hash based on the modification times
# of first-level subdirectories
if os.path.isdir(path_item):
for dirname in sorted(os.listdir(path_item)):
path_name = os.path.join(path_item, dirname)
if os.path.isdir(path_name):
hasher.update(str(os.stat(path_name).st_mtime).encode('utf-8'))
# For files, hash the contents of the file
if os.path.isfile(path_item):
with io.open(path_item, "rb") as file_handle:
hasher.update(file_handle.read())
return hasher.hexdigest()
def prereq_cache(cache_name, paths, install_func):
"""
Conditionally execute `install_func()` only if the files/directories
specified by `paths` have changed.
If the code executes successfully (no exceptions are thrown), the cache
is updated with the new hash.
"""
# Retrieve the old hash
cache_filename = cache_name.replace(" ", "_")
cache_file_path = os.path.join(PREREQS_STATE_DIR, "{}.sha1".format(cache_filename))
old_hash = None
if os.path.isfile(cache_file_path):
with io.open(cache_file_path, "r") as cache_file:
old_hash = cache_file.read()
# Compare the old hash to the new hash
# If they do not match (either the cache hasn't been created, or the files have changed),
# then execute the code within the block.
new_hash = compute_fingerprint(paths)
if new_hash != old_hash:
install_func()
# Update the cache with the new hash
# If the code executed within the context fails (throws an exception),
# then this step won't get executed.
create_prereqs_cache_dir()
with io.open(cache_file_path, "wb") as cache_file:
# Since the pip requirement files are modified during the install
# process, we need to store the hash generated AFTER the installation
post_install_hash = compute_fingerprint(paths)
cache_file.write(post_install_hash.encode('utf-8'))
else:
print(u'{cache} unchanged, skipping...'.format(cache=cache_name))
def node_prereqs_installation():
"""
Configures npm and installs Node prerequisites
"""
# NPM installs hang sporadically. Log the installation process so that we
# determine if any packages are chronic offenders.
shard_str = os.getenv('SHARD', None)
if shard_str:
npm_log_file_path = '{}/npm-install.{}.log'.format(Env.GEN_LOG_DIR, shard_str)
else:
npm_log_file_path = '{}/npm-install.log'.format(Env.GEN_LOG_DIR)
npm_log_file = io.open(npm_log_file_path, 'wb')
npm_command = 'npm install --verbose'.split()
cb_error_text = "Subprocess return code: 1"
# Error handling around a race condition that produces "cb() never called" error. This
# evinces itself as `cb_error_text` and it ought to disappear when we upgrade
# npm to 3 or higher. TODO: clean this up when we do that.
try:
        # The implementation of Paver's `sh` function returns before the forked process
# actually returns. Using a Popen object so that we can ensure that
# the forked process has returned
proc = subprocess.Popen(npm_command, stderr=npm_log_file)
proc.wait()
except BuildFailure as error:
if cb_error_text in six.text_type(error):
print("npm install error detected. Retrying...")
proc = subprocess.Popen(npm_command, stderr=npm_log_file)
proc.wait()
else:
raise
print(u"Successfully installed NPM packages. Log found at {}".format(
npm_log_file_path
))
def python_prereqs_installation():
"""
Installs Python prerequisites
"""
for req_file in PYTHON_REQ_FILES:
pip_install_req_file(req_file)
def pip_install_req_file(req_file):
"""Pip install the requirements file."""
pip_cmd = 'pip install -q --disable-pip-version-check --exists-action w'
sh(u"{pip_cmd} -r {req_file}".format(pip_cmd=pip_cmd, req_file=req_file))
@task
@timed
def install_node_prereqs():
"""
Installs Node prerequisites
"""
if no_prereq_install():
print(NO_PREREQ_MESSAGE)
return
prereq_cache("Node prereqs", ["package.json"], node_prereqs_installation)
# To add a package to the uninstall list, just add it to this list! No need
# to touch any other part of this file.
PACKAGES_TO_UNINSTALL = [
"MySQL-python", # Because mysqlclient shares the same directory name
"South", # Because it interferes with Django 1.8 migrations.
"edxval", # Because it was bork-installed somehow.
"django-storages",
"django-oauth2-provider", # Because now it's called edx-django-oauth2-provider.
"edx-oauth2-provider", # Because it moved from github to pypi
"i18n-tools", # Because now it's called edx-i18n-tools
"moto", # Because we no longer use it and it conflicts with recent jsondiff versions
"python-saml", # Because python3-saml shares the same directory name
"pdfminer", # Replaced by pdfminer.six, which shares the same directory name
"pytest-faulthandler", # Because it was bundled into pytest
]
@task
@timed
def uninstall_python_packages():
"""
Uninstall Python packages that need explicit uninstallation.
Some Python packages that we no longer want need to be explicitly
uninstalled, notably, South. Some other packages were once installed in
ways that were resistant to being upgraded, like edxval. Also uninstall
them.
"""
if no_python_uninstall():
print(NO_PYTHON_UNINSTALL_MESSAGE)
return
# So that we don't constantly uninstall things, use a hash of the packages
# to be uninstalled. Check it, and skip this if we're up to date.
hasher = hashlib.sha1()
hasher.update(repr(PACKAGES_TO_UNINSTALL).encode('utf-8'))
expected_version = hasher.hexdigest()
state_file_path = os.path.join(PREREQS_STATE_DIR, "Python_uninstall.sha1")
create_prereqs_cache_dir()
if os.path.isfile(state_file_path):
with io.open(state_file_path) as state_file:
version = state_file.read()
if version == expected_version:
print('Python uninstalls unchanged, skipping...')
return
# Run pip to find the packages we need to get rid of. Believe it or not,
# edx-val is installed in a way that it is present twice, so we have a loop
# to really really get rid of it.
for _ in range(3):
uninstalled = False
frozen = sh("pip freeze", capture=True)
for package_name in PACKAGES_TO_UNINSTALL:
if package_in_frozen(package_name, frozen):
                # Uninstall the package
sh(u"pip uninstall --disable-pip-version-check -y {}".format(package_name))
uninstalled = True
if not uninstalled:
break
else:
# We tried three times and didn't manage to get rid of the pests.
print("Couldn't uninstall unwanted Python packages!")
return
# Write our version.
with io.open(state_file_path, "wb") as state_file:
state_file.write(expected_version.encode('utf-8'))
def package_in_frozen(package_name, frozen_output):
"""Is this package in the output of 'pip freeze'?"""
# Look for either:
#
# PACKAGE-NAME==
#
# or:
#
# blah_blah#egg=package_name-version
#
pattern = r"(?mi)^{pkg}==|#egg={pkg_under}-".format(
pkg=re.escape(package_name),
pkg_under=re.escape(package_name.replace("-", "_")),
)
return bool(re.search(pattern, frozen_output))
@task
@timed
def install_coverage_prereqs():
""" Install python prereqs for measuring coverage. """
if no_prereq_install():
print(NO_PREREQ_MESSAGE)
return
pip_install_req_file(COVERAGE_REQ_FILE)
@task
@timed
def install_python_prereqs():
"""
Installs Python prerequisites.
"""
if no_prereq_install():
print(NO_PREREQ_MESSAGE)
return
uninstall_python_packages()
# Include all of the requirements files in the fingerprint.
files_to_fingerprint = list(PYTHON_REQ_FILES)
# Also fingerprint the directories where packages get installed:
# ("/edx/app/edxapp/venvs/edxapp/lib/python2.7/site-packages")
files_to_fingerprint.append(sysconfig.get_python_lib())
# In a virtualenv, "-e installs" get put in a src directory.
src_dir = os.path.join(sys.prefix, "src")
if os.path.isdir(src_dir):
files_to_fingerprint.append(src_dir)
# Also fingerprint this source file, so that if the logic for installations
# changes, we will redo the installation.
this_file = __file__
if this_file.endswith(".pyc"):
this_file = this_file[:-1] # use the .py file instead of the .pyc
files_to_fingerprint.append(this_file)
prereq_cache("Python prereqs", files_to_fingerprint, python_prereqs_installation)
@task
@timed
def install_prereqs():
"""
Installs Node and Python prerequisites
"""
if no_prereq_install():
print(NO_PREREQ_MESSAGE)
return
if not str2bool(os.environ.get('SKIP_NPM_INSTALL', 'False')):
install_node_prereqs()
install_python_prereqs()
log_installed_python_prereqs()
print_devstack_warning()
def log_installed_python_prereqs():
""" Logs output of pip freeze for debugging. """
sh(u"pip freeze > {}".format(Env.GEN_LOG_DIR + "/pip_freeze.log"))
return
def print_devstack_warning():
if Env.USING_DOCKER: # pragma: no cover
print("********************************************************************************")
print("* WARNING: Mac users should run this from both the lms and studio shells")
print("* in docker devstack to avoid startup errors that kill your CPU.")
print("* For more details, see:")
print("* https://github.com/edx/devstack#docker-is-using-lots-of-cpu-time-when-it-should-be-idle")
print("********************************************************************************")
|
cpennington/edx-platform
|
pavelib/prereqs.py
|
Python
|
agpl-3.0
| 12,376
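prereq_cache is the reusable piece here: it fingerprints a set of paths and only calls the install function when that fingerprint changes, storing the post-install hash under PREREQS_STATE_DIR. A hedged usage sketch, with an illustrative requirements file that is an assumption rather than an actual edx-platform file:

def install_docs_prereqs():
    # hypothetical requirements file used only for this illustration
    pip_install_req_file('requirements/edx/docs.txt')

# Re-runs install_docs_prereqs only when the watched file's hash changes.
prereq_cache("Docs prereqs", ["requirements/edx/docs.txt"], install_docs_prereqs)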
|
"""
Serialize data to/from JSON
"""
# Avoid shadowing the standard library json module
from __future__ import absolute_import
import datetime
import decimal
import json
import sys
from django.core.serializers.base import DeserializationError
from django.core.serializers.python import Serializer as PythonSerializer
from keops.core.serializers.python import Deserializer as PythonDeserializer
from django.utils import six
from django.utils.timezone import is_aware
from django.utils.translation import ugettext as _
class Serializer(PythonSerializer):
"""
Convert a queryset to JSON.
"""
internal_use_only = False
def start_serialization(self):
if json.__version__.split('.') >= ['2', '1', '3']:
# Use JS strings to represent Python Decimal instances (ticket #16850)
self.options.update({'use_decimal': False})
self._current = None
self.json_kwargs = self.options.copy()
self.json_kwargs.pop('stream', None)
self.json_kwargs.pop('fields', None)
self.stream.write("[")
def end_serialization(self):
if self.options.get("indent"):
self.stream.write("\n")
self.stream.write("]")
if self.options.get("indent"):
self.stream.write("\n")
def end_object(self, obj):
# self._current has the field data
indent = self.options.get("indent")
if not self.first:
self.stream.write(",")
if not indent:
self.stream.write(" ")
if indent:
self.stream.write("\n")
json.dump(self.get_dump_object(obj), self.stream,
cls=DjangoJSONEncoder, **self.json_kwargs)
self._current = None
def getvalue(self):
# Grand-parent super
return super(PythonSerializer, self).getvalue()
def Deserializer(stream_or_string, **options):
"""
Deserialize a stream or string of JSON data.
"""
if not isinstance(stream_or_string, (bytes, six.string_types)):
stream_or_string = stream_or_string.read()
if isinstance(stream_or_string, bytes):
stream_or_string = stream_or_string.decode('utf-8')
try:
objects = json.loads(stream_or_string)
for obj in PythonDeserializer(objects, **options):
yield obj
except GeneratorExit:
raise
except Exception as e:
# Map to deserializer error
six.reraise(DeserializationError, DeserializationError(e), sys.exc_info()[2])
class DjangoJSONEncoder(json.JSONEncoder):
"""
JSONEncoder subclass that knows how to encode date/time and decimal types.
"""
def default(self, o):
# See "Date Time String Format" in the ECMA-262 specification.
if isinstance(o, datetime.datetime):
r = o.isoformat()
if o.microsecond:
r = r[:23] + r[26:]
if r.endswith('+00:00'):
r = r[:-6] + 'Z'
return r
elif isinstance(o, datetime.date):
return o.isoformat()
elif isinstance(o, datetime.time):
if is_aware(o):
raise ValueError("JSON can't represent timezone-aware times.")
r = o.isoformat()
if o.microsecond:
r = r[:12]
return r
elif isinstance(o, decimal.Decimal):
return str(o)
else:
return super(DjangoJSONEncoder, self).default(o)
# Older, deprecated class name (for backwards compatibility purposes).
DateTimeAwareJSONEncoder = DjangoJSONEncoder
|
mrmuxl/keops
|
keops/core/serializers/json.py
|
Python
|
agpl-3.0
| 3,575
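DjangoJSONEncoder above widens json's default handling to dates, times and decimals. A small usage sketch, assuming the DjangoJSONEncoder defined in the module above is importable:

import datetime
import decimal
import json

payload = {"when": datetime.datetime(2020, 1, 1, 12, 30), "price": decimal.Decimal("9.99")}
print(json.dumps(payload, cls=DjangoJSONEncoder))
# -> {"when": "2020-01-01T12:30:00", "price": "9.99"}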
|
print "Start."
import requests
from xml.dom import minidom
url="https://services.techhouse.dk/webTourManager/0.6/Service.svc/basHttps"
#headers = {'content-type': 'application/soap+xml'}
headers = {'content-type': 'text/xml', 'SOAPAction' : 'https://services.techhouse.dk/webTourManager/IPlanning/Login'}
body = """<soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/" xmlns:web="https://services.techhouse.dk/webTourManager">
<soapenv:Header/>
<soapenv:Body>
<web:Login>
<!--Optional:-->
<web:OwnerIDno>107</web:OwnerIDno>
<!--Optional:-->
<web:Alias>testonly</web:Alias>
<!--Optional:-->
<web:AuthKey>4057640D-4C9B-424C-A88C-DC9EE42B1032</web:AuthKey>
</web:Login>
</soapenv:Body>
</soapenv:Envelope>"""
response = requests.post(url,data=body,headers=headers)
print (response.content)
c = response.cookies
headers2 = {'content-type': 'text/xml', 'SOAPAction' : 'https://services.techhouse.dk/webTourManager/ITour/Tour_GetFromStatusAndDateRange_WithSalesperson'}
body2 = """<soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/" xmlns:web="https://services.techhouse.dk/webTourManager">
<soapenv:Header/>
<soapenv:Body>
<web:Tour_GetFromStatusAndDateRange_WithSalesperson>
<web:Status>10</web:Status>
<web:From>2016-09-01</web:From>
<web:To>2016-09-30</web:To>
<web:SalespersonID>JDa</web:SalespersonID>
</web:Tour_GetFromStatusAndDateRange_WithSalesperson>
</soapenv:Body>
</soapenv:Envelope>"""
doc = minidom.parseString(response.content)
authenticated = doc.getElementsByTagName("b:IsAuthenticated")[0]
if authenticated.firstChild.data == "true":
response2 = requests.post(url,data=body2,headers=headers2,cookies=c)
print (response2.content)
doc = minidom.parseString(response2.content)
tours = doc.getElementsByTagName("b:TourIDnoItem")
for TourIDnoItem in tours:
tourid = TourIDnoItem.getElementsByTagName("b:TourIDno")[0]
print(tourid.firstChild.data)
print "Done."
|
sl2017/campos
|
campos_transportation/misc/webtour_test3.py
|
Python
|
agpl-3.0
| 2,071
|
from sys import path
import sys
import pdb
import collections
import types
import re
path.insert(0, '.')
from test_util import shard_table
path.insert(0, "../../drivers/python")
from os import environ
import rethinkdb as r
# JSPORT = int(sys.argv[1])
CPPPORT = int(sys.argv[2])
CLUSTER_PORT = int(sys.argv[3])
BUILD = sys.argv[4]
# -- utilities --
failure_count = 0
def print_test_failure(test_name, test_src, message):
global failure_count
failure_count = failure_count + 1
print ''
print "TEST FAILURE: %s" % test_name.encode('utf-8')
print "TEST BODY: %s" % test_src.encode('utf-8')
print message
print ''
class Lst:
def __init__(self, lst):
self.lst = lst
def __eq__(self, other):
if not hasattr(other, '__iter__'):
return False
i = 0
for row in other:
if i >= len(self.lst) or (self.lst[i] != row):
return False
i += 1
if i != len(self.lst):
return False
return True
def __repr__(self):
return repr(self.lst)
class Bag(Lst):
def __init__(self, lst):
self.lst = sorted(lst)
def __eq__(self, other):
if not hasattr(other, '__iter__'):
return False
other = sorted(other)
if len(self.lst) != len(other):
return False
for i in xrange(len(self.lst)):
fun = eq(self.lst[i])
if not fun(other[i]):
return False
return True
class Dct:
def __init__(self, dct):
self.dct = dct
def __eq__(self, other):
if not isinstance(other, types.DictType):
return False
for key in self.dct.keys():
if not key in other.keys():
return False
fun = eq(self.dct[key])
if not fun(other[key]):
return False
return True
def __repr__(self):
return repr(self.dct)
class Err:
def __init__(self, err_type=None, err_msg=None, err_frames=None, regex=False):
self.etyp = err_type
self.emsg = err_msg
self.frames = None #err_frames # TODO: test frames
self.regex = regex
def __eq__(self, other):
if not isinstance(other, Exception):
return False
if self.etyp and self.etyp != other.__class__.__name__:
return False
if self.regex:
return re.match(self.emsg, other.message)
else:
# Strip "offending object" from the error message
other.message = re.sub(":\n.*", ".", other.message, flags=re.M|re.S)
other.message = re.sub("\nFailed assertion:.*", "", other.message, flags=re.M|re.S)
if self.emsg and self.emsg != other.message:
return False
if self.frames and self.frames != other.frames:
return False
return True
def __repr__(self):
return "%s(%s\"%s\")" % (self.etyp, self.regex and '~' or '', repr(self.emsg) or '')
class Arr:
def __init__(self, length, thing=None):
self.length = length
self.thing = thing
def __eq__(self, arr):
if not isinstance(arr, list):
return False
if not self.length == len(arr):
return False
if self.thing is None:
return True
return all([v == self.thing for v in arr])
def __repr__(self):
return "arr(%d, %s)" % (self.length, repr(self.thing))
class Uuid:
def __eq__(self, thing):
if not isinstance(thing, types.StringTypes):
return False
return re.match("[a-z0-9]{8}-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{12}", thing) != None
def __repr__(self):
return "uuid()"
class Int:
def __init__(self, i):
self.i = i
def __eq__(self, thing):
return isinstance(thing, int) and (self.i == thing)
class Float:
def __init__(self, f):
self.f = f
def __eq__(self, thing):
return isinstance(thing, float) and (self.f == thing)
# -- Curried output test functions --
def eq(exp):
if exp == ():
return lambda x: True
if isinstance(exp, list):
exp = Lst(exp)
elif isinstance(exp, dict):
exp = Dct(exp)
def sub(val):
if isinstance(val, str):
# Remove additional error info that creeps in in debug mode
val = re.sub("\nFailed assertion:.*", "", val, flags=re.M|re.S)
if not (val == exp):
return False
else:
return True
return sub
class PyTestDriver:
# Set up connections to each database server
def connect(self):
#print 'Connecting to JS server on port ' + str(JSPORT)
#self.js_conn = r.connect(host='localhost', port=JSPORT)
print 'Connecting to CPP server on port ' + str(CPPPORT)
print ''
self.cpp_conn = r.connect(host='localhost', port=CPPPORT)
self.scope = {}
def define(self, expr):
exec(expr, globals(), self.scope)
def run(self, src, expected, name):
# Try to build the expected result
if expected:
exp_val = eval(expected, dict(globals().items() + self.scope.items()))
else:
# This test might not have come with an expected result, we'll just ensure it doesn't fail
#exp_fun = lambda v: True
exp_val = ()
# If left off the comparison function is equality by default
#if not isinstance(exp_fun, types.FunctionType):
# exp_fun = eq(exp_fun)
# Try to build the test
try:
query = eval(src, dict(globals().items() + self.scope.items()))
except Exception as err:
if not isinstance(exp_val, Err):
print_test_failure(name, src, "Error eval'ing test src:\n\t%s" % repr(err))
elif not eq(exp_val)(err):
print_test_failure(name, src,
"Error eval'ing test src not equal to expected err:\n\tERROR: %s\n\tEXPECTED: %s" %
(repr(err), repr(exp_val))
)
return # Can't continue with this test if there is no test query
# Try actually running the test
try:
cppres = query.run(self.cpp_conn)
# And comparing the expected result
if not eq(exp_val)(cppres):
print_test_failure(name, src,
"CPP result is not equal to expected result:\n\tVALUE: %s\n\tEXPECTED: %s" %
(repr(cppres), repr(exp_val))
)
except Exception as err:
if not isinstance(exp_val, Err):
print_test_failure(name, src, "Error running test on CPP server:\n\t%s" % repr(err))
elif not eq(exp_val)(err):
print_test_failure(name, src,
"Error running test on CPP server not equal to expected err:\n\tERROR: %s\n\tEXPECTED: %s" %
(repr(err), repr(exp_val))
)
driver = PyTestDriver()
driver.connect()
# Emitted test code will consist of calls to this function
def test(query, expected, name):
if expected == '':
expected = None
driver.run(query, expected, name)
# Emitted test code can call this function to define variables
def define(expr):
driver.define(expr)
# Emitted test code can call this function to set bag equality on a list
def bag(lst):
return Bag(lst)
# Emitted test code can call this function to indicate expected error output
def err(err_type, err_msg=None, frames=None):
return Err(err_type, err_msg, frames)
def err_regex(err_type, err_msg=None, frames=None):
return Err(err_type, err_msg, frames, True)
def arrlen(length, thing=None):
return Arr(length, thing)
def uuid():
return Uuid()
def shard(table_name):
shard_table(CLUSTER_PORT, BUILD, table_name)
def int_cmp(i):
return Int(i)
def float_cmp(f):
return Float(f)
def the_end():
global failure_count
if failure_count > 0:
sys.exit("Failed %d tests" % failure_count)
|
rocio/rethinkdb
|
test/rql_test/drivers/driver.py
|
Python
|
agpl-3.0
| 8,141
|
# pylmm is a python-based linear mixed-model solver with applications to GWAS
# Copyright (C) 2013 Nicholas A. Furlotte (nick.furlotte@gmail.com)
# Copyright (C) 2015 Pjotr Prins (pjotr.prins@thebird.nl)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
import numpy as np
import struct
import pdb
class plink:
def __init__(self,fbase,kFile=None,phenoFile=None,type='b',normGenotype=True,readKFile=False):
self.fbase = fbase
self.type = type
self.indivs = self.getIndivs(self.fbase,type)
self.kFile = kFile
self.phenos = None
self.normGenotype = normGenotype
self.phenoFile = phenoFile
# Originally I was using the fastLMM style that has indiv IDs embedded.
# NOW I want to use this module to just read SNPs so I'm allowing
# the programmer to turn off the kinship reading.
self.readKFile = readKFile
if self.kFile:
self.K = self.readKinship(self.kFile)
elif os.path.isfile("%s.kin" % fbase):
self.kFile = "%s.kin" %fbase
if self.readKFile:
self.K = self.readKinship(self.kFile)
else:
self.kFile = None
self.K = None
self.getPhenos(self.phenoFile)
self.fhandle = None
self.snpFileHandle = None
def __del__(self):
if self.fhandle: self.fhandle.close()
if self.snpFileHandle: self.snpFileHandle.close()
def getSNPIterator(self):
if not self.type == 'b':
sys.stderr.write("Have only implemented this for binary plink files (bed)\n")
return
# get the number of snps
file = self.fbase + '.bim'
i = 0
f = open(file,'r')
for line in f: i += 1
f.close()
self.numSNPs = i
self.have_read = 0
self.snpFileHandle = open(file,'r')
self.BytestoRead = self.N / 4 + (self.N % 4 and 1 or 0)
self._formatStr = 'c'*self.BytestoRead
file = self.fbase + '.bed'
self.fhandle = open(file,'rb')
magicNumber = self.fhandle.read(2)
order = self.fhandle.read(1)
if not order == '\x01':
sys.stderr.write("This is not in SNP major order - you did not handle this case\n")
raise StopIteration
return self
def __iter__(self):
return self.getSNPIterator()
def next(self):
if self.have_read == self.numSNPs:
raise StopIteration
X = self.fhandle.read(self.BytestoRead)
XX = [bin(ord(x)) for x in struct.unpack(self._formatStr,X)]
self.have_read += 1
return self.formatBinaryGenotypes(XX,self.normGenotype),self.snpFileHandle.readline().strip().split()[1]
def formatBinaryGenotypes(self,X,norm=True):
D = { \
'00': 0.0, \
'10': 0.5, \
'11': 1.0, \
'01': np.nan \
}
D_tped = { \
'00': '1 1', \
'10': '1 2', \
'11': '2 2', \
'01': '0 0' \
}
#D = D_tped
G = []
for x in X:
if not len(x) == 10:
xx = x[2:]
x = '0b' + '0'*(8 - len(xx)) + xx
a,b,c,d = (x[8:],x[6:8],x[4:6],x[2:4])
L = [D[y] for y in [a,b,c,d]]
G += L
# only take the leading values because whatever is left should be null
G = G[:self.N]
G = np.array(G)
if norm:
G = self.normalizeGenotype(G)
return G
def normalizeGenotype(self,G):
# print "Before",G
# print G.shape
print "call input.normalizeGenotype"
raise "This should not be used"
x = True - np.isnan(G)
m = G[x].mean()
s = np.sqrt(G[x].var())
G[np.isnan(G)] = m
if s == 0: G = G - m
else: G = (G - m) / s
# print "After",G
return G
def getPhenos(self,phenoFile=None):
if not phenoFile:
self.phenoFile = phenoFile = self.fbase+".phenos"
if not os.path.isfile(phenoFile):
sys.stderr.write("Could not find phenotype file: %s\n" % (phenoFile))
return
f = open(phenoFile,'r')
keys = []
P = []
for line in f:
v = line.strip().split()
keys.append((v[0],v[1]))
P.append([(x == 'NA' or x == '-9') and np.nan or float(x) for x in v[2:]])
f.close()
P = np.array(P)
# reorder to match self.indivs
D = {}
L = []
for i in range(len(keys)):
D[keys[i]] = i
for i in range(len(self.indivs)):
if not D.has_key(self.indivs[i]):
continue
L.append(D[self.indivs[i]])
P = P[L,:]
self.phenos = P
return P
def getIndivs(self,base,type='b'):
if type == 't':
famFile = "%s.tfam" % base
else:
famFile = "%s.fam" % base
keys = []
i = 0
f = open(famFile,'r')
for line in f:
v = line.strip().split()
famId = v[0]
indivId = v[1]
k = (famId.strip(),indivId.strip())
keys.append(k)
i += 1
f.close()
self.N = len(keys)
sys.stderr.write("Read %d individuals from %s\n" % (self.N, famFile))
return keys
def readKinship(self,kFile):
# Assume the fastLMM style
# This will read in the kinship matrix and then reorder it
# according to self.indivs - additionally throwing out individuals
# that are not in both sets
if self.indivs == None or len(self.indivs) == 0:
sys.stderr.write("Did not read any individuals so can't load kinship\n")
return
sys.stderr.write("Reading kinship matrix from %s\n" % (kFile) )
f = open(kFile,'r')
# read indivs
v = f.readline().strip().split("\t")[1:]
keys = [tuple(y.split()) for y in v]
D = {}
for i in range(len(keys)): D[keys[i]] = i
# read matrix
K = []
for line in f:
K.append([float(x) for x in line.strip().split("\t")[1:]])
f.close()
K = np.array(K)
# reorder to match self.indivs
L = []
KK = []
X = []
for i in range(len(self.indivs)):
if not D.has_key(self.indivs[i]):
X.append(self.indivs[i])
else:
KK.append(self.indivs[i])
L.append(D[self.indivs[i]])
K = K[L,:][:,L]
self.indivs = KK
self.indivs_removed = X
if len(self.indivs_removed):
sys.stderr.write("Removed %d individuals that did not appear in Kinship\n" % (len(self.indivs_removed)))
return K
def getCovariates(self,covFile=None):
if not os.path.isfile(covFile):
sys.stderr.write("Could not find covariate file: %s\n" % (phenoFile))
return
f = open(covFile,'r')
keys = []
P = []
for line in f:
v = line.strip().split()
keys.append((v[0],v[1]))
P.append([x == 'NA' and np.nan or float(x) for x in v[2:]])
f.close()
P = np.array(P)
# reorder to match self.indivs
D = {}
L = []
for i in range(len(keys)):
D[keys[i]] = i
for i in range(len(self.indivs)):
if not D.has_key(self.indivs[i]): continue
L.append(D[self.indivs[i]])
P = P[L,:]
return P
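# --- Illustrative note (not part of the original module) ---
# formatBinaryGenotypes() above unpacks plink .bed data: each byte packs four
# genotypes as 2-bit codes read from the low bits upward.  A worked example
# for the byte 0b11011000:
#
#   x = '0b11011000'                        # bin(ord(byte)), already 8 bits wide
#   a, b, c, d = x[8:], x[6:8], x[4:6], x[2:4]
#   # a, b, c, d == '00', '10', '01', '11'
#   # D maps these to 0.0, 0.5, nan (missing) and 1.0 respectively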
|
genenetwork/pylmm_gn2
|
pylmm_gn2/input.py
|
Python
|
agpl-3.0
| 8,358
|
##
# Copyright (C) 2013 Jessica Tallon & Matt Molyneaux
#
# This file is part of Inboxen.
#
# Inboxen is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Inboxen is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Inboxen. If not, see <http://www.gnu.org/licenses/>.
##
from datetime import datetime
from pytz import utc
from django.utils.translation import ugettext as _
from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from django.shortcuts import render
from django.contrib.admin.views.decorators import staff_member_required
from inboxen.models import BlogPost
@staff_member_required
def add(request):
error = ""
if request.method == "POST":
if not ("title" in request.POST or "body" in request.POST):
error = _("You need to have a title and the blog's body")
else:
if "draft" in request.POST and request.POST["draft"] == "melon":
draft = True
else:
draft = False
post = BlogPost(
subject=request.POST["title"],
body=request.POST["body"],
date=datetime.now(utc),
author=request.user,
modified=datetime.now(utc),
draft=draft
)
post.save()
return HttpResponseRedirect(reverse('blog'))
context = {
"error":error,
"headline":_("Add Post"),
}
return render(request, "blog/add.html", context)
|
Inboxen/website
|
views/blog/add.py
|
Python
|
agpl-3.0
| 2,013
|
from django.conf.urls import url
from django.contrib import admin
from django.core.urlresolvers import reverse
from django.forms import ModelForm
from django.http import HttpResponseRedirect
from livinglots import get_owner_contact_model
from .admin_views import MakeAliasesView
from .models import Alias
class OwnerAdminMixin(object):
actions = ('make_aliases',)
def aliases_summary(self, obj):
return ', '.join(obj.aliases.all().values_list('name', flat=True))
aliases_summary.short_description = 'AKA'
def make_aliases(self, request, queryset):
ids = queryset.values_list('pk', flat=True)
ids = [str(id) for id in ids]
return HttpResponseRedirect(reverse('admin:owners_owner_make_aliases') +
'?ids=%s' % (','.join(ids)))
def get_urls(self):
opts = self.model._meta
app_label, object_name = (opts.app_label, opts.object_name.lower())
prefix = "%s_%s" % (app_label, object_name)
urls = super(OwnerAdminMixin, self).get_urls()
my_urls = [
url(r'^make-aliases/', MakeAliasesView.as_view(),
name='%s_make_aliases' % prefix),
]
return my_urls + urls
class BaseOwnerForm(ModelForm):
def __init__(self, *args, **kwargs):
super(BaseOwnerForm, self).__init__(*args, **kwargs)
self.fields['default_contact'].queryset = \
get_owner_contact_model().objects.filter(owner=self.instance)
class BaseOwnerAdmin(OwnerAdminMixin, admin.ModelAdmin):
form = BaseOwnerForm
list_display = ('name', 'owner_type', 'aliases_summary',)
list_filter = ('owner_type',)
search_fields = ('name',)
class BaseOwnerContactAdmin(admin.ModelAdmin):
list_display = ('name', 'owner', 'phone', 'email',)
list_filter = ('owner__owner_type',)
search_fields = ('name', 'owner', 'email',)
class BaseOwnerGroupAdmin(admin.ModelAdmin):
list_display = ('name', 'owner_type',)
list_filter = ('owner_type',)
search_fields = ('name',)
class AliasAdmin(admin.ModelAdmin):
list_display = ('name',)
admin.site.register(Alias, AliasAdmin)
|
596acres/django-livinglots-owners
|
livinglots_owners/admin.py
|
Python
|
agpl-3.0
| 2,155
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
from rest_framework_swagger.views import get_swagger_view
from django.urls import re_path
from django.views.generic import RedirectView
from ureport.api.views import (
DashBlockDetails,
DashBlockList,
FeaturedPollList,
ImageDetails,
ImageList,
NewsItemDetails,
NewsItemList,
OrgDetails,
OrgList,
PollDetails,
PollList,
StoryDetails,
StoryList,
VideoDetails,
VideoList,
)
schema_view = get_swagger_view(title="API")
urlpatterns = [
re_path(r"^$", RedirectView.as_view(pattern_name="api.v1.docs", permanent=False), name="api.v1"),
re_path(r"^docs/", schema_view, name="api.v1.docs"),
re_path(r"^orgs/$", OrgList.as_view(), name="api.v1.org_list"),
re_path(r"^orgs/(?P<pk>[\d]+)/$", OrgDetails.as_view(), name="api.v1.org_details"),
re_path(r"^polls/org/(?P<org>[\d]+)/$", PollList.as_view(), name="api.v1.org_poll_list"),
re_path(r"^polls/org/(?P<org>[\d]+)/featured/$", FeaturedPollList.as_view(), name="api.v1.org_poll_fetured"),
re_path(r"^polls/(?P<pk>[\d]+)/$", PollDetails.as_view(), name="api.v1.poll_details"),
re_path(r"^news/org/(?P<org>[\d]+)/$", NewsItemList.as_view(), name="api.v1.org_newsitem_list"),
re_path(r"^news/(?P<pk>[\d]+)/$", NewsItemDetails.as_view(), name="api.v1.newsitem_details"),
re_path(r"^videos/org/(?P<org>[\d]+)/$", VideoList.as_view(), name="api.v1.org_video_list"),
re_path(r"^videos/(?P<pk>[\d]+)/$", VideoDetails.as_view(), name="api.v1.video_details"),
re_path(r"^assets/org/(?P<org>[\d]+)/$", ImageList.as_view(), name="api.v1.org_asset_list"),
re_path(r"^assets/(?P<pk>[\d]+)/$", ImageDetails.as_view(), name="api.v1.asset_details"),
re_path(r"^dashblocks/org/(?P<org>[\d]+)/$", DashBlockList.as_view(), name="api.v1.org_dashblock_list"),
re_path(r"^dashblocks/(?P<pk>[\d]+)/$", DashBlockDetails.as_view(), name="api.v1.dashblock_details"),
re_path(r"^stories/org/(?P<org>[\d]+)/$", StoryList.as_view(), name="api.v1.org_story_list"),
re_path(r"^stories/(?P<pk>[\d]+)/$", StoryDetails.as_view(), name="api.v1.story_details"),
]
|
rapidpro/ureport
|
ureport/api/urls.py
|
Python
|
agpl-3.0
| 2,209
|
# encoding: utf-8
from Errors import AssertionFail
from Dweb import Dweb
class Transportable(object):
"""
Encapsulate any kind of object that can be transported
Any subclass needs to implement:
_data getter and setter to return the data and to load from opaque bytes returned by transport. (Note SmartDict does this)
    __init__(data=None, url=None, ...) That can be called after raw data retrieved (default calls getter/setter for _data)
Fields
_url URL of data stored
_data Data (if its opaque)
_needsfetch True if need to fetch from Dweb
"""
def __init__(self, data=None, **ignoredoptions):
"""
Create a new Transportable element - storing its data and url if known.
Subclassed to initialize from that information
:param data: Any opaque bytes to store
:param url: Hash of those opaque bytes
"""
self._setdata(data) # Note will often call the @_data.setter function
self._url = None
def transport(self):
"""
Find transport for this object,
        if not yet stored, self._url will be None and the default transport is returned
returns: instance of subclass of Transport
"""
return Dweb.transport(self._url);
def _setdata(self, value):
self._data = value # Default behavior, assumes opaque bytes, and not a dict - note subclassed in SmartDict
def _getdata(self):
return self._data; # Default behavior - opaque bytes
def store(self, verbose=False):
"""
        Store this block on the underlying transport so it has a url for future use.
        Note this calls _getdata(), which is typically subclassed to build the outgoing data.
        Exception: UnicodeDecodeError - if data is binary
        :return: the url if already stored, otherwise self (with _url set after storing)
"""
if verbose: print "Storing", self.__class__.__name__
if self._url: # If already stored
return self._url # Return url
data = self._getdata() # Note assumes _getdata() will be sublassed to construct outgoing data
if (verbose): print "Transportable.store data=", data
self._url = self.transport().rawstore(data=data, verbose=verbose)
if verbose: print self.__class__.__name__, ".stored: url=", self._url
return self
def dirty(self):
"""
Mark an object as needing storing again, for example because one of its fields changed.
Flag as dirty so needs uploading - subclasses may delete other, now invalid, info like signatures
"""
self._url = None
@classmethod
def fetch(cls, url=None, verbose=False):
"""
        Fetch the data for a url; subclasses act on the data (typically storing it) and return the data, not self.
:param url: string of url to retrieve
:return: string - arbitrary bytes retrieved.
"""
if verbose: print "Transportable.fetch _url=",url
return Dweb.transport(url).rawfetch(url=url, verbose=verbose)
def file(self, verbose=False, contenttype=None, **options): #TODO-API
return { "Content-type": contenttype or "application/octet-stream",
"data": self._getdata() }
|
mitra42/dweb
|
dweb/Transportable.py
|
Python
|
agpl-3.0
| 3,238
|
# coding: utf-8
# In[158]:
import nest
import numpy as np
import sys
import os
nest.ResetKernel()
# In[159]:
unique_ID = sys.argv[1]
params_file = 'params' + unique_ID + '.param'
params = np.genfromtxt(params_file)
gL_Bas = params[0]*1000 #5.0e-3 #7.14293e-3
tauMem_Bas = params[1] #14.0
Cm_Bas = tauMem_Bas * gL_Bas
Vrest_Bas = params[2] #-70.0
reset_Bas = params[4]-params[3] #params[3] #-56.0 #-55.0
theta_Bas = params[4] #-50.0
tref_Bas = params[5] #0.1
E_Exc = 0.0
E_Inh = -70.0
tauSyn_BasExc = 3.0
tauSyn_BasInh = 1.5
a = params[6] #0.0 #8.0 #5.0 #3.2 #-0.8 #4.0 #nS Subthreshold adaptation conductance
#a = 0.
# moves threshold up
b = params[7] #0.0 #0.2 #0.055 #0.001 #0.1 #0.065 #0.0805 #nA Spike-triggered adaptation
#b = 0.
# -> decreases the slope of the f-I curve
# 0.3 - slope = 3-4 Hz/100 pA
# 0.15 - slope = 6-7 Hz/100 pA
# 0.065 -slope = 15 Hz/100 pA
delta_T = params[8] #2.0 #0.5 #0.8 #mV Slope factor
tau_w = params[9] #20 #100#144.0 #ms Adaptation time constant
# changes shape of f-I curve
# 44 steeper, 160 flatter
v_spike = theta_Bas + 5 * delta_T #mV Spike detection threshold
# In[160]:
# this is how we create a neuron
neuron = nest.Create('aeif_cond_exp')
cell_params_Bas = {"C_m": Cm_Bas,
#"tau_m": tauMem_Bas, # tau_m in pynn
"g_L": gL_Bas,
"E_L": Vrest_Bas,
"E_ex": E_Exc,
"E_in": E_Inh,
"tau_syn_ex": tauSyn_BasExc,
"tau_syn_in": tauSyn_BasInh,
"t_ref": tref_Bas,
"V_reset": reset_Bas,
"V_th": theta_Bas,
"a": a,
"b": b*1000,
"Delta_T": delta_T,
"tau_w": tau_w,
"V_peak": v_spike}
nest.SetStatus(neuron, cell_params_Bas)
nest.SetStatus(neuron, {'V_m':-60.0})
# In[161]:
current_list = [150., 200., 300., 600.]
trace_num = int(round(params[10]))
current = current_list[trace_num]
# In[162]:
current_gen = nest.Create('step_current_generator')
nest.SetStatus(current_gen, {'amplitude_times': [100., 900.], 'amplitude_values': [current, 0.0]})
# In[163]:
voltmeter = nest.Create('voltmeter', params={'interval':0.2})
spikedetector = nest.Create('spike_detector')
# In[164]:
nest.Connect(current_gen, neuron)
nest.Connect(voltmeter, neuron)
nest.Connect(neuron, spikedetector)
# In[165]:
try:
nest.Simulate(1100.1)
times = nest.GetStatus(voltmeter)[0]['events']['times'] # they start from 1.0
times = np.insert(times, 0, 0.)
voltages = nest.GetStatus(voltmeter)[0]['events']['V_m']
voltages = np.insert(voltages, 0, -60.)
except:
print("NESTERROR COUGHT")
times = list(np.linspace(0.0,1100.0, 5501))
voltages = list(np.linspace(-100, 0, 5501))
# In[166]:
spikes = nest.GetStatus(spikedetector)[0]['events']['times']
#import matplotlib.pyplot as plt
#plt.plot(times, voltages)
#plt.show()
# In[167]:
spike_filename = 'spike' + unique_ID + '.dat'
trace_filename = 'trace' + unique_ID + '.dat'
np.savetxt(spike_filename, spikes, fmt='%.2f')
np.savetxt(trace_filename, np.array([times, voltages]).T, fmt='%.2f')
|
KaliLab/optimizer
|
optimizer/new_test_files/adexpif_external_ca3_pc/teststeps_optim5.py
|
Python
|
lgpl-2.1
| 3,387
|
# -*- coding: iso-8859-1 -*-
import sys
import salome
salome.salome_init()
theStudy = salome.myStudy
import salome_notebook
notebook = salome_notebook.notebook
import os
from blocFissure import gmu
###
### GEOM component
###
import GEOM
from salome.geom import geomBuilder
import math
import SALOMEDS
geompy = geomBuilder.New(theStudy)
O = geompy.MakeVertex(0, 0, 0)
OX = geompy.MakeVectorDXDYDZ(1, 0, 0)
OY = geompy.MakeVectorDXDYDZ(0, 1, 0)
OZ = geompy.MakeVectorDXDYDZ(0, 0, 1)
geomObj_1 = geompy.MakeCylinderRH(1000, 3000)
Cylinder_1 = geompy.MakeRotation(geomObj_1, OZ, 180*math.pi/180.0)
geomObj_2 = geompy.MakeCylinder(O, OX, 2000, 5000)
Cylinder_2 = geompy.MakeRotation(geomObj_2, OX, 180*math.pi/180.0)
Translation_1 = geompy.MakeTranslation(Cylinder_2, -2000, 0, 0)
Fuse_1 = geompy.MakeFuse(Cylinder_1, Translation_1)
Fillet_1 = geompy.MakeFillet(Fuse_1, 800, geompy.ShapeType["EDGE"], [11])
Vertex_1 = geompy.MakeVertex(0, -3000, -3000)
Vertex_2 = geompy.MakeVertex(2500, 3000, 3000)
Box_1 = geompy.MakeBoxTwoPnt(Vertex_2, Vertex_1)
Partition_1 = geompy.MakePartition([Box_1], [Fillet_1], [], [], geompy.ShapeType["SOLID"], 0, [], 0)
geomObj_3 = geompy.MakeCylinderRH(1450, 8000)
Cylinder_3 = geompy.MakeRotation(geomObj_3, OZ, 180*math.pi/180.0)
Cut_1 = geompy.MakeCut(Partition_1, Cylinder_3)
[faceFiss1] = geompy.SubShapes(Cut_1, [61])
[Vertex_3,geomObj_4] = geompy.SubShapes(faceFiss1, [4, 5])
Cylinder_4 = geompy.MakeCylinderRH(2000, 4000)
Cylinder_5 = geompy.MakeCylinderRH(1500, 4000)
Cut_2 = geompy.MakeCut(Cylinder_4, Cylinder_5)
Plane_1 = geompy.MakePlaneLCS(None, 10000, 3)
Vertex_5 = geompy.MakeVertex(0, 0, 100)
Plane_2 = geompy.MakePlaneThreePnt(O, Vertex_5, Vertex_3, 10000)
Plane_3 = geompy.MakePlaneThreePnt(O, Vertex_5, geomObj_4, 10000)
Vertex_6 = geompy.MakeVertex(0, -5000, -5000)
Vertex_7 = geompy.MakeVertex(5000, 5000, 5000)
Box_2 = geompy.MakeBoxTwoPnt(Vertex_7, Vertex_6)
Common_1 = geompy.MakeCommon(Box_2, Cut_2)
objetSain = geompy.MakePartition([Common_1], [Plane_1, Plane_2, Plane_3], [], [], geompy.ShapeType["SOLID"], 0, [], 0)
[hauteurs, epaisseurs, Compound_3, Compound_4, Compound_5, Compound_6] = geompy.Propagate(objetSain)
geompy.Export(faceFiss1, os.path.join(gmu.pathBloc, "materielCasTests/faceGaucheFiss.brep"), "BREP")
geompy.addToStudy( O, 'O' )
geompy.addToStudy( OX, 'OX' )
geompy.addToStudy( OY, 'OY' )
geompy.addToStudy( OZ, 'OZ' )
geompy.addToStudy( Cylinder_1, 'Cylinder_1' )
geompy.addToStudy( Cylinder_2, 'Cylinder_2' )
geompy.addToStudy( Translation_1, 'Translation_1' )
geompy.addToStudy( Fuse_1, 'Fuse_1' )
geompy.addToStudy( Fillet_1, 'Fillet_1' )
geompy.addToStudy( Vertex_1, 'Vertex_1' )
geompy.addToStudy( Vertex_2, 'Vertex_2' )
geompy.addToStudy( Box_1, 'Box_1' )
geompy.addToStudy( Partition_1, 'Partition_1' )
geompy.addToStudy( Cylinder_3, 'Cylinder_3' )
geompy.addToStudy( Cut_1, 'Cut_1' )
geompy.addToStudyInFather( Cut_1, faceFiss1, 'faceFiss1' )
geompy.addToStudyInFather( faceFiss1, Vertex_3, 'Vertex_3' )
geompy.addToStudy( Cylinder_4, 'Cylinder_4' )
geompy.addToStudy( Cylinder_5, 'Cylinder_5' )
geompy.addToStudy( Cut_2, 'Cut_2' )
geompy.addToStudy( Plane_1, 'Plane_1' )
geompy.addToStudy( Vertex_5, 'Vertex_5' )
geompy.addToStudy( Plane_2, 'Plane_2' )
geompy.addToStudy( Plane_3, 'Plane_3' )
geompy.addToStudy( Vertex_6, 'Vertex_6' )
geompy.addToStudy( Vertex_7, 'Vertex_7' )
geompy.addToStudy( Box_2, 'Box_2' )
geompy.addToStudy( Common_1, 'Common_1' )
geompy.addToStudy( objetSain, 'objetSain' )
geompy.addToStudyInFather( objetSain, hauteurs, 'hauteurs' )
geompy.addToStudyInFather( objetSain, epaisseurs, 'epaisseurs' )
geompy.addToStudyInFather( objetSain, Compound_3, 'Compound_3' )
geompy.addToStudyInFather( objetSain, Compound_4, 'Compound_4' )
geompy.addToStudyInFather( objetSain, Compound_5, 'Compound_5' )
geompy.addToStudyInFather( objetSain, Compound_6, 'Compound_6' )
###
### SMESH component
###
import SMESH, SALOMEDS
from salome.smesh import smeshBuilder
smesh = smeshBuilder.New(theStudy)
from salome.StdMeshers import StdMeshersBuilder
objetSain_1 = smesh.Mesh(objetSain)
Regular_1D = objetSain_1.Segment()
Nb_Segments_1 = Regular_1D.NumberOfSegments(10,[],[ ])
Nb_Segments_1.SetDistrType( 0 )
Quadrangle_2D = objetSain_1.Quadrangle(algo=smeshBuilder.QUADRANGLE)
Hexa_3D = objetSain_1.Hexahedron(algo=smeshBuilder.Hexa)
Regular_1D_1 = objetSain_1.Segment(geom=hauteurs)
Nb_Segments_2 = Regular_1D_1.NumberOfSegments(15,[],[ ])
Nb_Segments_2.SetDistrType( 0 )
Regular_1D_2 = objetSain_1.Segment(geom=epaisseurs)
Nb_Segments_3 = Regular_1D_2.NumberOfSegments(5,[],[ ])
Nb_Segments_3.SetDistrType( 0 )
isDone = objetSain_1.Compute()
smesh.SetName(objetSain_1, 'objetSain')
objetSain_1.ExportMED( os.path.join(gmu.pathBloc, "materielCasTests/faceGaucheSain.med"), 0, SMESH.MED_V2_2, 1 )
SubMesh_1 = Regular_1D_1.GetSubMesh()
SubMesh_2 = Regular_1D_2.GetSubMesh()
## set object names
smesh.SetName(objetSain_1.GetMesh(), 'objetSain')
smesh.SetName(Regular_1D.GetAlgorithm(), 'Regular_1D')
smesh.SetName(Nb_Segments_1, 'Nb. Segments_1')
smesh.SetName(Quadrangle_2D.GetAlgorithm(), 'Quadrangle_2D')
smesh.SetName(Hexa_3D.GetAlgorithm(), 'Hexa_3D')
smesh.SetName(Nb_Segments_2, 'Nb. Segments_2')
smesh.SetName(Nb_Segments_3, 'Nb. Segments_3')
smesh.SetName(SubMesh_1, 'SubMesh_1')
smesh.SetName(SubMesh_2, 'SubMesh_2')
if salome.sg.hasDesktop():
salome.sg.updateObjBrowser(1)
|
FedoraScientific/salome-smesh
|
src/Tools/blocFissure/materielCasTests/fissureGauche.py
|
Python
|
lgpl-2.1
| 5,391
|
''' Platform-specific installation scripts.
LICENSING
-------------------------------------------------
hwiopy: A common API for hardware input/output access.
Copyright (C) 2014-2015 Nicholas Badger
badg@nickbadger.com
nickbadger.com
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
USA
------------------------------------------------------
Something something sooooomething goes here.
'''
# Global dependencies
import subprocess, tempfile, json
from subprocess import CalledProcessError
from pkg_resources import resource_filename, resource_string
class bbb_setup():
''' Performs installation setup for a Beaglebone Black.
    Basically using this as a local namespace container so that all platform
setup files can be contained within this file. Flat is better than nested
and all that.
'''
def __init__(self):
''' Handles all of the prerequisites we need to actually set up the
beaglebone black.
'''
# Verify that dtc can be used. If not, install it. Then we can just
# do a subprocess.check_call([self._dtc_string, ...])
if not self._check_dtc():
# Declare the self dtc string as the included bbb_dtc.sh
self._dtc_string = \
resource_filename('hwiopy', 'setup_utils/bbb_dtc.sh')
# Make the included dtc.sh executable
subprocess.check_call(['chmod', '+x', self._dtc_string])
else:
            # Successful verification; use the built-in dtc
self._dtc_string = 'dtc'
# Okay, now we need to compile dtbos
# Get the description from the mapping file
sitara_description = json.loads(resource_string('hwiopy',
'maps/sitara_termmodes.json').decode('utf-8'))
# Iterate over every SoC terminal
for terminal_name, terminal_dict in sitara_description.items():
# Create a list of every available mode for the terminal
modes = [mode['mode_type'] for mode in
terminal_dict['modes'].values()]
# If gpio is in the modes
if 'gpio' in modes:
# Describe it
desc = 'Terminal ' + terminal_name + ' gpio overlay'
# Name it
name = 'hwiopy-gpio-' + terminal_name
offset = terminal_dict['control_reg_offset']
# Get the control, convert it to a proper hex string
control = hex(int(terminal_dict['mux_default'], 16))
# Build a dts for it
self._build_dts(desc, name, offset, control)
self._compile_dts(name)
@classmethod
def do(cls):
self = cls()
@staticmethod
def _build_dts(description, name, offset, control, pin=None, device=None):
'''
description: the overlay description.
name: the overlay name.
offset: the register offset address for the SoC terminal.
control: the pinmux setting.
pin: the header pin (currently unused)
device: SPI, UART, etc. (currently unimplemented)
Ignore the docstring below this bit.
-----------------------------------------------------------------
pin_str: ex 'P8_3'
pin_dict: ex
{
"name": "USR0",
"gpio": 53,
"led": "usr0",
"mux": "gpmc_a5",
"key": "USR0",
"muxRegOffset": "0x054",
"options": [
"gpmc_a5",
"gmii2_txd0",
"rgmii2_td0",
"rmii2_txd0",
"gpmc_a21",
"pr1_mii1_rxd3",
"eqep1b_in",
"gpio1_21"
]
}
mux_mode: ex 'Mode7: gpio1_21'
If the pin_dict contains options:
# bspm?
if the mux_mode is pwm:
# bspwm?
pin sysfs loc = '/sys/devices/ocp.?/pwm_test_'+pinstr+'.??/'
pin data = slew rate (FAST/slow),
direction (OUT/in),
pullup (PULLUP/pulldown/disabled),
muxmode (ex 0x07 mode 7)
create dts (pin_dict, pin data, bspm/bspwm)
else:
could be analog in, which don't require overlays?
other options are set things like reference voltages and also
cannot be overlayed/set/etc
Note: dtbo filename must include the version. So if the part-number is
'BLAHBLAH', the whole thing needs to be 'BLAHBLAH-00A0.dtbo'
Note: Need to check which numbers to unexport (see adafruit tutorial)
before exporting, for the sake of cleanup. That said, we should be
careful with that, since evidently it can cause a kernel panic
situation?
Note: I hear that PWM overlay generation works differently?
'''
# Maybe define this elsewhere?
version = '00A0'
# Define the header
template = \
resource_string('hwiopy', 'overlays/bbb_template_gpio.dts').\
decode()
# Make a name that contains no dashes
safename = name.replace('-', '_')
# Replace applicable fields in the template
dts = template.replace('__NAME__', name)
dts = dts.replace('__DESCRIPTION__', description)
dts = dts.replace('__VERSION__', version)
dts = dts.replace('__SAFENAME__', safename)
dts = dts.replace('__OFFSET__', offset)
dts = dts.replace('__PINCONTROL__', control)
# Output the dts
dts_filename = resource_filename('hwiopy', 'overlays') + '/' + name +\
'-' + version + '.dts'
with open(dts_filename, 'w+') as dts_file:
dts_file.write(dts)
def _compile_dts(self, name):
# Maybe define this elsewhere?
version = '00A0'
# Get the before and after filenames
overlays_path = resource_filename('hwiopy', 'overlays') + '/'
dts_filename = overlays_path + name + '-' + version + '.dts'
dtbo_filename = overlays_path + name + '-' + version + '.dtbo'
# Make the system call
subprocess.check_call([self._dtc_string, '-O', 'dtb', '-o',
dtbo_filename, '-b', '0', '-@', dts_filename])
def _check_dtc(self):
''' Checks for capability to run the device tree compiler. If
unavailable, uses a DTC copied from
https://raw.github.com/RobertCNelson/tools/master/pkgs/dtc.sh
'''
# Get the filenames for the relevant files
test_overlay = resource_filename('hwiopy', 'overlays/test.dts')
test_compiled = resource_filename('hwiopy', 'overlays') + '/test.dtbo'
try:
# Try using the system dtc on the example device tree
subprocess.check_call(['dtc', '-O', 'dtb', '-o',
test_compiled, '-b', '0', '-@', test_overlay])
# If that works, well, dandy
return True
        # If it didn't work, a CalledProcessError or OSError will be raised
except (CalledProcessError, OSError):
# Can't compile, get our own compiler.
return False
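# --- Illustrative usage sketch (not part of the original module) ---
# A platform-specific install hook (for example run from setup.py on a
# Beaglebone Black) only needs the classmethod:
#
#   if __name__ == '__main__':
#       bbb_setup.do()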
|
Badg/hwiopy
|
platform_setup.py
|
Python
|
lgpl-2.1
| 7,848
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (C) 2013 Jolla Ltd.
# Contact: Denis Zalevskiy <denis.zalevskiy@jollamobile.com>
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
# http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html
# Some code was taken from Python 2.6 unittest.py. Copyright is following.
# Copyright (c) 1999-2003 Steve Purcell
# This module is free software, and you may redistribute it and/or modify
# it under the same terms as Python itself, so long as this copyright message
# and disclaimer are retained in their original form.
# IN NO EVENT SHALL THE AUTHOR BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT,
# SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OF
# THIS CODE, EVEN IF THE AUTHOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
# DAMAGE.
# THE AUTHOR SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
# PARTICULAR PURPOSE. THE CODE PROVIDED HEREUNDER IS ON AN "AS IS" BASIS,
# AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE,
# SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
import sys
import traceback
import re
# just to track this is the UT module
__unittest = 1
class StackParser:
__header_re = re.compile(r'^Traceback \(mo.+')
__fname_re = re.compile(r'\s*File "(.+)", line ([0-9]+), in (.+)')
__exc_line = re.compile(r'^(Exception|Failure): (.+)')
def __init__(self):
self.last_fname = "?"
self.err = "?"
self.locations = {}
self.info = []
self.__parse = self.__parse_start
def __parse_remain(self, x):
self.info.append(x)
def __parse_start(self, x):
if self.__header_re.match(x):
self.__parse = self.__parse_stack
def __parse_stack(self, x):
m = self.__exc_line.match(x)
if m:
self.err = m.group(2)
self.__parse = self.__parse_remain
return
m = self.__fname_re.match(x)
if m:
k = ':'.join([m.group(i) for i in range(1,4)])
self.last_fname = k
else:
loc = self.last_fname
if not loc in self.locations:
self.locations[loc] = []
self.locations[loc].append('"{0}"'.format(str(x).strip()))
def parse(self, stack):
[self.__parse(x) for x in stack.split("\n")]
traces = {loc : '\n'.join(lines) \
for loc, lines in self.locations.items() }
return self.err, traces
class Test(object):
def __init__(self):
self.__id = Test.__new_id()
def prepare(self, fn):
self.result = None
self.short_doc = fn.__doc__.split('\n')[0].strip() if fn.__doc__ else ''
self.__fn = fn
self.__mk_special_method(fn, 'setup')
return self
__next_test_id = 0
def __mk_special_method(self, fn, name):
method = fn.unit_test_args.get(name, None)
if method:
method = method if type(method) == str else method.__name__
setattr(self, name, getattr(fn.__self__, method))
else:
setattr(self, name, lambda : False)
@staticmethod
def __new_id():
res = Test.__next_test_id
Test.__next_test_id += 1
return res
def __str__(self):
return self.name
def __repr__(self):
return "Test({})".format(self.name)
@property
def number(self):
return self.__id
@property
def name(self):
return self.__fn.__name__
def __enter__(self):
self.setup()
def __exit__(self, err_type, err, tb):
self.__traceback = (err_type, err, tb)
try:
self.__fn.__self__._teardown()
except Exception as e:
self.__traceback = sys.exc_info()
raise e
@property
def traceback(self):
return self.__traceback
def __call__(self, *args, **kwargs):
self.__fn(*args, **kwargs)
def test(*args, **kwargs):
def modify(fn):
fn.unit_test = Test()
fn.unit_test_args = kwargs
return fn
if len(kwargs):
return modify
return modify(args[0])
def is_test(fn):
return hasattr(fn, 'unit_test')
class Suite(object):
def __init__(self):
self.__report = None
test_names = [name for name in dir(self.__class__) \
if is_test(getattr(self.__class__, name))]
methods = [(name, getattr(self, name)) for name in test_names]
self.__tests = { name : fn.unit_test.prepare(fn) for name, fn in methods }
self.log = lambda *args, **kwargs: False
self.suite_teardown = []
self.teardown = []
@property
def name(self):
return type(self).__name__
@property
def tests(self) :
return self.__tests.values()
def __enter__(self):
if hasattr(self, 'suite_setup'):
[getattr(self, fn.__name__)() for fn in self.suite_setup]
def __exit__(self, *args):
if hasattr(self, 'suite_teardown'):
self.__teardown(self.suite_teardown)
def __teardown(self, fns):
while len(fns):
fn = fns.pop()
fn()
def _teardown(self):
self.__teardown(self.teardown)
def __assertion(self, msg, *args, **kwargs):
self.log.warning(msg.format(*args, **kwargs))
raise Failure(msg, *args, **kwargs)
def ensure(self, condition, msg, *args, **kwargs):
if not condition:
self.__assertion('failed:' + msg, *args, **kwargs)
def ensure_eq(self, x, y, msg, *args, **kwargs):
if x != y:
fmt = 'failed: ({} == {}): {}'
self.__assertion(fmt, x, y, msg.format(*args, **kwargs))
def ensure_ne(self, x, y, msg, *args, **kwargs):
if x == y:
fmt = 'failed: ({} != {}): {}'
self.__assertion(fmt, x, y, msg.format(*args, **kwargs))
class Format(object):
def __init__(self, msg, *args, **kwargs):
self.msg = msg
self.args = args
self.kwargs = kwargs
def __repr__(self):
return self.msg.format(*self.args, **self.kwargs)
class Error(object):
def __init__(self, test, err):
self.__test_info = test
self.__name = test.name
self.__err = err
@property
def ok(self):
return False
def __repr__(self):
return ':'.join([self.__name, 'ERROR', repr(self.__err)])
__str__ = __repr__
@property
def test(self):
return self.__test_info
class Success(object):
def __init__(self, test):
self.__test_info = test
self.__name = test.name
def __repr__(self):
return ':'.join([self.__name, 'OK'])
__str__ = __repr__
@property
def test(self):
return self.__test_info
@property
def ok(self):
return True
class Failure(Exception):
def __init__(self, msg, *args, **kwargs):
self.__fmt = Format(msg, *args, **kwargs)
def init(self, test):
self.__test_info = test
self.__name = test.name
@property
def test(self):
return self.__test_info
@property
def ok(self):
return False
def __repr__(self):
return ':'.join([self.__name, 'FAIL', repr(self.__fmt)])
__str__ = __repr__
@property
def source(self):
return self.__name
class Runner(object):
def __init__(self, suite):
self.__suite = suite
def __run(self, test):
try:
with test:
test()
if test.result is None:
test.result = Success(test)
except Failure as e:
e.init(test)
test.result = e
except Exception as e:
test.result = Error(test, e)
def run(self, report):
self.__suite.log = report.log
report.suite = self.__suite
with report:
tests = sorted(self.__suite.tests,
key = lambda v: v.number)
with self.__suite:
for test in tests:
section = report.section(test)
with section:
self.__run(test)
class Report(object):
def __init__(self, stream, log):
self.__suite = None
self.__results = []
self.stream = stream
self.log = log
class Section(object):
def __init__(self, report, test):
self.report = report
self.__test = test
def __enter__(self):
self.report.log.info("Test {:s}".format(self.__test))
def __exit__(self, *args):
self.report.log.info("}")
stamp = '.' if self.__test.result.ok else 'F'
self.report.stream.write(stamp)
def section(self, test):
return Report.Section(self, test)
def __enter__(self):
self.log.info("Suite %s {", self.__suite.name)
self.stream.write("{} ".format(self.__suite.name))
def __exit__(self, *args):
self.stream.write("\n")
self.log.info("}")
self.__results = self.__suite.tests
self.__suite = None
@property
def suite(self):
return self.__suite
@suite.setter
def suite(self, v):
self.__suite = v
@property
def results(self):
return [x.result for x in self.__results]
@property
def failed_count(self):
return len([x for x in self.results if not x.ok])
def _is_relevant_tb_level(self, tb):
return '__unittest' in tb.tb_frame.f_globals
def _count_relevant_tb_levels(self, tb):
length = 0
while tb and not self._is_relevant_tb_level(tb):
length += 1
tb = tb.tb_next
return length
def _exc_info_to_string(self, err):
"""Converts a sys.exc_info()-style tuple of values into a string."""
exctype, value, tb = err
if exctype is Failure :
# Skip test runner traceback levels
while tb and self._is_relevant_tb_level(tb):
tb = tb.tb_next
# Skip assert*() traceback levels
length = self._count_relevant_tb_levels(tb)
else:
length = None
return ''.join(traceback.format_exception(exctype, value, tb, length))
def stack(self, traceback):
parser = StackParser()
return parser.parse(self._exc_info_to_string(traceback))
def format_results(self):
self.log.always("REPORT:\n")
failed = [self.__format_result(x) for x in self.results if not x.ok ]
self.log.always("Failed: {}\n\n".format(len(failed)))
def __format_result(self, res):
def log(data):
if type(data) != str:
[self.log.warning(':'.join(x)) for x in data.items()]
#self.stream.writelines(data)
else:
self.log.warning(data)
#self.stream.write("{:s}\n".format(data))
err, traces = self.stack(res.test.traceback)
log(err)
log(traces)
def default_main(*classes):
import logging
log = logging.getLogger("UT")
log.addHandler(logging.StreamHandler())
log.setLevel(logging.CRITICAL)
log.always = log.warning
rc = 0
for cls in classes:
report = Report(sys.stderr, log)
Runner(cls()).run(report)
log.setLevel(logging.WARNING)
report.format_results()
rc += report.failed_count
exit(rc)
class Tests(Suite):
@test
def test1(self):
'''First test info'''
return False
@test
def test2(self):
'''Second test info'''
self.ensure(False, "e {}", "s")
return True
if __name__ == '__main__':
default_main(Tests)
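# --- Illustrative note (not part of the original module) ---
# Running this file directly exercises the framework on the two demo tests
# above: test1 passes and test2 trips ensure(), so the stream shows "Tests .F",
# the report logs a single failure and the process exits with status 1.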
|
android-808/statefs
|
tests/UT.py
|
Python
|
lgpl-2.1
| 12,498
|
# -*- coding:utf-8 -*-
#
# Copyright (C) 2012, Maximilian Köhl <linuxmaxi@googlemail.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import heapq
class Handler():
LOW = 50
NORMAL = 0
HIGH = -50
def __init__(self, event, priority, function, arguments, keyword_arguments):
self._priority = priority
self.event = event
self.function = function
self.arguments = arguments
self.keyword_arguments = keyword_arguments
def __gt__(self, other):
if isinstance(other, Handler):
return other._priority < self._priority
elif isinstance(other, int):
return other < self._priority
        return super().__gt__(other)
def __lt__(self, other):
if isinstance(other, Handler):
return other._priority > self._priority
elif isinstance(other, int):
return other > self._priority
        return super().__lt__(other)
@property
def priority(self):
return self._priority
@priority.setter
def priority(self, priority):
self._priority = priority
heapq.heapify(self.event.handlers)
def call(self, *arguments, **keyword_arguments):
arguments = list(arguments)
arguments.extend(self.arguments)
keyword_arguments.update(self.keyword_arguments)
self.function(*arguments, **keyword_arguments)
def remove(self):
self.event.handlers.remove(self)
class Event():
def __init__(self, name=None):
self.name = name
self.handlers = []
def handler(self, function, *arguments, priority=Handler.NORMAL,
**keyword_arguments):
handler = Handler(self, priority, function, arguments,
keyword_arguments)
heapq.heappush(self.handlers, handler)
return handler
def emit(self, *arguments, **keyword_arguments):
for handler in self.handlers:
handler.call(*arguments, **keyword_arguments)
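# --- Illustrative usage sketch (not part of the original module) ---
# Lower priority numbers sort first, so a HIGH (-50) handler is kept ahead of
# NORMAL (0) and LOW (50) ones in the heap.  A minimal demo, runnable standalone:
if __name__ == '__main__':
    clicked = Event('clicked')
    clicked.handler(lambda tag: print('low', tag), priority=Handler.LOW)
    clicked.handler(lambda tag: print('high', tag), priority=Handler.HIGH)
    clicked.emit('button-1')  # the HIGH handler sits at the front of the heap and fires first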
|
koehlma/lightasync
|
src/lightasync/event.py
|
Python
|
lgpl-2.1
| 2,710
|
#!/usr/bin/python
# -*- coding: UTF-8 -*-
# Copyright (C) 2016 Michel Müller, Tokyo Institute of Technology
# This file is part of Hybrid Fortran.
# Hybrid Fortran is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Hybrid Fortran is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public License
# along with Hybrid Fortran. If not, see <http://www.gnu.org/licenses/>.
import os
import logging
def dirEntries(dir_name, subdir, *args):
'''Return a list of file names found in directory 'dir_name'
If 'subdir' is True, recursively access subdirectories under 'dir_name'.
Additional arguments, if any, are file extensions to match filenames. Matched
file names are added to the list.
If there are no additional arguments, all files found in the directory are
added to the list.
Example usage: fileList = dirEntries(r'H:\TEMP', False, 'txt', 'py')
Only files with 'txt' and 'py' extensions will be added to the list.
Example usage: fileList = dirEntries(r'H:\TEMP', True)
All files and all the files in subdirectories under H:\TEMP will be added
to the list.
'''
fileList = []
for file in os.listdir(dir_name):
dirfile = os.path.join(dir_name, file)
if os.path.isfile(dirfile):
if not args:
fileList.append(dirfile)
else:
if os.path.splitext(dirfile)[1][1:] in args:
fileList.append(dirfile)
# recursively access file names in subdirectories
elif os.path.isdir(dirfile) and subdir:
fileList.extend(dirEntries(dirfile, subdir, *args))
return fileList
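# --- Illustrative usage sketch (not part of the original module) ---
# Runnable standalone: list every .py file under the current directory,
# mirroring the docstring examples above.
if __name__ == '__main__':
    for entry in dirEntries('.', True, 'py'):
        print(entry)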
|
muellermichel/Hybrid-Fortran
|
hf/tools/filesystem.py
|
Python
|
lgpl-3.0
| 2,088
|
import hashlib
import django.db.models
import django.forms
from . import widgets
def sha1(s):
"""Get a SHA1 hash of string `s`."""
algo = hashlib.new('sha1')
algo.update(s.encode('ascii'))
return algo.hexdigest()
REGISTRY = {}
def get_qualname(cls):
"""Get the fully qualified name of a class.
:rtype: str
For example:
>>> get_qualname(AutoRegister)
'simpleselect.fields.AutoRegister'
"""
qualname = cls.__module__ + '.' + cls.__name__
return qualname
class AutoRegister(type):
def __init__(cls, name, bases, namespace):
if cls.__module__ != __name__:
key = cls.registry_key_func()
REGISTRY[key] = cls
super().__init__(name, bases, namespace)
class AutoSelectField(django.forms.ModelChoiceField, metaclass=AutoRegister):
@classmethod
def registry_key_func(cls):
return sha1(get_qualname(cls))[:5]
def __init__(self, *args, **kwargs):
if not 'widget' in kwargs:
widget = widgets.AutocompleteSelect(
queries=self.queries,
token_generator=lambda w: type(self).registry_key_func())
widget.choices = self.data
kwargs['widget'] = widget
super().__init__(self.data, *args, **kwargs)
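# --- Illustrative sketch (not part of the original module) ---
# Subclassing AutoSelectField is what registers a field: the AutoRegister
# metaclass stores the subclass in REGISTRY under a short sha1-derived key.
# `queries` and `data` are assumed to be supplied by the subclass (the model
# below is hypothetical), so this is shown as a commented sketch only:
#
#   class PersonField(AutoSelectField):
#       queries = ['name__icontains']         # lookups the autocomplete widget searches
#       data = Person.objects.all()           # hypothetical queryset used for choices
#
#   key = PersonField.registry_key_func()     # 5-character token
#   assert REGISTRY[key] is PersonField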
|
leo-the-manic/django-simple-select
|
simpleselect/fields.py
|
Python
|
lgpl-3.0
| 1,292
|
def combos(ls,n):
if n == 0:
return [[]]
rtn = []
for it in ls:
for ls2 in combos(ls, n-1):
ls2.append(it)
rtn.append(ls2)
return rtn
def histogram(perm):
rtn = {}
for p in perm:
sp = sum(p)
rtn[sp] = 1 + rtn.get(sp, 0)
return rtn
pete = histogram(combos(range(1,5), 9))
cole = histogram(combos(range(1,7), 6))
played = 0
wins = 0
for pk in pete:
for ck in cole:
played += pete[pk] * cole[ck]
if pk > ck:
wins += pete[pk] * cole[ck]
print round(wins / float(played),7)
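# --- Illustrative note (not part of the original script) ---
# combos() enumerates every possible roll (4**9 for Pete, 6**6 for Colin) and
# histogram() buckets them by total; Pete wins whenever his total is strictly
# higher.  A tiny worked case: for one 4-sided die against one 6-sided die,
# histogram(combos(range(1,5), 1)) == {1: 1, 2: 1, 3: 1, 4: 1}, and the same
# counting loop gives 6 winning pairs out of 24, i.e. 0.25.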
|
jdavidberger/project-euler
|
prob205.py
|
Python
|
lgpl-3.0
| 590
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# This file is part of Androwarn.
#
# Copyright (C) 2012, 2019, Thomas Debize <tdebize at mail.com>
# All rights reserved.
#
# Androwarn is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Androwarn is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Androwarn. If not, see <http://www.gnu.org/licenses/>.
# Global imports
import re
import logging
from html.parser import HTMLParser
# Logger
log = logging.getLogger('log')
def convert_dex_to_canonical(dex_name) :
"""
@param dex_name : a dex name, for instance "Lcom/name/test"
@rtype : a dotted string, for instance "com.name.test"
"""
final_name = ''
    if re.match(r'^\[?L[a-zA-Z]+(?:\/[a-zA-Z]+)*(.)*;$', dex_name) :
global_part = dex_name[1:-1].split('/')
final_part = global_part[:-1]
last_part = global_part[-1].split('$')[0]
final_part.append(str(last_part))
final_name = '.'.join(str(i) for i in final_part)
else :
log.debug("[!] Conversion to canonical dotted name failed : \"" + dex_name + "\" is not a valid library dex name")
return final_name
def convert_canonical_to_dex(canonical_name) :
return 'L' + canonical_name.replace('.', '/') + ';'
# Log extra information
def log_result_path_information(detectors) :
"""
@param detectors : a result from the detector's result list
@rtype : void - it only logs extra information about the analysis result
"""
for res in detectors :
xrefs_from = res.get_xref_from()
for xref_analysis, xref_encodedmethod in xrefs_from:
xref_encodedmethod_class, xref_encodedmethod_function, _ = xref_encodedmethod.get_triple()
log.info("'%s' called by function '%s' of class '%s'" % (res.get_value(), xref_encodedmethod_function, xref_encodedmethod_class))
def strip_HTML_tags(html):
"""
@param html : a string to be cleaned up
    @rtype : an HTML-tag sanitized string
"""
# HTML Sanitizer
class MLStripper(HTMLParser):
        def __init__(self):
            super().__init__()
            self.reset()
            self.fed = []
def handle_data(self, d):
self.fed.append(d)
def get_data(self):
return ''.join(self.fed)
# Keep the indentation
html = html.replace('<br>', '\n')
# Remove HTML tags
s = MLStripper()
s.feed(html)
return s.get_data()
# XML parsing
def get_parent_child_grandchild(tree):
"""
@param tree : xml root Element
@rtype : parent, child and grandchild Element
"""
for parent in tree.iter() :
for child in parent :
for grandchild in child :
yield parent, child, grandchild
# Single structural analysis
def structural_analysis_search_method(class_name, method_name, x):
return x.find_methods(classname=class_name, methodname=method_name)
def structural_analysis_search_string(pattern, x):
result = list(x.find_strings(pattern))
log_result_path_information(result)
return result
def structural_analysis_search_field(pattern, x):
return list(x.find_fields(fieldname=pattern))
# Bulk structural analysis
def structural_analysis_search_method_bulk(class_name, method_listing, x):
"""
@param list : a list of tuple (class function name, class function description)
@rtype : a list of strings related to the findings
"""
formatted_str = []
for method_name, description in method_listing:
if list(structural_analysis_search_method(class_name, method_name, x)):
if description not in formatted_str:
formatted_str.append(description)
return sorted(formatted_str)
def structural_analysis_search_string_bulk(string_listing, x):
formatted_str = []
for string_name, description in string_listing:
if structural_analysis_search_string(string_name, x):
if description not in formatted_str:
formatted_str.append(description)
return sorted(formatted_str)
# OR Bitwise option recovery
def recover_bitwise_flag_settings(flag, constants_dict) :
"""
@param flag : an integer value to be matched with bitwise OR options set
@param constants_dict : a dictionary containing each options' integer value
@rtype : a string summing up settings
"""
recover = ''
options = []
try:
flag_int = int(flag)
except:
return recover
for option_value in constants_dict :
if (int(flag) & option_value) == option_value :
options.append(constants_dict[option_value])
recover = ', '.join(i for i in options)
return recover
# Check if extracted values are ALL register numbers, following the pattern 'v[0-9]+', as they obviously are register numbers and thus useless
def isnt_all_regs_values(string_list) :
"""
@param list : a list of strings, extracted from the data flow analysis
@rtype : a boolean, True if there's at least 1 non-register number value, Else False
"""
result = False
p_reg = re.compile('^v[0-9]+$')
for i in string_list :
if not(p_reg.match(i)):
result = True
return result
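# --- Illustrative usage sketch (not part of the original module) ---
# Runnable standalone; the flag table below is made up for the demo.
if __name__ == '__main__':
    print(convert_dex_to_canonical('Lcom/name/test;'))         # -> com.name.test
    print(convert_canonical_to_dex('com.name.test'))           # -> Lcom/name/test;
    demo_flags = {0x1: 'FLAG_A', 0x2: 'FLAG_B', 0x4: 'FLAG_C'}  # hypothetical option values
    print(recover_bitwise_flag_settings(5, demo_flags))        # -> FLAG_A, FLAG_C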
|
maaaaz/androwarn
|
warn/util/util.py
|
Python
|
lgpl-3.0
| 5,774
|
# ==============================================================================
# Copyright (C) 2011 Diego Duclos
# Copyright (C) 2011-2018 Anton Vorobyov
#
# This file is part of Eos.
#
# Eos is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Eos is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Eos. If not, see <http://www.gnu.org/licenses/>.
# ==============================================================================
from eos import Fit
from tests.integration.testcase import IntegrationTestCase
class CustomizationTestCase(IntegrationTestCase):
"""Class which should be used by eve object customization tests.
Attributes:
fit: Pre-created fit.
"""
def setUp(self):
IntegrationTestCase.setUp(self)
self.fit = Fit()
def get_log(self, name='eos.eve_obj.custom*'):
return IntegrationTestCase.get_log(self, name=name)
|
pyfa-org/eos
|
tests/integration/customization/testcase.py
|
Python
|
lgpl-3.0
| 1,352
|
############################ Copyrights and license ############################
# #
# Copyright 2012 Vincent Jacques <vincent@vincent-jacques.net> #
# Copyright 2012 Zearin <zearin@gonk.net> #
# Copyright 2013 AKFish <akfish@gmail.com> #
# Copyright 2013 Michael Stead <michael.stead@gmail.com> #
# Copyright 2013 Vincent Jacques <vincent@vincent-jacques.net> #
# Copyright 2013 martinqt <m.ki2@laposte.net> #
# Copyright 2014 Vincent Jacques <vincent@vincent-jacques.net> #
# Copyright 2016 Jannis Gebauer <ja.geb@me.com> #
# Copyright 2016 Peter Buckley <dx-pbuckley@users.noreply.github.com> #
# Copyright 2017 Nicolas Agustín Torres <nicolastrres@gmail.com> #
# Copyright 2018 Jess Morgan <979404+JessMorgan@users.noreply.github.com> #
# Copyright 2018 per1234 <accounts@perglass.com> #
# Copyright 2018 sfdye <tsfdye@gmail.com> #
# Copyright 2020 Huan-Cheng Chang <changhc84@gmail.com> #
# #
# This file is part of PyGithub. #
# http://pygithub.readthedocs.io/ #
# #
# PyGithub is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the Free #
# Software Foundation, either version 3 of the License, or (at your option) #
# any later version. #
# #
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #
# details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #
# #
################################################################################
import github.GithubObject
import github.NamedUser
from . import Consts
class PullRequestComment(github.GithubObject.CompletableGithubObject):
"""
This class represents PullRequestComments. The reference can be found here http://docs.github.com/en/rest/reference/pulls#comments
"""
def __repr__(self):
return self.get__repr__({"id": self._id.value, "user": self._user.value})
@property
def body(self):
"""
:type: string
"""
self._completeIfNotSet(self._body)
return self._body.value
@property
def commit_id(self):
"""
:type: string
"""
self._completeIfNotSet(self._commit_id)
return self._commit_id.value
@property
def created_at(self):
"""
:type: datetime.datetime
"""
self._completeIfNotSet(self._created_at)
return self._created_at.value
@property
def diff_hunk(self):
"""
:type: string
"""
self._completeIfNotSet(self._diff_hunk)
return self._diff_hunk.value
@property
def id(self):
"""
:type: integer
"""
self._completeIfNotSet(self._id)
return self._id.value
@property
def in_reply_to_id(self):
"""
:type: integer
"""
self._completeIfNotSet(self._in_reply_to_id)
return self._in_reply_to_id.value
@property
def original_commit_id(self):
"""
:type: string
"""
self._completeIfNotSet(self._original_commit_id)
return self._original_commit_id.value
@property
def original_position(self):
"""
:type: integer
"""
self._completeIfNotSet(self._original_position)
return self._original_position.value
@property
def path(self):
"""
:type: string
"""
self._completeIfNotSet(self._path)
return self._path.value
@property
def position(self):
"""
:type: integer
"""
self._completeIfNotSet(self._position)
return self._position.value
@property
def pull_request_url(self):
"""
:type: string
"""
self._completeIfNotSet(self._pull_request_url)
return self._pull_request_url.value
@property
def updated_at(self):
"""
:type: datetime.datetime
"""
self._completeIfNotSet(self._updated_at)
return self._updated_at.value
@property
def url(self):
"""
:type: string
"""
self._completeIfNotSet(self._url)
return self._url.value
@property
def html_url(self):
"""
:type: string
"""
self._completeIfNotSet(self._html_url)
return self._html_url.value
@property
def user(self):
"""
:type: :class:`github.NamedUser.NamedUser`
"""
self._completeIfNotSet(self._user)
return self._user.value
def delete(self):
"""
:calls: `DELETE /repos/{owner}/{repo}/pulls/comments/{number} <http://docs.github.com/en/rest/reference/pulls#comments>`_
:rtype: None
"""
headers, data = self._requester.requestJsonAndCheck("DELETE", self.url)
def edit(self, body):
"""
:calls: `PATCH /repos/{owner}/{repo}/pulls/comments/{number} <http://docs.github.com/en/rest/reference/pulls#comments>`_
:param body: string
:rtype: None
"""
assert isinstance(body, str), body
post_parameters = {
"body": body,
}
headers, data = self._requester.requestJsonAndCheck(
"PATCH", self.url, input=post_parameters
)
self._useAttributes(data)
def get_reactions(self):
"""
:calls: `GET /repos/{owner}/{repo}/pulls/comments/{number}/reactions
<https://docs.github.com/en/rest/reference/reactions#list-reactions-for-a-pull-request-review-comment>`_
        :return: :class:`github.PaginatedList.PaginatedList` of :class:`github.Reaction.Reaction`
"""
return github.PaginatedList.PaginatedList(
github.Reaction.Reaction,
self._requester,
f"{self.url}/reactions",
None,
headers={"Accept": Consts.mediaTypeReactionsPreview},
)
def create_reaction(self, reaction_type):
"""
:calls: `POST /repos/{owner}/{repo}/pulls/comments/{number}/reactions
<https://docs.github.com/en/rest/reference/reactions#create-reaction-for-a-pull-request-review-comment>`_
:param reaction_type: string
:rtype: :class:`github.Reaction.Reaction`
"""
assert isinstance(reaction_type, str), reaction_type
post_parameters = {
"content": reaction_type,
}
headers, data = self._requester.requestJsonAndCheck(
"POST",
f"{self.url}/reactions",
input=post_parameters,
headers={"Accept": Consts.mediaTypeReactionsPreview},
)
return github.Reaction.Reaction(self._requester, headers, data, completed=True)
def delete_reaction(self, reaction_id):
"""
:calls: `DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}
<https://docs.github.com/en/rest/reference/reactions#delete-a-pull-request-comment-reaction>`_
:param reaction_id: integer
:rtype: bool
"""
assert isinstance(reaction_id, int), reaction_id
status, _, _ = self._requester.requestJson(
"DELETE",
f"{self.url}/reactions/{reaction_id}",
headers={"Accept": Consts.mediaTypeReactionsPreview},
)
return status == 204
def _initAttributes(self):
self._body = github.GithubObject.NotSet
self._commit_id = github.GithubObject.NotSet
self._created_at = github.GithubObject.NotSet
self._diff_hunk = github.GithubObject.NotSet
self._id = github.GithubObject.NotSet
self._in_reply_to_id = github.GithubObject.NotSet
self._original_commit_id = github.GithubObject.NotSet
self._original_position = github.GithubObject.NotSet
self._path = github.GithubObject.NotSet
self._position = github.GithubObject.NotSet
self._pull_request_url = github.GithubObject.NotSet
self._updated_at = github.GithubObject.NotSet
self._url = github.GithubObject.NotSet
self._html_url = github.GithubObject.NotSet
self._user = github.GithubObject.NotSet
def _useAttributes(self, attributes):
if "body" in attributes: # pragma no branch
self._body = self._makeStringAttribute(attributes["body"])
if "commit_id" in attributes: # pragma no branch
self._commit_id = self._makeStringAttribute(attributes["commit_id"])
if "created_at" in attributes: # pragma no branch
self._created_at = self._makeDatetimeAttribute(attributes["created_at"])
if "diff_hunk" in attributes: # pragma no branch
self._diff_hunk = self._makeStringAttribute(attributes["diff_hunk"])
if "id" in attributes: # pragma no branch
self._id = self._makeIntAttribute(attributes["id"])
if "in_reply_to_id" in attributes: # pragma no branch
self._in_reply_to_id = self._makeIntAttribute(attributes["in_reply_to_id"])
if "original_commit_id" in attributes: # pragma no branch
self._original_commit_id = self._makeStringAttribute(
attributes["original_commit_id"]
)
if "original_position" in attributes: # pragma no branch
self._original_position = self._makeIntAttribute(
attributes["original_position"]
)
if "path" in attributes: # pragma no branch
self._path = self._makeStringAttribute(attributes["path"])
if "position" in attributes: # pragma no branch
self._position = self._makeIntAttribute(attributes["position"])
if "pull_request_url" in attributes: # pragma no branch
self._pull_request_url = self._makeStringAttribute(
attributes["pull_request_url"]
)
if "updated_at" in attributes: # pragma no branch
self._updated_at = self._makeDatetimeAttribute(attributes["updated_at"])
if "url" in attributes: # pragma no branch
self._url = self._makeStringAttribute(attributes["url"])
if "html_url" in attributes: # pragma no branch
self._html_url = self._makeStringAttribute(attributes["html_url"])
if "user" in attributes: # pragma no branch
self._user = self._makeClassAttribute(
github.NamedUser.NamedUser, attributes["user"]
)
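# Illustrative usage sketch (not part of this module; the repository name, pull
# request number and token below are placeholders):
#   from github import Github
#   g = Github("<access_token>")
#   pull = g.get_repo("octocat/Hello-World").get_pull(1)
#   for comment in pull.get_review_comments():
#       print(comment.path, comment.body)
#       comment.create_reaction("+1")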
|
sbesson/PyGithub
|
github/PullRequestComment.py
|
Python
|
lgpl-3.0
| 11,694
|
# Copyright 2016 Ivan Yelizariev <https://it-projects.info/team/yelizariev>
# Copyright 2019 Artem Rafailov <https://it-projects.info/team/Ommo73/>
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html)
{
"name": """Pad support in Lead's Internal notes""",
"summary": """This module allows collaborative editing with *html support* in Lead's Internal notes and maintains the versions history.""",
"category": "Project",
# "live_test_url": "http://apps.it-projects.info/shop/product/DEMO-URL?version=12.0",
"images": ["images/pad_crm_2.jpg"],
"version": "12.0.1.0.0",
"application": False,
"author": "IT-Projects LLC, Ivan Yelizariev",
"support": "apps@it-projects.info",
"website": "https://it-projects.info/",
"license": "LGPL-3",
"price": 90.00,
"currency": "EUR",
"depends": ["crm", "pad"],
"external_dependencies": {"python": [], "bin": []},
"data": ["crm_lead.xml", "template.xml"],
"demo": [],
"qweb": [],
"post_load": None,
"pre_init_hook": None,
"post_init_hook": None,
"uninstall_hook": None,
"auto_install": False,
"installable": True,
# "demo_title": "{MODULE_NAME}",
# "demo_addons": [
# ],
# "demo_addons_hidden": [
# ],
# "demo_url": "DEMO-URL",
# "demo_summary": "{SHORT_DESCRIPTION_OF_THE_MODULE}",
# "demo_images": [
# "images/MAIN_IMAGE",
# ]
}
|
yelizariev/addons-yelizariev
|
pad_crm/__manifest__.py
|
Python
|
lgpl-3.0
| 1,416
|
# -*- coding: iso-8859-1 -*-
#
# Copyright (C) 2009 Rene Liebscher
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 3 of the License, or (at your option) any
# later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License along with
# this program; if not, see <http://www.gnu.org/licenses/>.
#
__revision__ = "$Id: AlgebraicProduct.py,v 1.3 2009/08/07 07:19:18 rliebscher Exp $"
from fuzzy.norm.Norm import Norm,NormException
class AlgebraicProduct(Norm):
def __init__(self):
Norm.__init__(self,Norm.T_NORM)
def __call__(self,*args):
if len(args) != 2:
raise NormException("%s is supported only for 2 parameters" % self.__class__.__name__ )
x = float(args[0])
y = float(args[1])
return x*y
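# Illustrative usage sketch: the algebraic product t-norm simply multiplies its
# two membership degrees, e.g.
#   AlgebraicProduct()(0.5, 0.4)  # -> 0.2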
|
arruda/pyfuzzy
|
fuzzy/norm/AlgebraicProduct.py
|
Python
|
lgpl-3.0
| 1,188
|
# Copyright (c) 2010, PRESENSE Technologies GmbH
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the PRESENSE Technologies GmbH nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import unittest
from bdec.expression import parse
from bdec.field import Field
from bdec.sequence import Sequence
from bdec.sequenceof import SequenceOf
from bdec.output.instance import encode
class TestSequenceOf(unittest.TestCase):
def test_encode_hidden_count(self):
# Test that we correctly encode a hidden count
a = Sequence('a', [
Field('count:', length=8),
SequenceOf('c', Field('d', length=8, format=Field.TEXT), count=parse("${count:}")),
])
self.assertEqual('\x03abc', encode(a, {'c' : ['a', 'b', 'c']}).bytes())
|
asdf1011/bdec
|
bdec/encode/test/testsequenceof.py
|
Python
|
lgpl-3.0
| 2,162
|
import unittest
from kona.linalg.memory import KonaMemory
from kona.algorithms.util.linesearch import BackTracking
from kona.algorithms.util.merit import ObjectiveMerit
from kona.examples.simple_2by2 import Simple2x2
class BackTrackingTestCase(unittest.TestCase):
def setUp(self):
solver = Simple2x2()
km = KonaMemory(solver)
self.pf = pf = km.primal_factory
self.sf = sf = km.state_factory
pf.request_num_vectors(10)
sf.request_num_vectors(10)
self.merit = ObjectiveMerit(pf, sf)
km.allocate_memory()
self.bt = BackTracking() # leave all settings with initial values
search_dir = self.pf.generate()
search_dir.base.data[:] = [-1,0]
at_design = self.pf.generate()
at_design.equals(1)
at_state = self.sf.generate()
at_state.equals_primal_solution(at_design)
state_work = self.sf.generate()
adjoint = self.sf.generate()
adjoint.equals_objective_adjoint(at_design, at_state, state_work)
primal_work = self.pf.generate()
dfdx = self.pf.generate()
dfdx.equals_total_gradient(at_design, at_state, adjoint)
self.bt.p_dot_dfdx = dfdx.inner(search_dir)
def test_stops_after_one_iter(self):
'''BackTracking line search test (1/4)'''
search_dir = self.pf.generate()
search_dir.base.data[:] = [0,-2]
at_design = self.pf.generate()
at_design.equals(1)
at_state = self.sf.generate()
at_state.equals_primal_solution(at_design)
grad = self.pf.generate()
at_adjoint = self.sf.generate()
state_work = self.sf.generate()
at_adjoint.equals_objective_adjoint(at_design, at_state, state_work)
primal_work = self.pf.generate()
grad.equals_total_gradient(at_design, at_state, at_adjoint)
p_dot_grad = search_dir.inner(grad)
self.merit.reset(search_dir, at_design, at_state, p_dot_grad)
self.bt.alpha_init = 0.5 # should evaluate 2.5, 2.5
self.bt.rdtn_factor = .5
self.bt.decr_cond = 1e-4
alpha, n_iter = self.bt.find_step_length(self.merit)
self.assertEqual(n_iter, 2)
self.assertEqual(alpha, .25)
def test_stops_after_multiple_iter(self):
'''BackTracking line search test (2/4)'''
search_dir = self.pf.generate()
search_dir.base.data[:] = [0,-2]
at_design = self.pf.generate()
at_design.equals(1)
at_state = self.sf.generate()
at_state.equals_primal_solution(at_design)
grad = self.pf.generate()
at_adjoint = self.sf.generate()
state_work = self.sf.generate()
at_adjoint.equals_objective_adjoint(at_design, at_state, state_work)
primal_work = self.pf.generate()
grad.equals_total_gradient(at_design, at_state, at_adjoint)
p_dot_grad = search_dir.inner(grad)
self.merit.reset(search_dir, at_design, at_state, p_dot_grad)
self.bt.alpha_init = 1
self.bt.rdtn_factor = .75
self.bt.decr_cond = 1e-4
alpha, n_iter = self.bt.find_step_length(self.merit)
self.assertEqual(n_iter, 4)
def test_from_running_other_way(self):
'''BackTracking line search test (3/4)'''
search_dir = self.pf.generate()
search_dir.base.data[:] = [4.25,0]
at_design = self.pf.generate()
at_design.equals(-2)
at_state = self.sf.generate()
at_state.equals_primal_solution(at_design)
state_work = self.sf.generate()
adjoint = self.sf.generate()
adjoint.equals_objective_adjoint(at_design, at_state, state_work)
primal_work = self.pf.generate()
dfdx = self.pf.generate()
dfdx.equals_total_gradient(at_design, at_state, adjoint)
self.bt.p_dot_dfdx = dfdx.inner(search_dir)
self.merit.reset(search_dir, at_design, at_state, self.bt.p_dot_dfdx)
self.bt.alpha_init = 1
alpha, n_iter = self.bt.find_step_length(self.merit)
self.assertEqual(n_iter, 1)
self.assertEqual(alpha, 1)
def test_bad_search_direction(self):
'''BackTracking line search test (4/4)'''
search_dir = self.pf.generate()
search_dir.base.data[:] = [-1,0]
at_design = self.pf.generate()
at_design.equals(1)
at_state = self.sf.generate()
at_state.equals_primal_solution(at_design)
grad = self.pf.generate()
at_adjoint = self.sf.generate()
state_work = self.sf.generate()
at_adjoint.equals_objective_adjoint(at_design, at_state, state_work)
primal_work = self.pf.generate()
grad.equals_total_gradient(at_design, at_state, at_adjoint)
p_dot_grad = search_dir.inner(grad)
self.merit.reset(search_dir, at_design, at_state, p_dot_grad)
self.bt.alpha_init = .3 # should evaluate 2.5, 2.5
self.merit.p_dot_grad *= -1
try:
alpha, n_iter = self.bt.find_step_length(self.merit)
except ValueError as err:
self.assertEqual(
str(err),
'search direction is not a descent direction')
else:
self.fail('ValueError expected')
def test_no_merit_function(self):
'''BackTracking error test for no merit function'''
try:
alpha, n_iter = self.bt.find_step_length(None)
except ValueError as err:
self.assertEqual(str(err), 'unknown merit_function type')
else:
self.fail('ValueError expected')
def test_bad_alpha_init(self):
'''BackTracking error test for bad initial alpha'''
self.bt.alpha_init = 1e6
try:
alpha, n_iter = self.bt.find_step_length(None)
except ValueError as err:
self.assertEqual(str(err), 'alpha_init must be 0 < alpha_init <=1')
else:
self.fail('ValueError expected')
if __name__ == "__main__":
unittest.main()
|
OptimalDesignLab/Kona
|
src/kona/test/test_back_tracking.py
|
Python
|
lgpl-3.0
| 6,025
|
import uuid
from django.core.management.base import BaseCommand
from jobs.models import Job
from core.utils import PersistentStorage
class Command(BaseCommand):
""" Management script to sync all previous jobs stored on persistent storage
"""
def handle(self, *args, **kwargs):
storage = PersistentStorage()
job_ids = storage.list_jobs()
for job_id in job_ids:
Job.objects.get_or_create(id=uuid.UUID(job_id),
defaults={'status': Job.PREVIOUS})
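# Illustrative usage sketch: Django management commands are invoked by module
# name, so (assuming the usual manage.py layout) this sync can be run with
#   python manage.py download_previous_jobs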
|
SeiryuZ/HemeWeb
|
src/jobs/management/commands/download_previous_jobs.py
|
Python
|
lgpl-3.0
| 534
|
# Copyright (c) 2020 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher.
import os
import sys
from unittest.mock import patch, MagicMock
from UM.PluginRegistry import PluginRegistry
from UM.Resources import Resources
from UM.Trust import Trust
from ..PostProcessingPlugin import PostProcessingPlugin
# not sure if needed
sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), ".."))
""" In this file, community refers to regular Cura for makers."""
mock_plugin_registry = MagicMock()
mock_plugin_registry.getPluginPath = MagicMock(return_value = "mocked_plugin_path")
# noinspection PyProtectedMember
@patch("cura.ApplicationMetadata.IsEnterpriseVersion", False)
def test_community_user_script_allowed():
assert PostProcessingPlugin._isScriptAllowed("blaat.py")
# noinspection PyProtectedMember
@patch("cura.ApplicationMetadata.IsEnterpriseVersion", False)
def test_community_bundled_script_allowed():
assert PostProcessingPlugin._isScriptAllowed(_bundled_file_path())
# noinspection PyProtectedMember
@patch("cura.ApplicationMetadata.IsEnterpriseVersion", True)
@patch.object(PluginRegistry, "getInstance", return_value=mock_plugin_registry)
def test_enterprise_unsigned_user_script_not_allowed(plugin_registry):
assert not PostProcessingPlugin._isScriptAllowed("blaat.py")
# noinspection PyProtectedMember
@patch("cura.ApplicationMetadata.IsEnterpriseVersion", True)
@patch.object(PluginRegistry, "getInstance", return_value=mock_plugin_registry)
def test_enterprise_signed_user_script_allowed(plugin_registry):
mocked_trust = MagicMock()
mocked_trust.signedFileCheck = MagicMock(return_value=True)
plugin_registry.getPluginPath = MagicMock(return_value="mocked_plugin_path")
with patch.object(Trust, "signatureFileExistsFor", return_value = True):
with patch("UM.Trust.Trust.getInstanceOrNone", return_value=mocked_trust):
assert PostProcessingPlugin._isScriptAllowed("mocked_plugin_path/scripts/blaat.py")
# noinspection PyProtectedMember
@patch("cura.ApplicationMetadata.IsEnterpriseVersion", False)
def test_enterprise_bundled_script_allowed():
assert PostProcessingPlugin._isScriptAllowed(_bundled_file_path())
def _bundled_file_path():
    return os.path.join(
        Resources.getStoragePath(Resources.Resources), "scripts", "blaat.py"
    )
|
Ultimaker/Cura
|
plugins/PostProcessingPlugin/tests/TestPostProcessingPlugin.py
|
Python
|
lgpl-3.0
| 2,361
|
import json
import spycfg.errors
from spycfg import SpyCfg, INI_CFG
from tests.helpers import BaseTestCase
class CreateTestCase(BaseTestCase):
def test_no_file(self):
with self.assertRaises(spycfg.errors.IOError) as cf:
file_name = 'any'
SpyCfg(file_name)
self.assertIsInstance(cf.exception, spycfg.errors.IOError)
def test_create_from_json(self):
cfg = SpyCfg(self.SIMPLE_JSON_CONFIG)
self.assertEqual(cfg['key1'], 'key1')
def test_dev_env_config_loaded_and_overrides_default(self):
dev_cfg = json.loads(self.file_content(self.DEV_CONFIG))
cfg = SpyCfg(self.SIMPLE_JSON_CONFIG, env='DEV')
self.assertEquals(cfg['key1'], dev_cfg['key1'])
def test_create_from_ini(self):
cfg = SpyCfg(self.SIMPLE_INI_CONFIG, cfg_type=INI_CFG)
self.assertEqual(cfg['key1'], 'key1')
|
zadoev/spycfg
|
src/tests/test_basics.py
|
Python
|
lgpl-3.0
| 885
|
# -*- coding: utf-8 -*-
from django.conf.urls import include, url
from financeiro import views
# urls do Cadastro
urlpatterns = [
url(r'^$', views.home, name='home'),
# Contratos
url(r'^contratos-a-lancar/$', views.contratos_a_lancar, name='contratos_a_lancar'),
url(r'^contrato/(?P<contrato_id>[0-9]+)/ver/$', views.ver_contrato, name='ver_contrato'),
url(r'^contrato/(?P<contrato_id>[0-9]+)/realizar-lancamento/$', views.realizar_lancamento, name='realizar_lancamento'),
url(r'^contrato/(?P<contrato_id>[0-9]+)/adicionar-lancamento/$', views.contrato_adicionar_lancamento, name='contrato_adicionar_lancamento'),
url(r'^contrato/(?P<contrato_id>[0-9]+)/fechar-contrato/$', views.contrato_fechar, name='contrato_fechar'),
# lancamentos
url(r'^lancamentos/$', views.lancamentos, name='lancamentos'),
url(r'^lancamentos/a-receber/$', views.lancamentos_a_receber, name='lancamentos_a_receber'),
url(r'^lancamentos/a-receber/(?P<lancamento_id>[0-9]+)/receber/$', views.lancamentos_a_receber_receber, name='lancamentos_a_receber_receber'),
url(r'^lancamentos/a-receber/(?P<lancamento_id>[0-9]+)/comentar/$', views.lancamentos_a_receber_comentar, name='lancamentos_a_receber_comentar'),
# processo de antecipacao
url(r'^lancamentos/a-receber/antecipar/$', views.lancamentos_a_receber_antecipar, name='lancamentos_a_receber_antecipar'),
# ajax
url(r'^lancamentos/ajax/a-receber/(?P<busca_tipo>[a-z]+)/(?P<offset>[0-9]+)/$', views.ajax_lancamentos_receber, name='ajax_lancamentos_receber'),
url(r'^lancamentos/ajax/lancamento/(?P<lancamento_id>[0-9]+)/comentarios/$', views.ajax_lancamento_comentarios, name='ajax_lancamento_comentarios'),
url(r'^lancamentos/ajax/lancamento/(?P<lancamento_id>[0-9]+)/informacao-pagamento/$', views.ajax_lancamento_informacao_pagamento, name='ajax_lancamento_informacao_pagamento'),
url(r'^lancamentos/ajax/lancamento/buscar/$', views.ajax_lancamento_buscar, name='ajax_lancamento_buscar'),
]
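# Illustrative usage sketch (assuming these patterns are included without a URL
# namespace): a contract detail link can be reversed as
#   reverse('ver_contrato', kwargs={'contrato_id': 1})
# which resolves to 'contrato/1/ver/' relative to wherever this urlconf is mounted.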
|
dudanogueira/microerp
|
microerp/financeiro/urls.py
|
Python
|
lgpl-3.0
| 1,996
|
#!/usr/bin/env python3
"""
This module provides testing code for the auxi.modelling.financial.des module.
"""
import unittest
from datetime import datetime
from auxi.modelling.financial.des import AccountType
from auxi.modelling.financial.des import GeneralLedgerAccount
from auxi.modelling.financial.des import Transaction
from auxi.modelling.financial.des import TransactionTemplate
from auxi.modelling.financial.des import GeneralLedgerStructure
from auxi.modelling.financial.des import GeneralLedger
from auxi.core.reporting import ReportFormat
__version__ = '0.3.2'
__license__ = 'LGPL v3'
__copyright__ = 'Copyright 2016, Ex Mente Technologies (Pty) Ltd'
__author__ = 'Christoff Kok, Johan Zietsman'
__credits__ = ['Christoff Kok', 'Johan Zietsman']
__maintainer__ = 'Christoff Kok'
__email__ = 'christoff.kok@ex-mente.co.za'
__status__ = 'Planning'
class GeneralLedgerAccountUnitTester(unittest.TestCase):
"""
Tester for the auxi.modelling.financial.des.GeneralLedgerAccount class.
"""
def setUp(self):
self.object = GeneralLedgerAccount("NameA",
description="DescriptionA",
number="010",
account_type=AccountType.asset)
def test_constructor(self):
self.assertEqual(self.object.name, "NameA")
self.assertEqual(self.object.description, "DescriptionA")
self.assertEqual(self.object.number, "010")
self.assertEqual(self.object.account_type, AccountType.asset)
def test_set_parent_path(self):
self.object.set_parent_path("accyA/accyB")
self.assertEqual(self.object.path, "accyA/accyB/NameA")
def test_create_account(self):
new_account = self.object.create_account("TestA",
description="TestA_Desc",
number="011")
self.assertEqual(new_account.name, "TestA")
self.assertEqual(new_account.description, "TestA_Desc")
self.assertEqual(new_account.number, "011")
self.assertEqual(new_account.account_type, self.object.account_type)
self.assertEqual(new_account, self.object.accounts[0])
def test_remove_account(self):
num_accounts = len(self.object.accounts)
self.object.create_account("TestA",
description="TestA_Desc",
number="011")
self.object.remove_account("TestA")
self.assertEqual(len(self.object.accounts), num_accounts)
def test_get_child_account(self):
sub_acc = self.object.create_account(
"TestA",
description="TestA_Desc",
number="011")
sub_sub_acc = sub_acc.create_account(
"TestA1",
description="TestA1_Desc",
number="012")
result = self.object.get_child_account("TestA/TestA1")
self.assertEqual(result.name, sub_sub_acc.name)
self.assertEqual(result.description, sub_sub_acc.description)
self.assertEqual(result.number, sub_sub_acc.number)
self.assertEqual(result.account_type, sub_sub_acc.account_type)
self.assertEqual(result, sub_sub_acc)
class TransactionUnitTester(unittest.TestCase):
"""
Tester for the auxi.modelling.financial.des.Transaction class.
"""
def setUp(self):
self.object = Transaction("NameA",
description="DescriptionA",
tx_date=datetime(2016, 2, 1).date(),
dt_account="Bank",
cr_account="Sales",
source="PigeonSales",
amount=100.00)
def test_constructor(self):
self.assertEqual(self.object.name, "NameA")
self.assertEqual(self.object.description, "DescriptionA")
self.assertEqual(self.object.dt_account, "Bank")
self.assertEqual(self.object.tx_date, datetime(2016, 2, 1).date())
self.assertEqual(self.object.cr_account, "Sales")
self.assertEqual(self.object.source, "PigeonSales")
self.assertEqual(self.object.is_closing_cr_account, False)
self.assertEqual(self.object.is_closing_dt_account, False)
self.assertEqual(self.object.amount, 100.0)
class TransactionTemplateUnitTester(unittest.TestCase):
"""
Tester for the auxi.modelling.financial.des.TransactionTemplate class.
"""
def setUp(self):
self.object = TransactionTemplate("NameA",
description="DescriptionA",
dt_account="Bank",
cr_account="Sales")
def test_constructor(self):
self.assertEqual(self.object.name, "NameA")
self.assertEqual(self.object.description, "DescriptionA")
self.assertEqual(self.object.dt_account, "Bank")
self.assertEqual(self.object.cr_account, "Sales")
class GeneralLedgerStructureUnitTester(unittest.TestCase):
"""
Tester for the auxi.modelling.financial.des.generalledgerstructure class.
"""
def setUp(self):
self.object = GeneralLedgerStructure("NameA",
description="DescriptionA")
def test_constructor(self):
"""
        Test that the variables have been initialised and that the default
        accounts have been created.
"""
self.assertEqual(self.object.name, "NameA")
self.assertEqual(self.object.description, "DescriptionA")
self.assertNotEqual(self.object["Bank"], None)
self.assertEqual(
self.object['Unallocated Income Statement'].account_type,
AccountType.revenue)
self.assertEqual(self.object["Sales"].number, "I10")
self.assertEqual(
self.object["Sales"].account_type,
AccountType.revenue)
self.assertEqual(self.object["Cost of Sales"].number, "I15")
self.assertEqual(
self.object["Cost of Sales"].account_type,
AccountType.expense)
self.assertEqual(self.object["Other Income"].number, "I20")
self.assertEqual(
self.object["Other Income"].account_type,
AccountType.revenue)
self.assertEqual(self.object["Expense"].number, "I25")
self.assertEqual(
self.object["Expense"].account_type,
AccountType.expense)
self.assertEqual(self.object["Tax"].number, "I30")
self.assertEqual(
self.object["Tax"].account_type,
AccountType.expense)
self.assertEqual(self.object["Dividends"].number, "I35")
self.assertEqual(
self.object["Dividends"].account_type,
AccountType.expense)
self.assertEqual(self.object["Share Capital"].number, "B10")
self.assertEqual(
self.object["Share Capital"].account_type,
AccountType.asset)
self.assertEqual(self.object["Retained Income"].number, "B15")
self.assertEqual(
self.object["Retained Income"].account_type,
AccountType.equity)
self.assertEqual(self.object["Shareholders Loan"].number, "B20")
self.assertEqual(
self.object["Shareholders Loan"].account_type,
AccountType.liability)
self.assertEqual(self.object["Long Term Borrowing"].number, "B25")
self.assertEqual(
self.object["Long Term Borrowing"].account_type,
AccountType.liability)
self.assertEqual(
self.object["Other Long Term Liabilities"].number,
"B30")
self.assertEqual(
self.object["Other Long Term Liabilities"].account_type,
AccountType.liability)
self.assertEqual(self.object["Fixed Assets"].number, "B35")
self.assertEqual(
self.object["Fixed Assets"].account_type,
AccountType.asset)
self.assertEqual(self.object["Investments"].number, "B40")
self.assertEqual(
self.object["Investments"].account_type,
AccountType.asset)
self.assertEqual(self.object["Other Fixed Assets"].number, "B45")
self.assertEqual(
self.object["Other Fixed Assets"].account_type,
AccountType.asset)
self.assertEqual(self.object["Inventory"].number, "B50")
self.assertEqual(
self.object["Inventory"].account_type,
AccountType.asset)
self.assertEqual(self.object["Accounts Receivable"].number, "B55")
self.assertEqual(
self.object["Accounts Receivable"].account_type,
AccountType.asset)
self.assertEqual(self.object["Bank"].number, "B60")
self.assertEqual(
self.object["Bank"].account_type,
AccountType.asset)
self.assertEqual(self.object["Bank"]["Default"].number, "0000")
self.assertEqual(
self.object["Bank"]["Default"].account_type,
AccountType.asset)
self.assertEqual(self.object["Other Current Assets"].number, "B65")
self.assertEqual(
self.object["Other Current Assets"].account_type,
AccountType.asset)
self.assertEqual(self.object["Account Payable"].number, "B70")
self.assertEqual(
self.object["Account Payable"].account_type,
AccountType.liability)
self.assertEqual(self.object["Taxation"].number, "B75")
self.assertEqual(
self.object["Taxation"].account_type,
AccountType.liability)
self.assertEqual(
self.object["Other Current Liabilities"].number,
"B80")
self.assertEqual(
self.object["Other Current Liabilities"].account_type,
AccountType.liability)
self.assertEqual(
self.object['Unallocated Income Statement']["Gross Profit"].number,
"0000")
self.assertEqual(
self.object['Unallocated Income Statement']
["Gross Profit"].account_type,
AccountType.revenue)
self.assertEqual(
self.object['Unallocated Income Statement']
["Income Summary"].number,
"0010")
self.assertEqual(
self.object['Unallocated Income Statement']
["Income Summary"].account_type,
AccountType.revenue)
self.assertEqual(
self.object['Retained Income']
["Retained Earnings"].number,
"0000")
self.assertEqual(
self.object['Retained Income']
["Retained Earnings"].account_type,
AccountType.equity)
self.assertEqual(
self.object['Tax']
["Income Tax Expense"].number,
"0000")
self.assertEqual(
self.object['Tax']
["Income Tax Expense"].account_type,
AccountType.expense)
self.assertEqual(self.object.tax_payment_account, "Bank/Default")
def test_get_account(self):
self.object["Retained Income"].create_account(
"TestA",
number="010")
acc = self.object["Bank"].create_account(
"TestB",
number="020")
sub_acc = acc.create_account(
"TestB1",
description="TestB1_Desc",
number="010")
sub_acc.create_account(
"TestB1.1",
description="TestB1.1_Desc",
number="010")
orig = sub_acc.create_account(
"TestB1.2",
description="TestB1.1_Desc",
number="011")
result = self.object.get_account("Bank/TestB/TestB1/TestB1.2")
self.assertEqual(result.name, orig.name)
self.assertEqual(result.description, orig.description)
self.assertEqual(result.number, orig.number)
def test_get_account_decendants(self):
# Set up this test.
self.sales_fish_acc = self.object["Sales"].create_account(
"SalesFish",
description="Sales of Fish",
number="010")
self.sales_barracuda_acc = self.sales_fish_acc.create_account(
"SalesBarracuda",
description="Sales of Barracudas",
number="010")
self.sales_nemo_acc = self.sales_fish_acc.create_account(
"SalesNemo",
description="Sales of Nemos",
number="020")
# perform the test.
result = self.object.get_account_decendants(self.object["Sales"])
self.assertEqual(len(result), 3)
self.assertEqual(result[0], self.sales_fish_acc)
self.assertEqual(result[1], self.sales_barracuda_acc)
self.assertEqual(result[2], self.sales_nemo_acc)
def test_validate_account_name_valid(self):
self.object.validate_account_names(
["Bank/Default", "Retained Income/Retained Earnings"])
def test_validate_account_name_invalid(self):
self.assertRaises(
ValueError,
self.object.validate_account_names,
["invalid_acc_name_a", "invalid_acc_name_b"])
def test_report(self):
report = self.object.report(ReportFormat.string)
line1 = report.split("\n")[0]
self.assertEqual(line1.replace(" ", ""), "TypeNumberNameDescription")
class GeneralLedgerUnitTester(unittest.TestCase):
"""
Tester for the auxi.modelling.financial.des.GeneralLedger class.
"""
def setUp(self):
self.structure = GeneralLedgerStructure("NameA",
description="DescriptionA")
self.structure["Retained Income"].create_account(
"TestA",
description="TestA_Desc",
number="010")
self.object = GeneralLedger("NameA",
self.structure,
description="DescriptionA")
def test_constructor(self):
self.assertEqual(self.object.name, "NameA")
self.assertEqual(self.object.description, "DescriptionA")
self.assertEqual(self.structure, self.object.structure)
def test_create_transaction(self):
new_tx = self.object.create_transaction(
"TestA",
description="TestA_Desc",
tx_date=datetime(2016, 2, 1).date(),
dt_account="Bank",
cr_account="Sales",
source="Peanut Sales",
amount=20.00)
tx_list = self.object.transactions
self.assertEqual(new_tx.name, tx_list[0].name)
self.assertEqual(new_tx.tx_date, tx_list[0].tx_date)
self.assertEqual(new_tx.dt_account, tx_list[0].dt_account)
self.assertEqual(new_tx.cr_account, tx_list[0].cr_account)
self.assertEqual(new_tx.source, tx_list[0].source)
self.assertEqual(new_tx.amount, tx_list[0].amount)
if __name__ == '__main__':
unittest.main()
|
christoffkok/auxi.0
|
src/modelling/financial/des_test.py
|
Python
|
lgpl-3.0
| 15,040
|
from .basic import ProcFile
class MemInfo(ProcFile):
filename = '/proc/meminfo'
def names(self):
return [line.split(':')[0].lower() for line in self._readfile()]
def get(self, name, default=None):
for line in self._readfile():
mem_info = line.split()
if name + ':' == mem_info[0].lower():
return int(mem_info[1])
else:
return default
def __getattr__(self, name):
if name in self.names():
return self.get(name)
else:
raise AttributeError
if __name__ == '__main__':
MEMINFO = MemInfo()
print(MEMINFO.memtotal)
print(MEMINFO.memfree)
|
thuck/proc
|
proc/meminfo.py
|
Python
|
lgpl-3.0
| 684
|
# MIT License
#
# Copyright (c) 2010 Gaetan Guidet
#
# This file is part of gcore.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import _gcore
from _gcore import *
|
gatgui/gcore
|
src/py/gcore.py
|
Python
|
lgpl-3.0
| 1,178
|
import sys
from direct.showbase.ShowBase import ShowBase
from panda3d.core import AmbientLight
from panda3d.core import DirectionalLight
from panda3d.core import Vec3
from panda3d.core import Vec4
from panda3d.core import MeshDrawer
from world import World
from car import Car
from server import Server
class Simulador(ShowBase):
def __init__(self):
ShowBase.__init__(self)
base.setBackgroundColor(0.1, 0.1, 0.8, 1)
base.setFrameRateMeter(True)
base.cam.setPos(3, -30, 4)
base.cam.lookAt(3, 0, 0)
# Light
alight = AmbientLight('ambientLight')
alight.setColor(Vec4(0.5, 0.5, 0.5, 1))
alightNP = render.attachNewNode(alight)
dlight = DirectionalLight('directionalLight')
dlight.setDirection(Vec3(1, 1, -1))
dlight.setColor(Vec4(0.7, 0.7, 0.7, 1))
dlightNP = render.attachNewNode(dlight)
render.clearLight()
render.setLight(alightNP)
render.setLight(dlightNP)
# Input
base.accept('escape', self.doExit)
base.accept('r', self.doReset)
base.accept('f1', self.toggleWireframe)
base.accept('f2', self.toggleTexture)
base.accept('f3', self.toggleDebug)
base.accept('f5', self.doScreenshot)
base.accept('i', self.toggleVerify)
# World, Physics
self.world = None
self.setup()
# Task
taskMgr.add(self.update, 'updateWorld')
# _____HANDLER_____
    def toggleVerify(self):
        # Guard against the first toggle, before smartStop has been set
        self.smartStop = not getattr(self, 'smartStop', False)
def doExit(self):
self.cleanup()
sys.exit(1)
def doReset(self):
self.cleanup()
self.setup()
def toggleWireframe(self):
base.toggleWireframe()
def toggleTexture(self):
base.toggleTexture()
def toggleDebug(self):
if self.world.debug.isHidden():
self.world.debug.show()
else:
self.world.debug.hide()
def doScreenshot(self):
base.screenshot('Bullet')
def cleanup(self):
worldNP = self.world.node
worldNP.removeNode()
def setup(self):
self.world = World()
# Car
self.vehicle = Car(self.world, (1.5, 50, 1), (180, 0, 0))
self.vehicle2 = Car(self.world, (50, 0, 1), (90, 0, 0))
# Server
server = Server()
cars = [self.vehicle, self.vehicle2]
for car in cars:
taskMgr.add(car.AI.area_prediction, "area prediction")
server.add_client(car.AI)
taskMgr.add(server.verify, "prediction server")
# ____TASK___
def update(self, task):
dt = globalClock.getDt()
self.world.bulletW.doPhysics(dt, 10, 0.008)
self.vehicle.forward()
return task.cont
sim = Simulador()
base.run()
|
leotada/dontcrash
|
sim.py
|
Python
|
lgpl-3.0
| 2,923
|
import unittest
from collections import OrderedDict
from models.yum_repositories import YumRepository, YumRepositories
from libs.model import ModelCollection, Model
class YumRepositoryTest(unittest.TestCase):
def testInit(self):
e = YumRepository(environment="test", data='lol')
for a in ['environment', 'data']:
self.assertTrue(hasattr(e, a))
self.assertEqual('test', e.environment)
self.assertEqual('lol', e.data)
self.assertFalse(hasattr(e, 'derp'))
self.assertIsInstance(e, Model)
class YumRepositoriesTest(unittest.TestCase):
def testInit(self):
yr = YumRepositories()
self.assertIsInstance(yr, ModelCollection)
self.assertGreater(yr._items, 0)
self.assertIsInstance(yr.all(), list)
for i in yr.all():
self.assertIsInstance(i, YumRepository)
def testFirst(self):
yr = YumRepositories()
self.assertEqual(yr.first(), None)
entity = yr.new()
yr.add(entity, persist=False)
self.assertEqual(yr.first(), entity)
def testNew(self):
yr = YumRepositories()
self.assertIsInstance(yr.new(), YumRepository)
e = yr.new(environment="lol")
self.assertEqual(e.environment, 'lol')
self.assertEqual(e.data, None)
def testAdd(self):
yr = YumRepositories()
before_count = len(yr.all())
self.assertTrue(yr.add(yr.new(), persist=False))
after_count = len(yr.all())
self.assertGreater(after_count, before_count)
self.assertEqual(before_count + 1, after_count)
def testDelete(self):
pass
def test_GenerateSelectQuery(self):
yr = YumRepositories()
expected = 'SELECT * FROM yum_repositories'
self.assertEqual(yr._generate_select_query(), expected)
def test_InsertQuery(self):
yr = YumRepositories()
entity = yr.new()
expected = OrderedDict([('environment', None), ('data', None)])
data = yr._generate_query_data(entity)
self.assertEqual(expected, data)
expected = 'INSERT INTO yum_repositories(environment,data) VALUES (?,?)'
self.assertEqual(yr._generate_insert_query(data), expected)
def testTableDefinition(self):
yr = YumRepositories()
expected = 'CREATE TABLE yum_repositories (environment TEXT PRIMARY KEY,data TEXT)'
self.assertEqual(str(yr.table_definition()), expected)
    def test_GenerateDeleteQuery(self):
yr = YumRepositories()
entity = yr.new()
expected = 'DELETE FROM yum_repositories WHERE environment = ?'
self.assertEqual(yr._generate_delete_query(entity.environment), expected)
|
masom/Puck
|
server/tests/test_models/test_yum_repositories.py
|
Python
|
lgpl-3.0
| 2,700
|
import sys
import json
import unittest
import packet
class PacketManagerTest(unittest.TestCase):
def setUp(self):
self.manager = PacketMockManager(auth_token="foo")
def test_get_user(self):
user = self.manager.get_user()
self.assertEqual(user.get('full_name'), 'Aaron Welch')
def test_list_facilities(self):
facilities = self.manager.list_facilities()
for facility in facilities:
str(facility)
repr(facility)
self.assertIsInstance(facility, packet.Facility)
def test_list_plans(self):
plans = self.manager.list_plans()
for plan in plans:
str(plan)
repr(plan)
self.assertIsInstance(plan, packet.Plan)
def test_list_operating_systems(self):
oss = self.manager.list_operating_systems()
for os in oss:
str(os)
repr(os)
self.assertIsInstance(os, packet.OperatingSystem)
def test_list_projects(self):
projects = self.manager.list_projects()
self.assertTrue(isinstance(projects, list))
for project in projects:
str(project)
repr(project)
self.assertIsInstance(project, packet.Project)
def test_get_project(self):
project = self.manager.get_project('438659f0')
self.assertIsInstance(project, packet.Project)
def test_create_project(self):
project = self.manager.create_project('test project')
self.assertIsInstance(project, packet.Project)
def test_update_project(self):
name = 'updated name'
project = self.manager.get_project('438659f0')
project.name = name
project.update()
self.assertEqual(project.name, name)
self.assertIsInstance(project, packet.Project)
def test_delete_project(self):
project = self.manager.get_project('438659f0')
self.assertTrue(project.delete())
def test_list_devices(self):
devices = self.manager.list_devices('438659f0')
for device in devices:
str(device)
repr(device)
self.assertIsInstance(device, packet.Device)
def test_create_device(self):
device = self.manager.create_device('438659f0', 'hostname', 'baremetal_0', 'ewr1', 'ubuntu_14_04')
self.assertIsInstance(device, packet.Device)
def test_get_device(self):
device = self.manager.get_device('9dec7266')
self.assertIsInstance(device, packet.Device)
def test_device_actions(self):
device = self.manager.get_device('9dec7266')
self.assertTrue(device.power_off() is None)
self.assertTrue(device.power_on() is None)
self.assertTrue(device.reboot() is None)
def test_update_device(self):
hostname = 'updated hostname'
device = self.manager.get_device('9dec7266')
device.hostname = hostname
device.update()
self.assertEqual(device.hostname, hostname)
self.assertIsInstance(device, packet.Device)
def test_delete_device(self):
device = self.manager.get_device('9dec7266')
self.assertTrue(device.delete())
def test_list_ssh_keys(self):
keys = self.manager.list_ssh_keys()
for key in keys:
str(key)
repr(key)
self.assertIsInstance(key, packet.SSHKey)
def test_get_ssh_key(self):
key = self.manager.get_ssh_key('084a5dec')
self.assertIsInstance(key, packet.SSHKey)
def test_create_ssh_key(self):
public_key = "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDI4pIqzpb5g3992h+yr527VRcaB68KE4vPjWPPoiQws49KIs2NMcOzS9QE4\
641uW1u5ML2HgQdfYKMF/YFGnI1Y6xV637DjhDyZYV9LasUH49npSSJjsBcsk9JGfUpNAOdcgpFzK8V90eiOrOC5YncxdwwG8pwjFI9nNVPCl4hYEu1iXdy\
ysHvkFfS2fklsNjLWrzfafPlaen+qcBxygCA0sFdW/7er50aJeghdBHnE2WhIKLUkJxnKadznfAge7oEe+3LLAPfP+3yHyvp2+H0IzmVfYvAjnzliYetqQ8\
pg5ZW2BiJzvqz5PebGS70y/ySCNW1qQmJURK/Wc1bt9en"
key = self.manager.create_ssh_key(label="sshkey-name", public_key=public_key)
self.assertIsInstance(key, packet.SSHKey)
self.assertEquals(key.key, public_key)
def test_delete_ssh_key(self):
key = self.manager.get_ssh_key('084a5dec')
self.assertTrue(key.delete())
def test_update_ssh_key(self):
label = 'updated label'
key = self.manager.get_ssh_key('084a5dec')
key.label = label
key.update()
self.assertEqual(key.label, label)
self.assertIsInstance(key, packet.SSHKey)
def test_list_volumes(self):
volumes = self.manager.list_volumes('438659f0')
for volume in volumes:
self.assertIsInstance(volume, packet.Volume)
def test_create_volume(self):
volume = self.manager.create_volume('438659f0', 'volume description', 'storage_0', '100', 'ewr1', 7, '1day')
self.assertIsInstance(volume, packet.Volume)
def test_get_volume(self):
volume = self.manager.get_volume('f9a8a263')
str(volume)
repr(volume)
self.assertIsInstance(volume, packet.Volume)
def test_update_volume(self):
description = 'updated description'
volume = self.manager.get_volume('f9a8a263')
volume.description = description
volume.update()
self.assertEqual(volume.description, description)
self.assertIsInstance(volume, packet.Volume)
def test_delete_volume(self):
volume = self.manager.get_volume('f9a8a263')
self.assertTrue(volume.delete())
def test_list_volume_snapshots(self):
volume = self.manager.get_volume('f9a8a263')
snaps = volume.list_snapshots()
for snap in snaps:
str(snap)
repr(snap)
snap.delete()
def test_attach_volume(self):
volume = self.manager.get_volume('f9a8a263')
self.assertTrue(volume.attach('9dec7266') is None)
def test_detach_volume(self):
volume = self.manager.get_volume('f9a8a263')
self.assertTrue(volume.detach())
def test_volume_create_snapshot(self):
volume = self.manager.get_volume('f9a8a263')
volume.create_snapshot()
def test_list_capacity(self):
capacities = self.manager.list_capacity()
for capacity in capacities:
str(capacity)
repr(capacity)
print capacity
print packet.Capacity
self.assertIsInstance(capacity, packet.Capacity)
def test_check_capacity(self):
device = self.manager.check_capacity('ewr1', 'baremetal_0', '7')
print device
print packet.Capacity
self.assertIsInstance(device, packet.Capacity)
class PacketMockManager(packet.Manager):
def call_api(self, method, type='GET', params=None):
if type == 'DELETE':
return True
else:
fixture = '%s_%s' % (type.lower(), method.lower())
with open('fixtures/%s.json' % (fixture.replace('/', '_').split("?")[0])) as data_file:
return json.load(data_file)
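# Illustrative note (not part of the original tests): call_api() above serves
# every non-DELETE request from a canned JSON fixture, e.g. a GET to
# 'projects/438659f0/devices' is read from
# 'fixtures/get_projects_438659f0_devices.json' (query strings are dropped).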
if __name__ == '__main__':
sys.exit(unittest.main())
|
disha94/packet-python
|
test/test_packet.py
|
Python
|
lgpl-3.0
| 7,104
|
# -*- coding: utf-8 -*-
# ----------------------------------------------------------------------------
# Copyright (C) 2013-2019 British Crown (Met Office) & Contributors.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# ----------------------------------------------------------------------------
"""This tests the ISO 8601 parsing and data model functionality."""
import datetime
import pytest
import random
import unittest
import concurrent.futures
from metomi.isodatetime.data import TimePoint, Duration, get_days_since_1_ad
def daterange(start_date, end_date):
"""https://stackoverflow.com/a/1060330"""
for n in range(1 + int((end_date - start_date).days)):
yield start_date + datetime.timedelta(n)
test_duration_attributes = [
("weeks", 110),
("days", 770),
("hours", 770 * 24),
("minutes", 770 * 24 * 60),
("seconds", 770 * 24 * 60 * 60)
]
@pytest.mark.slow
class TestTimePointCompat(unittest.TestCase):
"""Test time point compatibility with "datetime"."""
def test_timepoint(self):
"""Test the time point data model (takes a while).
        For a range of years (e.g. 1801 to 2403) it iterates through each
        year, then through each day in that year, performing a series of
        checks and failing if any operation results in an error."""
for test_year in range(1801, 2403):
my_date = datetime.datetime(test_year, 1, 1)
stop_date = datetime.datetime(test_year + 1, 1, 1)
# test each day in the year concurrently
# using the number of cores in a travis ci server for max_workers
with concurrent.futures.ThreadPoolExecutor(max_workers=2)\
as executor:
futures = {executor.submit(self._do_test_dates, d):
d for d in daterange(my_date, stop_date)}
concurrent.futures.wait(futures)
# Each day takes approx 0.5s to compute, so let's give
# it four times the normal as buffer
for _, future in enumerate(
concurrent.futures.as_completed(futures, timeout=2.0)):
future.result() # This will also raise any exceptions
def _do_test_dates(self, my_date):
"""Performs a series of tests against a given date.
This method does some time consuming operations, which are not IO
bound, so this method is a good candidate to be run concurrently.
:param my_date: a date to be tested
:type my_date: datetime.datetime
"""
ctrl_data = my_date.isocalendar()
test_date = TimePoint(
year=my_date.year,
month_of_year=my_date.month,
day_of_month=my_date.day
)
test_week_date = test_date.to_week_date()
test_data = test_week_date.get_week_date()
self.assertEqual(test_data, ctrl_data)
ctrl_data = (my_date.year, my_date.month, my_date.day)
test_data = test_week_date.get_calendar_date()
self.assertEqual(test_data, ctrl_data)
ctrl_data = my_date.toordinal()
year, day_of_year = test_date.get_ordinal_date()
test_data = day_of_year
test_data += get_days_since_1_ad(year - 1)
self.assertEqual(test_data, ctrl_data)
for attribute, attr_max in test_duration_attributes:
kwargs = {attribute: random.randrange(0, attr_max)}
ctrl_data = my_date + datetime.timedelta(**kwargs)
ctrl_data = ctrl_data.year, ctrl_data.month, ctrl_data.day
test_data = (
(test_date + Duration(**kwargs)).get_calendar_date())
self.assertEqual(test_data, ctrl_data)
ctrl_data = my_date - datetime.timedelta(**kwargs)
ctrl_data = ctrl_data.year, ctrl_data.month, ctrl_data.day
# TBD: the subtraction is quite slow. Much slower than other
# operations. Could be related to the fact it converts the value
# in kwargs to negative multiplying by -1 (i.e. from __sub__ to
# __mul__), and also adds it to the date (i.e. __add__).
# Profiling the tests, the __sub__ operation used in the next
# line will appear amongst the top of time consuming operations.
test_data = (
(test_date - Duration(**kwargs)).get_calendar_date())
self.assertEqual(test_data, ctrl_data)
kwargs = {}
for attribute, attr_max in test_duration_attributes:
kwargs[attribute] = random.randrange(0, attr_max)
test_date_minus = test_date - Duration(**kwargs)
test_data = test_date - test_date_minus
ctrl_data = Duration(**kwargs)
self.assertEqual(test_data, ctrl_data)
test_data = test_date_minus + (test_date - test_date_minus)
ctrl_data = test_date
self.assertEqual(test_data, ctrl_data)
test_data = (test_date_minus + Duration(**kwargs))
ctrl_data = test_date
self.assertEqual(test_data, ctrl_data)
ctrl_data = (
my_date +
datetime.timedelta(minutes=450) +
datetime.timedelta(hours=5) -
datetime.timedelta(seconds=500, weeks=5))
ctrl_data = [
(ctrl_data.year, ctrl_data.month, ctrl_data.day),
(ctrl_data.hour, ctrl_data.minute, ctrl_data.second)]
test_data = (
test_date + Duration(minutes=450) +
Duration(hours=5) -
Duration(weeks=5, seconds=500)
)
test_data = [
test_data.get_calendar_date(),
test_data.get_hour_minute_second()]
self.assertEqual(test_data, ctrl_data)
if __name__ == '__main__':
unittest.main()
|
matthewrmshin/isodatetime
|
metomi/isodatetime/tests/test_time_point.py
|
Python
|
lgpl-3.0
| 6,468
|
import re
from collections import defaultdict
import pdb
class Computer(object):
DIGIT = re.compile(r"\d+")
def __init__(self):
self.registers = defaultdict(lambda: 0)
def run(self, instructions):
self.cached = []
for instruction in instructions:
split = re.split('\s+', instruction.strip())
command_name, command_args = split[0], split[1:]
command = getattr(self, command_name)
command_args = map(lambda a: (int(a) if self.DIGIT.match(a) else a), command_args)
self.cached.append((command, command_args))
self.cached = tuple(self.cached)
self.i = 0
inst_count = 0
while self.i < len(instructions):
inst, args = self.cached[self.i]
inst(*args)
inst_count += 1
# if inst_count % 10000 == 0:
# print inst_count
# print inst_count
def resolve(self, register_or_value):
if isinstance(register_or_value, int):
return register_or_value
else:
return self.registers[register_or_value]
def cpy(self, x, y):
self.registers[y] = self.resolve(x)
self.i += 1
def inc(self, x):
self.registers[x] += 1
self.i += 1
def dec(self, x):
self.registers[x] -= 1
self.i += 1
def jnz(self, x, y):
if self.resolve(x) == 0:
self.i += 1
else:
self.i += int(y)
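# Illustrative usage sketch (instruction strings follow the assembunny syntax
# of the puzzle input):
#   computer = Computer()
#   computer.run(['cpy 41 a', 'inc a', 'jnz a 2', 'dec a'])
#   computer.registers['a']  # -> 42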
if __name__ == "__main__":
instructions = open('day12/input.txt').readlines()
computer = Computer()
#computer.registers['c'] = 1
computer.run(instructions)
print computer.registers
|
chrisb87/advent_of_code_2016
|
day12/day12.py
|
Python
|
unlicense
| 1,422
|
#!/usr/bin/env python
import sqlite3
def main():
with sqlite3.connect("new.db") as connection:
c = connection.cursor()
sql = {
'average': "select avg(population) from population;",
'maximum': "select max(population) from population;",
'minimum': "select min(population) from population;",
'sum': "select sum(population) from population;",
'count': "select count(city) from population;"
}
        for keys, values in sql.items():
c.execute(values)
result = c.fetchone()
print("%s: %s" % (keys, result))
if __name__ == "__main__":
main()
|
alekscl/tripping-dangerzone
|
sql/sqlj.py
|
Python
|
unlicense
| 674
|
import os
import json
__location__ = os.path.realpath(
os.path.join(os.getcwd(), os.path.dirname(__file__)))
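# __location__ resolves to the directory containing this module, so the
# whitelist JSON is loaded relative to the module rather than the process CWD.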
with open(os.path.join(__location__, 'para_whitelist.json')) as data_file:
whitelist = json.load(data_file)
|
lukaasp/libs
|
aws_xray_sdk/ext/botocore/resources.py
|
Python
|
unlicense
| 228
|
# coding: u8
import datetime
from hashlib import md5 as m5
import traceback
now = datetime.datetime.now
now_str = lambda: dt2str(now())
yesterday = lambda: datetime.date.today() - datetime.timedelta(days=1)
yesterday_str = lambda: yesterday().strftime('%Y-%m-%d')
tomorrow = lambda: datetime.date.today() + datetime.timedelta(days=1)
tomorrow_str = lambda: tomorrow().strftime('%Y-%m-%d')
str2dt = lambda s: datetime.datetime.strptime(s, '%Y-%m-%d %H:%M:%S')
dt2str = lambda dt: dt.strftime('%Y-%m-%d %H:%M:%S')
md5 = lambda s: m5(s).hexdigest()
def async(max_workers=10, debug=False):
from concurrent.futures import ThreadPoolExecutor
from functools import partial, wraps
import tornado.ioloop
import tornado.web
EXECUTOR = ThreadPoolExecutor(max_workers=max_workers)
def unblock(f):
@tornado.web.asynchronous
@wraps(f)
def wrapper(*args, **kwargs):
self = args[0]
def callback(future):
try:
self.write(future.result() or '')
except:
if debug:
try:
self.write('<pre>%s</pre>' % traceback.format_exc())
except:
pass
finally:
self.try_finish()
EXECUTOR.submit(
partial(f, *args, **kwargs)
).add_done_callback(
lambda future: tornado.ioloop.IOLoop.instance().add_callback(
partial(callback, future)))
return wrapper
return unblock
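# Hypothetical usage sketch for the decorator above (handler name and body are
# assumptions, not part of this module): the wrapped method runs in the thread
# pool and its return value is written back on the IOLoop thread.
#
#   class ReportHandler(tornado.web.RequestHandler):
#       @async(max_workers=4, debug=True)
#       def get(self):
#           return compute_expensive_report()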
|
Shu-Ji/multi-supervisord-web-admin
|
src/utils/__init__.py
|
Python
|
unlicense
| 1,611
|
from itertools import takewhile
codontable = {
'ATA':'I', 'ATC':'I', 'ATT':'I', 'ATG':'M',
'ACA':'T', 'ACC':'T', 'ACG':'T', 'ACT':'T',
'AAC':'N', 'AAT':'N', 'AAA':'K', 'AAG':'K',
'AGC':'S', 'AGT':'S', 'AGA':'R', 'AGG':'R',
'CTA':'L', 'CTC':'L', 'CTG':'L', 'CTT':'L',
'CCA':'P', 'CCC':'P', 'CCG':'P', 'CCT':'P',
'CAC':'H', 'CAT':'H', 'CAA':'Q', 'CAG':'Q',
'CGA':'R', 'CGC':'R', 'CGG':'R', 'CGT':'R',
'GTA':'V', 'GTC':'V', 'GTG':'V', 'GTT':'V',
'GCA':'A', 'GCC':'A', 'GCG':'A', 'GCT':'A',
'GAC':'D', 'GAT':'D', 'GAA':'E', 'GAG':'E',
'GGA':'G', 'GGC':'G', 'GGG':'G', 'GGT':'G',
'TCA':'S', 'TCC':'S', 'TCG':'S', 'TCT':'S',
'TTC':'F', 'TTT':'F', 'TTA':'L', 'TTG':'L',
'TAC':'Y', 'TAT':'Y', 'TAA':'_', 'TAG':'_',
'TGC':'C', 'TGT':'C', 'TGA':'_', 'TGG':'W',
}
def compl(args):
output = ''
for c in args:
if (c == 'A'):
output += 'T'
elif (c == 'T'):
output += 'A'
elif (c == 'C'):
output += 'G'
else:
output += 'C'
return output
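# Note: compl() returns only the complement; callers build the reverse
# complement by reversing the result themselves (see seq[::-1] below).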
def translate_dna(sequence, stop_codons = ('TAA', 'TGA', 'TAG')):
start = sequence.find('ATG')
# Take sequence from the first start codon
trimmed_sequence = sequence[start:]
# Split it into triplets
codons = [trimmed_sequence[i:i+3] for i in range(0, len(trimmed_sequence), 3)]
found = False
for c in codons:
if (c in stop_codons):
found = True
    if not found:
return '', start
# Take all codons until first stop codon
coding_sequence = takewhile(lambda x: x not in stop_codons and len(x) == 3 , codons)
# Translate and join into string
protein_sequence = ''.join([codontable[codon] for codon in coding_sequence])
# This line assumes there is always stop codon in the sequence
return "{0}".format(protein_sequence), start
class Fasta:
def __init__(self, name, sequence):
self.name = name
self.sequence = sequence
def read_fasta(text):
items = []
index = 0
aninstance = None
seq = ''
name = ''
for line in text.splitlines():
if line.startswith(">"):
if index >= 1:
items.append(aninstance)
index += 1
name = line[1:]
seq = ''
aninstance = Fasta(name, seq)
else:
seq += line
aninstance = Fasta(name, seq)
items.append(aninstance)
return items
if __name__ == '__main__':
sequence = '''
>Rosalind_6173
TTCTAAGGTCCCTCGTAAAGCAACAGGCCCGCAAACGGCGAACGCCCTAGGCCACCCTGA
AACGCTAACACCGACAACAATGTGCAGCCCGCGCCGGATCGCAAGCGTTGATTACACCAC
ACCGCGAAGCGATAAAAAAAGATGTTTAGCTCCCCCATCGGGCTCCTTGTGATCCGCGAC
GGACGCTTCTAGAGACCACCTTAGGCCAACTCAACCGGCGCGACCGCACCCGTTACTCCA
GTGTACTCTACAAATTCAAGTAGTTGTAAAGGGGGCATAAAAAGCGCAGTATCGCTCTGC
CGGTTGCACCTGCGATTGGCAAACGAGGCTGGCCCCGTGGATCGGTATTCCAAACTATGC
GTTTTGGGGCGGGCTCAACTCGGGAAAGACTTAGATAAGTTCTAGCGTTTATCGAGGGGG
CTCATGCTCTAACGTAGACATTAGCTAATGTCTACGTTAGAGCATCCTGACCACCATCCT
ACCGAGAGTAAGTATGGAACGTACACGCAGCTATGGATGGCATACCACTTAGTCCGGGAG
CTTTTTGGCTGATGCTTCGGCGGAGAGAGGTTGGGGAACGCGAATGAAACGCACTCTGTC
TGTGCAAAAAAAAGCAAACCGATCTGGAACCCGACGGATAGTAAAGGGTCGGGGGGTCGC
TCAGACCGCCTGCTAGGGTGTCATGCTCTACCATATGCCGCCGGGCTTTACGGAACCTGT
GCGTAGCGCAGCATGACGTTCAGTCGCAAGCCGTCCTTGTTACAAGTAGACAGCCCAACA
GGTAGCGTTAATCCTTTCTTCTTACATTTTACTAATAGGCTCCGTAGTATTCCTATGGTC
GTGCCCACATGTCCCACAATCATGTTGATGCATAGGACAGCTGGGATC
'''
proteins = list()
for i in read_fasta(sequence):
seq = i.sequence
while(True):
out, start = translate_dna(seq)
if (out != '' and out not in proteins):
proteins.append(out)
if (start == -1):
                break
seq = seq[start+3:len(seq)]
#other frames
seq = i.sequence
seq = compl(seq)
seq = seq[::-1]
while(True):
out, start = translate_dna(seq)
if (out != '' and out not in proteins):
proteins.append(out)
if (start == -1):
                break
seq = seq[start+3:len(seq)]
for p in proteins:
print p
|
kinow/rosalind-exercises
|
src/orf.py
|
Python
|
unlicense
| 4,182
|
# coding: utf-8
"""
Grafeas API
An API to insert and retrieve annotations on cloud artifacts. # noqa: E501
OpenAPI spec version: v1alpha1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import grafeas
from grafeas.models.api_source import ApiSource # noqa: E501
from grafeas.rest import ApiException
class TestApiSource(unittest.TestCase):
"""ApiSource unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testApiSource(self):
"""Test ApiSource"""
# FIXME: construct object with mandatory attributes with example values
# model = grafeas.models.api_source.ApiSource() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
|
grafeas/client-python
|
test/test_api_source.py
|
Python
|
apache-2.0
| 828
|
#!/usr/bin/env python
# Copyright 2017 The LibYuv Project Authors. All rights reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
import glob
import os
import shutil
import sys
import tempfile
import unittest
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
PARENT_DIR = os.path.join(SCRIPT_DIR, os.pardir)
sys.path.append(PARENT_DIR)
import roll_deps
from roll_deps import CalculateChangedDeps, GetMatchingDepsEntries, \
ParseDepsDict, ParseLocalDepsFile, UpdateDepsFile
TEST_DATA_VARS = {
'chromium_git': 'https://chromium.googlesource.com',
'chromium_revision': '1b9c098a08e40114e44b6c1ec33ddf95c40b901d',
}
DEPS_ENTRIES = {
'src/build': 'https://build.com',
'src/buildtools': 'https://buildtools.com',
'src/testing/gtest': 'https://gtest.com',
'src/testing/gmock': 'https://gmock.com',
}
BUILD_OLD_REV = '52f7afeca991d96d68cf0507e20dbdd5b845691f'
BUILD_NEW_REV = 'HEAD'
BUILDTOOLS_OLD_REV = '64e38f0cebdde27aa0cfb405f330063582f9ac76'
BUILDTOOLS_NEW_REV = '55ad626b08ef971fd82a62b7abb325359542952b'
class TestError(Exception):
pass
class FakeCmd(object):
def __init__(self):
self.expectations = []
def add_expectation(self, *args, **kwargs):
returns = kwargs.pop('_returns', None)
self.expectations.append((args, kwargs, returns))
def __call__(self, *args, **kwargs):
if not self.expectations:
raise TestError('Got unexpected\n%s\n%s' % (args, kwargs))
exp_args, exp_kwargs, exp_returns = self.expectations.pop(0)
if args != exp_args or kwargs != exp_kwargs:
message = 'Expected:\n args: %s\n kwargs: %s\n' % (exp_args, exp_kwargs)
message += 'Got:\n args: %s\n kwargs: %s\n' % (args, kwargs)
raise TestError(message)
return exp_returns
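# FakeCmd stands in for roll_deps._RunCommand: expectations are queued as
# (args, kwargs, return value) tuples and replayed in order; any unexpected
# or mismatched call raises TestError and fails the test.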
class TestRollChromiumRevision(unittest.TestCase):
def setUp(self):
self._output_dir = tempfile.mkdtemp()
for test_file in glob.glob(os.path.join(SCRIPT_DIR, 'testdata', '*')):
shutil.copy(test_file, self._output_dir)
self._libyuv_depsfile = os.path.join(self._output_dir, 'DEPS')
self._old_cr_depsfile = os.path.join(self._output_dir, 'DEPS.chromium.old')
self._new_cr_depsfile = os.path.join(self._output_dir, 'DEPS.chromium.new')
self.fake = FakeCmd()
self.old_RunCommand = getattr(roll_deps, '_RunCommand')
setattr(roll_deps, '_RunCommand', self.fake)
def tearDown(self):
shutil.rmtree(self._output_dir, ignore_errors=True)
self.assertEqual(self.fake.expectations, [])
setattr(roll_deps, '_RunCommand', self.old_RunCommand)
def testUpdateDepsFile(self):
new_rev = 'aaaaabbbbbcccccdddddeeeeefffff0000011111'
current_rev = TEST_DATA_VARS['chromium_revision']
UpdateDepsFile(self._libyuv_depsfile, current_rev, new_rev, [])
with open(self._libyuv_depsfile) as deps_file:
deps_contents = deps_file.read()
self.assertTrue(new_rev in deps_contents,
'Failed to find %s in\n%s' % (new_rev, deps_contents))
def testParseDepsDict(self):
with open(self._libyuv_depsfile) as deps_file:
deps_contents = deps_file.read()
local_scope = ParseDepsDict(deps_contents)
vars_dict = local_scope['vars']
def assertVar(variable_name):
self.assertEquals(vars_dict[variable_name], TEST_DATA_VARS[variable_name])
assertVar('chromium_git')
assertVar('chromium_revision')
self.assertEquals(len(local_scope['deps']), 3)
def testGetMatchingDepsEntriesReturnsPathInSimpleCase(self):
entries = GetMatchingDepsEntries(DEPS_ENTRIES, 'src/testing/gtest')
self.assertEquals(len(entries), 1)
self.assertEquals(entries[0], DEPS_ENTRIES['src/testing/gtest'])
def testGetMatchingDepsEntriesHandlesSimilarStartingPaths(self):
entries = GetMatchingDepsEntries(DEPS_ENTRIES, 'src/testing')
self.assertEquals(len(entries), 2)
def testGetMatchingDepsEntriesHandlesTwoPathsWithIdenticalFirstParts(self):
entries = GetMatchingDepsEntries(DEPS_ENTRIES, 'src/build')
self.assertEquals(len(entries), 1)
self.assertEquals(entries[0], DEPS_ENTRIES['src/build'])
def testCalculateChangedDeps(self):
_SetupGitLsRemoteCall(self.fake,
'https://chromium.googlesource.com/chromium/src/build', BUILD_NEW_REV)
libyuv_deps = ParseLocalDepsFile(self._libyuv_depsfile)
new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile)
changed_deps = CalculateChangedDeps(libyuv_deps, new_cr_deps)
self.assertEquals(len(changed_deps), 2)
self.assertEquals(changed_deps[0].path, 'src/build')
self.assertEquals(changed_deps[0].current_rev, BUILD_OLD_REV)
self.assertEquals(changed_deps[0].new_rev, BUILD_NEW_REV)
self.assertEquals(changed_deps[1].path, 'src/buildtools')
self.assertEquals(changed_deps[1].current_rev, BUILDTOOLS_OLD_REV)
self.assertEquals(changed_deps[1].new_rev, BUILDTOOLS_NEW_REV)
def _SetupGitLsRemoteCall(cmd_fake, url, revision):
cmd = ['git', 'ls-remote', url, revision]
cmd_fake.add_expectation(cmd, _returns=(revision, None))
if __name__ == '__main__':
unittest.main()
|
helloworldgnu/MultiMedia
|
libyuv/tools_libyuv/autoroller/unittests/roll_deps_test.py
|
Python
|
apache-2.0
| 5,304
|
# Copyright (c) 2016 Canonical Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import os
import os.path
from unittest import TestCase, mock
from urllib import parse
import pytest
import requests
import requests_unixsocket
from pylxd import client, exceptions
class TestClient(TestCase):
"""Tests for pylxd.client.Client."""
def setUp(self):
self.get_patcher = mock.patch("pylxd.client._APINode.get")
self.get = self.get_patcher.start()
self.post_patcher = mock.patch("pylxd.client._APINode.post")
self.post = self.post_patcher.start()
response = mock.MagicMock(status_code=200)
response.json.return_value = {
"metadata": {
"auth": "trusted",
"environment": {"storage": "zfs"},
}
}
self.get.return_value = response
post_response = mock.MagicMock(status_code=200)
self.post.return_value = post_response
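        # The mocked GET above reports a trusted connection and a zfs storage
        # backend, which is all client.Client() needs to construct in these tests.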
def tearDown(self):
self.get_patcher.stop()
self.post_patcher.stop()
@mock.patch("os.path.exists")
def test_create(self, _path_exists):
"""Client creation sets default API endpoint."""
_path_exists.return_value = False
expected = "http+unix://%2Fvar%2Flib%2Flxd%2Funix.socket/1.0"
an_client = client.Client()
self.assertEqual(expected, an_client.api._api_endpoint)
@mock.patch("os.path.exists")
@mock.patch("os.environ")
def test_create_with_snap_lxd(self, _environ, _path_exists):
# """Client creation sets default API endpoint."""
_path_exists.return_value = True
expected = "http+unix://%2Fvar%2Fsnap%2Flxd%2F" "common%2Flxd%2Funix.socket/1.0"
an_client = client.Client()
self.assertEqual(expected, an_client.api._api_endpoint)
def test_create_LXD_DIR(self):
"""When LXD_DIR is set, use it in the client."""
os.environ["LXD_DIR"] = "/lxd"
expected = "http+unix://%2Flxd%2Funix.socket/1.0"
an_client = client.Client()
self.assertEqual(expected, an_client.api._api_endpoint)
def test_create_endpoint(self):
"""Explicitly set the client endpoint."""
endpoint = "http://lxd"
expected = "http://lxd/1.0"
an_client = client.Client(endpoint=endpoint)
self.assertEqual(expected, an_client.api._api_endpoint)
def test_create_endpoint_with_project(self):
"""Explicitly set the client endpoint."""
response = mock.MagicMock(status_code=200)
response.json.side_effect = [
{
"metadata": {
"auth": "untrusted",
"api_extensions": ["projects"],
}
},
]
self.get.return_value = response
endpoint = "/tmp/unix.socket"
expected = "http+unix://%2Ftmp%2Funix.socket/1.0"
with mock.patch("os.path.exists") as mock_exists:
mock_exists.return_value = True
a_client = client.Client(endpoint=endpoint, project="prj")
self.assertEqual(a_client.api._api_endpoint, expected)
self.assertEqual(a_client.api._project, "prj")
def test_create_endpoint_unixsocket(self):
"""Test with unix socket endpoint."""
endpoint = "/tmp/unix.socket"
expected = "http+unix://%2Ftmp%2Funix.socket/1.0"
with mock.patch("os.path.exists") as mock_exists:
mock_exists.return_value = True
a_client = client.Client(endpoint)
self.assertEqual(expected, a_client.api._api_endpoint)
def test_connection_404(self):
"""If the endpoint 404s, an exception is raised."""
response = mock.MagicMock(status_code=404)
self.get.return_value = response
self.assertRaises(exceptions.ClientConnectionFailed, client.Client)
def test_connection_failed(self):
"""If the connection fails, an exception is raised."""
def raise_exception():
raise requests.exceptions.ConnectionError()
self.get.side_effect = raise_exception
self.get.return_value = None
self.assertRaises(exceptions.ClientConnectionFailed, client.Client)
def test_connection_untrusted(self):
"""Client.trusted is False when certs are untrusted."""
response = mock.MagicMock(status_code=200)
response.json.return_value = {"metadata": {"auth": "untrusted"}}
self.get.return_value = response
an_client = client.Client()
self.assertFalse(an_client.trusted)
def test_connection_trusted(self):
"""Client.trusted is True when certs are untrusted."""
response = mock.MagicMock(status_code=200)
response.json.return_value = {"metadata": {"auth": "trusted"}}
self.get.return_value = response
an_client = client.Client()
self.assertTrue(an_client.trusted)
def test_server_clustered_false_no_info(self):
"""Client.server_clustered is False if the info is not available in metadata."""
response = mock.MagicMock(status_code=200)
response.json.return_value = {"metadata": {"environment": {}}}
self.get.return_value = response
a_client = client.Client()
self.assertFalse(a_client.server_clustered)
def test_server_clustered_false(self):
"""Client.server_clustered is False if not clustered."""
response = mock.MagicMock(status_code=200)
response.json.return_value = {
"metadata": {"environment": {"server_clustered": False}}
}
self.get.return_value = response
a_client = client.Client()
self.assertFalse(a_client.server_clustered)
def test_server_clustered_true(self):
"""Client.server_clustered is True if clustered."""
response = mock.MagicMock(status_code=200)
response.json.return_value = {
"metadata": {"environment": {"server_clustered": True}}
}
self.get.return_value = response
a_client = client.Client()
self.assertTrue(a_client.server_clustered)
def test_authenticate(self):
"""A client is authenticated."""
response = mock.MagicMock(status_code=200)
response.json.side_effect = [
{"metadata": {"auth": "untrusted"}},
{
"metadata": {
"type": "client",
"fingerprint": "eaf55b72fc23aa516d709271df9b0116064bf8cfa009cf34c67c33ad32c2320c",
}
},
{"metadata": {"auth": "trusted"}},
]
self.get.return_value = response
certs = (
os.path.join(os.path.dirname(__file__), "lxd.crt"),
os.path.join(os.path.dirname(__file__), "lxd.key"),
)
an_client = client.Client("https://lxd", cert=certs)
an_client.authenticate("test-password")
self.assertTrue(an_client.trusted)
def test_authenticate_with_project(self):
"""A client is authenticated with a project."""
response = mock.MagicMock(status_code=200)
response.json.side_effect = [
{
"metadata": {
"auth": "untrusted",
"api_extensions": ["projects"],
}
},
{
"metadata": {
"type": "client",
"fingerprint": "eaf55b72fc23aa516d709271df9b0116064bf8cfa009cf34c67c33ad32c2320c",
}
},
{
"metadata": {
"auth": "trusted",
"environment": {"project": "test-proj"},
}
},
]
self.get.return_value = response
certs = (
os.path.join(os.path.dirname(__file__), "lxd.crt"),
os.path.join(os.path.dirname(__file__), "lxd.key"),
)
an_client = client.Client("https://lxd", cert=certs, project="test-proj")
an_client.authenticate("test-password")
self.assertTrue(an_client.trusted)
self.assertEqual(an_client.host_info["environment"]["project"], "test-proj")
def test_authenticate_project_not_supported(self):
"""A client raises an error if projects are not supported."""
response = mock.MagicMock(status_code=200)
response.json.return_value = {
"metadata": {
"auth": "untrusted",
"api_extensions": [],
}
}
self.get.return_value = response
with pytest.raises(exceptions.ClientConnectionFailed):
client.Client("https://lxd", project="test-proj")
def test_authenticate_project_not_supported_but_default(self):
"""
A client doesn't raise an error if projects are not supported and the
default one is requested.
"""
response = mock.MagicMock(status_code=200)
response.json.side_effect = [
{
"metadata": {
"auth": "untrusted",
"api_extensions": [],
}
},
{
"metadata": {
"type": "client",
"fingerprint": "eaf55b72fc23aa516d709271df9b0116064bf8cfa009cf34c67c33ad32c2320c",
}
},
{
"metadata": {
"auth": "trusted",
"environment": {},
}
},
]
self.get.return_value = response
certs = (
os.path.join(os.path.dirname(__file__), "lxd.crt"),
os.path.join(os.path.dirname(__file__), "lxd.key"),
)
an_client = client.Client("https://lxd", cert=certs, project="default")
an_client.authenticate("test-password")
self.assertTrue(an_client.trusted)
def test_authenticate_already_authenticated(self):
"""If the client is already authenticated, nothing happens."""
an_client = client.Client()
an_client.authenticate("test-password")
self.assertTrue(an_client.trusted)
def test_host_info(self):
"""Perform a host query."""
an_client = client.Client()
self.assertEqual("zfs", an_client.host_info["environment"]["storage"])
def test_events(self):
"""The default websocket client is returned."""
an_client = client.Client()
ws_client = an_client.events()
self.assertEqual("/1.0/events", ws_client.resource)
def test_events_unix_socket(self):
"""A unix socket compatible websocket client is returned."""
websocket_client = mock.Mock(resource=None)
WebsocketClient = mock.Mock()
WebsocketClient.return_value = websocket_client
os.environ["LXD_DIR"] = "/lxd"
an_client = client.Client()
an_client.events(websocket_client=WebsocketClient)
WebsocketClient.assert_called_once_with(
"ws+unix:///lxd/unix.socket", ssl_options=None
)
def test_events_htt(self):
"""An http compatible websocket client is returned."""
websocket_client = mock.Mock(resource=None)
WebsocketClient = mock.Mock()
WebsocketClient.return_value = websocket_client
an_client = client.Client("http://lxd.local")
an_client.events(websocket_client=WebsocketClient)
WebsocketClient.assert_called_once_with("ws://lxd.local", ssl_options=None)
def test_events_https(self):
"""An https compatible websocket client is returned."""
websocket_client = mock.Mock(resource=None)
WebsocketClient = mock.Mock()
WebsocketClient.return_value = websocket_client
an_client = client.Client("https://lxd.local", cert=client.DEFAULT_CERTS)
an_client.events(websocket_client=WebsocketClient)
ssl_options = {
"certfile": client.DEFAULT_CERTS.cert,
"keyfile": client.DEFAULT_CERTS.key,
}
WebsocketClient.assert_called_once_with(
"wss://lxd.local", ssl_options=ssl_options
)
def test_events_type_filter(self):
"""The websocket client can filter events by type."""
an_client = client.Client()
# from the itertools recipes documentation
def powerset(types):
from itertools import chain, combinations
pwset = [combinations(types, r) for r in range(len(types) + 1)]
return chain.from_iterable(pwset)
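        # e.g. powerset([1, 2]) yields (), (1,), (2,), (1, 2)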
event_path = "/1.0/events"
for types in powerset(client.EventType):
ws_client = an_client.events(event_types=set(types))
actual_resource = parse.urlparse(ws_client.resource)
expect_resource = parse.urlparse(event_path)
if types and client.EventType.All not in types:
type_csl = ",".join([t.value for t in types])
query = parse.parse_qs(expect_resource.query)
query.update({"type": type_csl})
qs = parse.urlencode(query)
expect_resource = expect_resource._replace(query=qs)
self.assertEqual(expect_resource.path, actual_resource.path)
if types and client.EventType.All not in types:
qdict = parse.parse_qs(expect_resource.query)
expect_types = set(qdict["type"][0].split(","))
qdict = parse.parse_qs(actual_resource.query)
actual_types = set(qdict["type"][0].split(","))
self.assertEqual(expect_types, actual_types)
else:
self.assertEqual(expect_resource.query, actual_resource.query)
def test_resources(self):
a_client = client.Client()
a_client.host_info["api_extensions"] = ["resources"]
response = mock.MagicMock(status_code=200)
response.json.return_value = {
"metadata": {
"cpu": {},
}
}
self.get.return_value = response
self.assertIn("cpu", a_client.resources)
def test_resources_raises_conn_failed_exception(self):
a_client = client.Client()
a_client.host_info["api_extensions"] = ["resources"]
response = mock.MagicMock(status_code=400)
self.get.return_value = response
with self.assertRaises(exceptions.ClientConnectionFailed):
a_client.resources
def test_resources_raises_api_extension_not_avail_exception(self):
a_client = client.Client()
a_client.host_info["api_extensions"] = []
with self.assertRaises(exceptions.LXDAPIExtensionNotAvailable):
a_client.resources
def test_resources_uses_cache(self):
a_client = client.Client()
a_client._resource_cache = {"cpu": {}}
# Client.__init__ calls get, reset the mock before trying
# resources to confirm it wasn't called.
self.get.called = False
self.assertIn("cpu", a_client.resources)
self.assertFalse(self.get.called)
def test_has_api_extension(self):
a_client = client.Client()
a_client.host_info = {"api_extensions": ["one", "two"]}
self.assertFalse(a_client.has_api_extension("three"))
self.assertTrue(a_client.has_api_extension("one"))
self.assertTrue(a_client.has_api_extension("two"))
def test_assert_has_api_extension(self):
a_client = client.Client()
a_client.host_info = {"api_extensions": ["one", "two"]}
with self.assertRaises(exceptions.LXDAPIExtensionNotAvailable) as c:
self.assertFalse(a_client.assert_has_api_extension("three"))
self.assertIn("three", str(c.exception))
a_client.assert_has_api_extension("one")
a_client.assert_has_api_extension("two")
class TestAPINode(TestCase):
"""Tests for pylxd.client._APINode."""
def test_getattr(self):
"""API Nodes can use object notation for nesting."""
node = client._APINode("http://test.com")
new_node = node.test
self.assertEqual("http://test.com/test", new_node._api_endpoint)
def test_getattr_storage_pools(self):
"""API node with storage_pool should be storage-pool"""
node = client._APINode("http://test.com")
new_node = node.test.storage_pools
self.assertEqual("http://test.com/test/storage-pools", new_node._api_endpoint)
# other _ should stay as they were.
new_node = node.test.some_thing
self.assertEqual("http://test.com/test/some_thing", new_node._api_endpoint)
def test_getitem(self):
"""API Nodes can use dict notation for nesting."""
node = client._APINode("http://test.com")
new_node = node["test"]
self.assertEqual("http://test.com/test", new_node._api_endpoint)
def test_getitem_leave_underscores_alone(self):
"""Bug 295 erronously changed underscores to '-' -- let's make sure
it doens't happend again
"""
node = client._APINode("http://test.com")
new_node = node.thing["my_snapshot"]
self.assertEqual("http://test.com/thing/my_snapshot", new_node._api_endpoint)
def test_session_http(self):
"""HTTP nodes return the default requests session."""
node = client._APINode("http://test.com")
self.assertIsInstance(node.session, requests.Session)
def test_session_unix_socket(self):
"""HTTP nodes return a requests_unixsocket session."""
node = client._APINode("http+unix://test.com")
self.assertIsInstance(node.session, requests_unixsocket.Session)
@mock.patch("pylxd.client.requests.Session")
def test_get(self, Session):
"""Perform a session get."""
response = mock.Mock(
**{
"status_code": 200,
"json.return_value": {"type": "sync"},
}
)
session = mock.Mock(**{"get.return_value": response})
Session.return_value = session
node = client._APINode("http://test.com")
node.get()
session.get.assert_called_once_with("http://test.com", timeout=None)
@mock.patch("pylxd.client.requests.Session")
def test_post(self, Session):
"""Perform a session post."""
response = mock.Mock(
**{
"status_code": 200,
"json.return_value": {"type": "sync"},
}
)
session = mock.Mock(**{"post.return_value": response})
Session.return_value = session
node = client._APINode("http://test.com")
node.post()
session.post.assert_called_once_with("http://test.com", timeout=None)
@mock.patch("pylxd.client.requests.Session")
def test_post_200_not_sync(self, Session):
"""A status code of 200 with async request raises an exception."""
response = mock.Mock(
**{
"status_code": 200,
"json.return_value": {"type": "async"},
}
)
session = mock.Mock(**{"post.return_value": response})
Session.return_value = session
node = client._APINode("http://test.com")
self.assertRaises(exceptions.LXDAPIException, node.post)
@mock.patch("pylxd.client.requests.Session")
def test_post_missing_type_200(self, Session):
"""A missing response type raises an exception."""
response = mock.Mock(
**{
"status_code": 200,
"json.return_value": {},
}
)
session = mock.Mock(**{"post.return_value": response})
Session.return_value = session
node = client._APINode("http://test.com")
self.assertRaises(exceptions.LXDAPIException, node.post)
@mock.patch("pylxd.client.requests.Session")
def test_put(self, Session):
"""Perform a session put."""
response = mock.Mock(
**{
"status_code": 200,
"json.return_value": {"type": "sync"},
}
)
session = mock.Mock(**{"put.return_value": response})
Session.return_value = session
node = client._APINode("http://test.com")
node.put()
session.put.assert_called_once_with("http://test.com", timeout=None)
@mock.patch("pylxd.client.requests.Session")
def test_patch(self, Session):
"""Perform a session patch."""
response = mock.Mock(
**{
"status_code": 200,
"json.return_value": {"type": "sync"},
}
)
session = mock.Mock(**{"patch.return_value": response})
Session.return_value = session
node = client._APINode("http://test.com")
node.patch()
session.patch.assert_called_once_with("http://test.com", timeout=None)
@mock.patch("pylxd.client.requests.Session")
def test_delete(self, Session):
"""Perform a session delete."""
response = mock.Mock(
**{
"status_code": 200,
"json.return_value": {"type": "sync"},
}
)
session = mock.Mock(**{"delete.return_value": response})
Session.return_value = session
node = client._APINode("http://test.com")
node.delete()
session.delete.assert_called_once_with("http://test.com", timeout=None)
class TestWebsocketClient(TestCase):
"""Tests for pylxd.client.WebsocketClient."""
def test_handshake_ok(self):
"""A `message` attribute of an empty list is created."""
ws_client = client._WebsocketClient("ws://an/fake/path")
ws_client.handshake_ok()
self.assertEqual([], ws_client.messages)
def test_received_message(self):
"""A json dict is added to the messages attribute."""
message = mock.Mock(data=json.dumps({"test": "data"}).encode("utf-8"))
ws_client = client._WebsocketClient("ws://an/fake/path")
ws_client.handshake_ok()
ws_client.received_message(message)
self.assertEqual({"test": "data"}, ws_client.messages[0])
|
lxc/pylxd
|
pylxd/tests/test_client.py
|
Python
|
apache-2.0
| 22,609
|
# Object Model
from datetime import date
from google.appengine.ext import ndb
import logging
# Model class for the login. Persistence still needs to be tested.
# version 0.9 alpha
class Usuario(ndb.Model):
usuario = ndb.StringProperty()
clave = ndb.StringProperty()
date = ndb.DateTimeProperty(auto_now_add=True)
nombres = ndb.StringProperty()
paterno = ndb.StringProperty()
materno = ndb.StringProperty()
email = ndb.StringProperty()
rol = ndb.StringProperty()
def get_usuariokey(cls, keyusuario):
result = None
if keyusuario:
q = cls.query().filter(Usuario._key == keyusuario).fetch(1)
for element in q:
result = element
return result
def get_usuario(cls, usuario):
result = None
if len(usuario) > 0:
q = cls.query().filter(ndb.GenericProperty('usuario') == usuario).fetch(1)
for element in q:
result = element
return result
class Profile(ndb.Model):
usuario = ndb.StringProperty()
operaciones = ndb.BooleanProperty()
inventario = ndb.BooleanProperty()
reportes = ndb.BooleanProperty()
#version 0.9 beta
class Cliente(ndb.Model):
cliente = ndb.StringProperty()
direccion = ndb.StringProperty()
activo = ndb.BooleanProperty()
contacto = ndb.StringProperty()
ubicacion = ndb.StringProperty()
def get_cliente(cls, id):
value = None
if id:
value = cls.get_by_id(id)
return value
class InformeDefinicion(ndb.Model):
version = ndb.IntegerProperty()
informe_schema = ndb.JsonProperty()
informe_layout = ndb.JsonProperty()
class UsuarioInforme(ndb.Model):
usuario = ndb.KeyProperty(kind=Usuario)
informedef = ndb.KeyProperty(kind=InformeDefinicion)
fecha_creacion = ndb.DateTimeProperty(auto_now_add=True)
informe_data = ndb.JsonProperty()
class InformeMetaData(ndb.Model):
usuario = ndb.StringProperty()
fecha_creacion = ndb.DateTimeProperty(auto_now_add=True)
tiempo_estimado = ndb.IntegerProperty()
tiempo_atencion = ndb.IntegerProperty()
tiempo_espera = ndb.IntegerProperty()
comentario = ndb.StringProperty()
costo_estimado = ndb.FloatProperty()
usuario_informe = ndb.KeyProperty(kind=UsuarioInforme)
class ImagenInforme(ndb.Model):
usuarioinforme = ndb.KeyProperty(kind=UsuarioInforme)
titulo = ndb.StringProperty()
imagen = ndb.BlobProperty()
class TipoOperacion(ndb.Model):
tipo = ndb.StringProperty()
descripcion = ndb.StringProperty()
class Operaciones(ndb.Model):
asignado = ndb.KeyProperty(kind=Usuario)
cliente = ndb.KeyProperty(kind=Cliente)
cliente_name = ndb.StringProperty()
registrado = ndb.KeyProperty(kind=Usuario)
titulo = ndb.StringProperty()
descripcion = ndb.StringProperty()
fecha_creacion = ndb.DateTimeProperty(auto_now_add=True)
tipo_operacion = ndb.KeyProperty(kind=TipoOperacion)
def get_operaciones(cls, key_asignado):
c = {'operaciones':[], 'ope_fecha':''}
d = date.today()
list_c = {'cliente':'', 'titulo':'', 'descripcion':'', 'fecha':'', 'url':''}
        logging.info('value of cls.asignado: %s', cls.asignado)
        logging.info('value of key_asignado: %s', key_asignado)
        operaciones = cls.query().filter(ndb.GenericProperty('asignado') == key_asignado).fetch(100)  # Keep in mind the filter must be defined with the class.property
        # Keep in mind that after a table is deleted, the first read does not work well (SDK issue)
        logging.info('inside method. value of operaciones: %s', operaciones)
if operaciones:
for element in operaciones:
                logging.info('inside the for loop')
list_c = {'cliente':'', 'titulo':'', 'descripcion':'', 'fecha':'', 'url':''}
list_c['cliente'] = element.cliente_name
list_c['descripcion'] = element.descripcion
list_c['fecha'] = str(element.fecha_creacion.strftime("%d/%m/%y"))
list_c['url'] = '/informes?operacion=' + str(element._key.id())
                logging.info('data added to the dictionary: %s, %s, %s', element.titulo, element.descripcion, str(element.fecha_creacion.strftime("%d/%m/%y")))
c['operaciones'].append(list_c)
        c['ope_fecha'] = d.strftime("%A, %d. %B %Y %I:%M%p")  # format depends on the English locale; the webapp2 internationalization API should be used instead
return c
|
jrevatta/mro-pyme
|
opmodel.py
|
Python
|
apache-2.0
| 4,888
|
""" This module provides classes and functions to visualise a KGML Pathway Map
The KGML definition is as of release KGML v0.7.1
(http://www.kegg.jp/kegg/xml/docs/)
Classes:
"""
from __future__ import print_function
import os
import tempfile
from itertools import chain
from io import BytesIO
from reportlab.lib import pagesizes
from reportlab.lib import colors
from reportlab.lib.utils import ImageReader
from reportlab.graphics.shapes import *
from reportlab.pdfgen import canvas
from PIL import Image
from Bio._py3k import urlopen as _urlopen
from Bio.KEGG.KGML.KGML_pathway import Pathway
def darken(color, factor=0.7):
"""Returns darkened color as a ReportLab RGB color.
    Takes a passed color and returns a Reportlab color that is darker by the
factor indicated in the parameter.
"""
newcol = color_to_reportlab(color)
for a in ['red', 'green', 'blue']:
setattr(newcol, a, factor * getattr(newcol, a))
return newcol
def color_to_reportlab(color):
"""Returns the passed color in Reportlab Color format.
We allow colors to be specified as hex values, tuples, or Reportlab Color
objects, and with or without an alpha channel. This function acts as a
Rosetta stone for conversion of those formats to a Reportlab Color
object, with alpha value.
Any other color specification is returned directly
"""
# Reportlab Color objects are in the format we want already
if isinstance(color, colors.Color):
return color
elif isinstance(color, str): # String implies hex color
if color.startswith("0x"): # Standardise to octothorpe
color.replace("0x", "#")
if len(color) == 7:
return colors.HexColor(color)
else:
try:
return colors.HexColor(color, hasAlpha=True)
except TypeError: # Catch pre-2.7 Reportlab
raise RuntimeError("Your reportlab seems to be too old, try 2.7 onwards")
elif isinstance(color, tuple): # Tuple implies RGB(alpha) tuple
return colors.Color(*color)
return color
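# For illustration (example values assumed): color_to_reportlab("#ff0000") and
# color_to_reportlab((1.0, 0.0, 0.0)) both return reportlab colors.Color
# instances, while an existing colors.Color is passed through unchanged.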
def get_temp_imagefilename(url):
"""Returns filename of temporary file containing downloaded image.
Create a new temporary file to hold the image file at the passed URL
and return the filename.
"""
img = _urlopen(url).read()
im = Image.open(BytesIO(img))
# im.transpose(Image.FLIP_TOP_BOTTOM)
f = tempfile.NamedTemporaryFile(delete=False, suffix='.png')
fname = f.name
f.close()
im.save(fname, 'PNG')
return fname
class KGMLCanvas(object):
"""Reportlab Canvas-based representation of a KGML pathway map."""
def __init__(self, pathway, import_imagemap=False, label_compounds=True,
label_orthologs=True, label_reaction_entries=True,
label_maps=True, show_maps=False, fontname='Helvetica',
fontsize=6, draw_relations=True, show_orthologs=True,
show_compounds=True, show_genes=True,
show_reaction_entries=True, margins=(0.02, 0.02)):
self.pathway = pathway
self.show_maps = show_maps
self.show_orthologs = show_orthologs
self.show_compounds = show_compounds
self.show_genes = show_genes
self.show_reaction_entries = show_reaction_entries
self.label_compounds = label_compounds
self.label_orthologs = label_orthologs
self.label_reaction_entries = label_reaction_entries
self.label_maps = label_maps
self.fontname = fontname
self.fontsize = fontsize
self.draw_relations = draw_relations
self.non_reactant_transparency = 0.3
self.import_imagemap = import_imagemap # Import the map .png from URL
# percentage of canvas that will be margin in on either side in the
# X and Y directions
self.margins = margins
def draw(self, filename):
"""Add the map elements to the drawing."""
# Instantiate the drawing, first
# size x_max, y_max for now - we can add margins, later
if self.import_imagemap:
# We're drawing directly on the image, so we set the canvas to the
# same size as the image
if os.path.isfile(self.pathway.image):
imfilename = self.pathway.image
else:
imfilename = get_temp_imagefilename(self.pathway.image)
im = Image.open(imfilename)
cwidth, cheight = im.size
else:
# No image, so we set the canvas size to accommodate visible
# elements
cwidth, cheight = (self.pathway.bounds[1][0],
self.pathway.bounds[1][1])
# Instantiate canvas
self.drawing = \
canvas.Canvas(filename, bottomup=0,
pagesize=(cwidth *
(1 + 2 * self.margins[0]),
cheight *
(1 + 2 * self.margins[1])))
self.drawing.setFont(self.fontname, self.fontsize)
# Transform the canvas to add the margins
self.drawing.translate(self.margins[0] * self.pathway.bounds[1][0],
self.margins[1] * self.pathway.bounds[1][1])
# Add the map image, if required
if self.import_imagemap:
self.drawing.saveState()
self.drawing.scale(1, -1)
self.drawing.translate(0, -cheight)
self.drawing.drawImage(imfilename, 0, 0)
self.drawing.restoreState()
# Add the reactions, compounds and maps
# Maps go on first, to be overlaid by more information.
# By default, they're slightly transparent.
if self.show_maps:
self.__add_maps()
if self.show_reaction_entries:
self.__add_reaction_entries()
if self.show_orthologs:
self.__add_orthologs()
if self.show_compounds:
self.__add_compounds()
if self.show_genes:
self.__add_genes()
# TODO: complete draw_relations code
# if self.draw_relations:
# self.__add_relations()
# Write the pathway map to PDF
self.drawing.save()
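        # A minimal usage sketch (assumes a Pathway object parsed from KGML,
        # e.g. with Bio.KEGG.KGML.KGML_parser.read; the filename is illustrative):
        #   canvas = KGMLCanvas(pathway, import_imagemap=True)
        #   canvas.draw("map01100.pdf")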
def __add_maps(self):
"""Adds maps to the drawing of the map.
We do this first, as they're regional labels to be overlaid by
information. Also, we want to set the color to something subtle.
We're using Hex colors because that's what KGML uses, and
Reportlab doesn't mind.
"""
for m in self.pathway.maps:
for g in m.graphics:
self.drawing.setStrokeColor('#888888')
self.drawing.setFillColor('#DDDDDD')
self.__add_graphics(g)
if self.label_maps:
self.drawing.setFillColor('#888888')
self.__add_labels(g)
def __add_graphics(self, graphics):
"""Adds the passed graphics object to the map.
Add text, add after the graphics object, for sane Z-ordering.
"""
if graphics.type == 'line':
p = self.drawing.beginPath()
x, y = graphics.coords[0]
# There are optional settings for lines that aren't necessarily
# part of the KGML DTD
if graphics.width is not None:
self.drawing.setLineWidth(graphics.width)
else:
self.drawing.setLineWidth(1)
p.moveTo(x, y)
for (x, y) in graphics.coords:
p.lineTo(x, y)
self.drawing.drawPath(p)
self.drawing.setLineWidth(1) # Return to default
# KGML defines the (x, y) coordinates as the centre of the circle/
# rectangle/roundrectangle, but Reportlab uses the co-ordinates of the
        # lower-left corner for rectangles.
if graphics.type == 'circle':
self.drawing.circle(graphics.x, graphics.y, graphics.width * 0.5,
stroke=1, fill=1)
elif graphics.type == 'roundrectangle':
self.drawing.roundRect(graphics.x - graphics.width * 0.5,
graphics.y - graphics.height * 0.5,
graphics.width, graphics.height,
min(graphics.width, graphics.height) * 0.1,
stroke=1, fill=1)
elif graphics.type == 'rectangle':
self.drawing.rect(graphics.x - graphics.width * 0.5,
graphics.y - graphics.height * 0.5,
graphics.width, graphics.height,
stroke=1, fill=1)
def __add_labels(self, graphics):
"""Adds labels for the passed graphics objects to the map (PRIVATE).
We don't check that the labels fit inside objects such as circles/
rectangles/roundrectangles.
"""
if graphics.type == 'line':
# We use the midpoint of the line - sort of - we take the median
# line segment (list-wise, not in terms of length), and use the
# midpoint of that line. We could have other options here,
# maybe even parameterising it to a proportion of the total line
# length.
mid_idx = len(graphics.coords) * 0.5
if not int(mid_idx) == mid_idx:
idx1, idx2 = int(mid_idx - 0.5), int(mid_idx + 0.5)
else:
idx1, idx2 = int(mid_idx - 1), int(mid_idx)
x1, y1 = graphics.coords[idx1]
x2, y2 = graphics.coords[idx2]
x, y = 0.5 * (x1 + x2), 0.5 * (y1 + y2)
elif graphics.type == 'circle':
x, y = graphics.x, graphics.y
elif graphics.type in ('rectangle', 'roundrectangle'):
x, y = graphics.x, graphics.y
        # How big do we want the text, and how many characters?
if graphics._parent.type == 'map':
text = graphics.name
self.drawing.setFont(self.fontname, self.fontsize + 2)
elif len(graphics.name) < 15:
text = graphics.name
else:
text = graphics.name[:12] + '...'
self.drawing.drawCentredString(x, y, text)
self.drawing.setFont(self.fontname, self.fontsize)
def __add_orthologs(self):
"""Adds 'ortholog' Entry elements to the drawing of the map (PRIVATE).
In KGML, these are typically line objects, so we render them
before the compound circles to cover the unsightly ends/junctions.
"""
for ortholog in self.pathway.orthologs:
for g in ortholog.graphics:
self.drawing.setStrokeColor(color_to_reportlab(g.fgcolor))
self.drawing.setFillColor(color_to_reportlab(g.bgcolor))
self.__add_graphics(g)
if self.label_orthologs:
# We want the label color to be slightly darker
# (where possible), so it can be read
self.drawing.setFillColor(darken(g.fgcolor))
self.__add_labels(g)
def __add_reaction_entries(self):
"""Adds Entry elements for Reactions to the map drawing (PRIVATE).
In KGML, these are typically line objects, so we render them
before the compound circles to cover the unsightly ends/junctions
"""
for reaction in self.pathway.reaction_entries:
for g in reaction.graphics:
self.drawing.setStrokeColor(color_to_reportlab(g.fgcolor))
self.drawing.setFillColor(color_to_reportlab(g.bgcolor))
self.__add_graphics(g)
if self.label_reaction_entries:
# We want the label color to be slightly darker
# (where possible), so it can be read
self.drawing.setFillColor(darken(g.fgcolor))
self.__add_labels(g)
def __add_compounds(self):
"""Adds compound elements to the drawing of the map (PRIVATE)."""
for compound in self.pathway.compounds:
for g in compound.graphics:
# Modify transparency of compounds that don't participate
# in reactions
fillcolor = color_to_reportlab(g.bgcolor)
if not compound.is_reactant:
fillcolor.alpha *= self.non_reactant_transparency
self.drawing.setStrokeColor(color_to_reportlab(g.fgcolor))
self.drawing.setFillColor(fillcolor)
self.__add_graphics(g)
if self.label_compounds:
if not compound.is_reactant:
t = 0.3
else:
t = 1
self.drawing.setFillColor(colors.Color(0.2, 0.2, 0.2, t))
self.__add_labels(g)
def __add_genes(self):
"""Adds gene elements to the drawing of the map (PRIVATE)."""
for gene in self.pathway.genes:
for g in gene.graphics:
self.drawing.setStrokeColor(color_to_reportlab(g.fgcolor))
self.drawing.setFillColor(color_to_reportlab(g.bgcolor))
self.__add_graphics(g)
if self.label_compounds:
self.drawing.setFillColor(darken(g.fgcolor))
self.__add_labels(g)
def __add_relations(self):
"""Adds relations to the map (PRIVATE).
This is tricky. There is no defined graphic in KGML for a
relation, and the corresponding entries are typically defined
as objects 'to be connected somehow'. KEGG uses KegSketch, which
is not public, and most third-party software draws straight line
arrows, with heads to indicate the appropriate direction
(at both ends for reversible reactions), using solid lines for
ECrel relation types, and dashed lines for maplink relation types.
The relation has:
- entry1: 'from' node
- entry2: 'to' node
- subtype: what the relation refers to
Typically we have entry1 = map/ortholog; entry2 = map/ortholog,
subtype = compound.
"""
# Dashed lines for maplinks, solid for everything else
for relation in list(self.pathway.relations):
if relation.type == 'maplink':
self.drawing.setDash(6, 3)
else:
self.drawing.setDash()
for s in relation.subtypes:
subtype = self.pathway.entries[s[1]]
# Our aim is to draw an arrow from the entry1 object to the
# entry2 object, via the subtype object.
# 1) Entry 1 to subtype
self.__draw_arrow(relation.entry1, subtype)
# 2) subtype to Entry 2
self.__draw_arrow(subtype, relation.entry2)
def __draw_arrow(self, g_from, g_to):
"""Draw an arrow between given Entry objects (PRIVATE).
Draws an arrow from the g_from Entry object to the g_to
Entry object; both must have Graphics objects.
"""
        # Centre and bound co-ordinates for the 'from' and 'to' objects
bounds_from, bounds_to = g_from.bounds, g_to.bounds
centre_from = (0.5 * (bounds_from[0][0] + bounds_from[1][0]),
0.5 * (bounds_from[0][1] + bounds_from[1][1]))
centre_to = (0.5 * (bounds_to[0][0] + bounds_to[1][0]),
0.5 * (bounds_to[0][1] + bounds_to[1][1]))
p = self.drawing.beginPath()
# print(True, g_from.name, g_to.name, bounds_to, bounds_from)
# If the 'from' and 'to' graphics are vertically-aligned, draw a line
# from the 'from' to the 'to' entity
if bounds_to[0][0] < centre_from[0] < bounds_to[1][0]:
# print(True, g_from.name, g_to.name, bounds_to, bounds_from)
if centre_to[1] > centre_from[1]: # to above from
p.moveTo(centre_from[0], bounds_from[1][1])
p.lineTo(centre_from[0], bounds_to[0][1])
# Draw arrow point - TODO
else: # to below from
p.moveTo(centre_from[0], bounds_from[0][1])
p.lineTo(centre_from[0], bounds_to[1][1])
# Draw arrow point - TODO
elif bounds_from[0][0] < centre_to[0] < bounds_from[1][0]:
# print(True, g_from.name, g_to.name, bounds_to, bounds_from)
if centre_to[1] > centre_from[1]: # to above from
p.moveTo(centre_to[0], bounds_from[1][1])
p.lineTo(centre_to[0], bounds_to[0][1])
# Draw arrow point - TODO
else: # to below from
p.moveTo(centre_to[0], bounds_from[0][1])
p.lineTo(centre_to[0], bounds_to[1][1])
# Draw arrow point - TODO
self.drawing.drawPath(p) # Draw arrow shaft
# print(g_from)
# print(bounds_from)
# print(g_to)
# print(bounds_to)
|
poojavade/Genomics_Docker
|
Dockerfiles/gedlab-khmer-filter-abund/pymodules/python2.7/lib/python/Bio/Graphics/KGML_vis.py
|
Python
|
apache-2.0
| 17,163
|
# Copyright (c) 2012 Intel
# Copyright (c) 2012 OpenStack, LLC.
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import math
from raksha.openstack.common import log as logging
from raksha.openstack.common.scheduler import filters
LOG = logging.getLogger(__name__)
class CapacityFilter(filters.BaseHostFilter):
"""CapacityFilter filters based on volume host's capacity utilization."""
def host_passes(self, host_state, filter_properties):
"""Return True if host has sufficient capacity."""
volume_size = filter_properties.get('size')
if host_state.free_capacity_gb is None:
# Fail Safe
LOG.error(_("Free capacity not set: "
"volume node info collection broken."))
return False
free_space = host_state.free_capacity_gb
if free_space == 'infinite' or free_space == 'unknown':
# NOTE(zhiteng) for those back-ends cannot report actual
# available capacity, we assume it is able to serve the
# request. Even if it was not, the retry mechanism is
# able to handle the failure by rescheduling
return True
reserved = float(host_state.reserved_percentage) / 100
free = math.floor(free_space * (1 - reserved))
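        # For example, 100 GB free with reserved_percentage=10 gives
        # free = floor(100 * 0.9) = 90 GB available for the request.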
if free < volume_size:
LOG.warning(_("Insufficient free space for volume creation "
"(requested / avail): "
"%(requested)s/%(available)s")
% {'requested': volume_size,
'available': free})
return free >= volume_size
|
DPaaS-Raksha/raksha
|
raksha/scheduler/filters/capacity_filter.py
|
Python
|
apache-2.0
| 2,187
|