repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
maplewolf/kg-adv-models | targeted_attack/attack_triple_iter_target_class.py | Python | gpl-3.0 | 11,966 | 0.004179 | """
LGPL v3 License
More information from the user maplewolf on Kaggle.Implementation of sample attack."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import csv
import os
import numpy as np
from scipy.misc import imread, imsave
import tensorflow as tf
from tensorflow.contrib.slim.nets import inception
import inception_resnet_v2
slim = tf.contrib.slim
tf.flags.DEFINE_string(
'master', '', 'The address of the TensorFlow master to use.')
tf.flags.DEFINE_string(
'checkpoint_path', '', 'Path to checkpoint for inception network.')
tf.flags.DEFINE_string(
'input_dir', '', 'Input directory with images.')
tf.flags.DEFINE_string(
'output_dir', '', 'Output directory with images.')
tf.flags.DEFINE_float(
'max_epsilon', 16.0, 'Maximum size of adversarial perturbation.')
tf.flags.DEFINE_float(
'iter_alpha', 1.0, 'Step size for one iteration.')
tf.flags.DEFINE_integer(
'num_iter', 20, 'Number of iterations.')
tf.flags.DEFINE_integer(
'image_width', 299, 'Width of each input images.')
tf.flags.DEFINE_integer(
'image_height', 299, 'Height of each input images.')
tf.flags.DEFINE_integer(
'batch_size', 16, 'How many images process at one time.')
FLAGS = tf.flags.FLAGS
def load_target_class(input_dir):
"""Loads target classes."""
with tf.gfile.Open(os.path.join(input_dir, 'target_class.csv')) as f:
return {row[0]: int(row[1]) for row in csv.reader(f) if len(row) >= 2}
def load_images(input_dir, batch_shape):
"""Read png images from input directory in batches.
Args:
input_dir: input directory
batch_shape: shape of minibatch array, i.e. [batch_size, height, width, 3]
Yields:
filenames: list file names without path of each image
Lenght of this list could be less than batch_size, in this case only
first few images of the result are elements of the minibatch.
images: array with all images from this batch
"""
images = np.zeros(batch_shape)
filenames = []
idx = 0
batch_size = batch_shape[0]
for filepath in tf.gfile.Glob(os.path.join(input_dir, '*.png')):
with tf.gfile.Open(filepath, 'rb') as f:
image = imread(f, mode='RGB').astype(np.float) / 255.0
# Images for inception classifier are normalized to be in [-1, 1] interval.
images[idx, :, :, :] = image * 2.0 - 1.0
filenames.append(os.path.basename(filepath))
idx += 1
if idx == batch_size:
yield filenames, images
filenames = []
images = np.zeros(batch_shape)
idx = 0
if idx > 0:
yield filenames, images
def save_images(images, filenames, output_dir):
"""Saves images to the output directory.
Args:
images: array with minibatch of images
filenames: list of filenames without path
If number of file names in this list less than number of images in
the minibatch then only first len(filenames) images will be saved.
output_dir: directory where to save images
"""
for i, filename in enumerate(filenames):
# Images for inception classifier are normalized to be in [-1, 1] interval,
# so rescale them back to [0, 1].
with tf.gfile.Open(os.path.join(output_dir, filename), 'wb') as f:
imsave(f, np.round(255.0 * (np.float32(images[i, :, :, :]) + 1.0) * 0.5).astype(np.int16), format='png')
def convert_dict(model_vars, checkpoint_path):
from tensorflow.python import pywrap_tensorflow
reader = pywrap_tensorflow.NewCheckpointReader(checkpoint_path)
var_to_shape_map = reader.get_variable_to_shape_map()
ckpt_name_set = set([key for key in var_to_shape_map])
dict4vars = {}
for var in model_vars:
if var.name[8:-2] in ckpt_name_set:
dict4vars[var.name[8:-2]] = var
return dict4vars
def main(_):
# Images for inception classifier are normalized to be in [-1, 1] interval,
# eps is a difference between pixels so it should be in [0, 2] interval.
# Renormalizing epsilon from [0, 255] to [0, 2].
eps = 2.0 * FLAGS.max_epsilon / 255.0
alpha = 2.0 * FLAGS.iter_alpha / 255.0
num_iter = FLAGS.num_iter
batch_shape = [FLAGS.batch_size, FLAGS.image_height, FLAGS.image_width, 3]
num_classes = 1001
tf.logging.set_verbosity(tf.logging.INFO)
all_images_taget_class = load_target_class(FLAGS.input_dir)
label_smoothing = 0.1
coeff_mul_alpha = 0.019
AUX_ENS_V2 = 2.4
AUX_INC_V3 = 0.87
aux_weight = AUX_INC_V3
model_mode = 0
with tf.Graph().as_default():
# Prepare graph
x_input = tf.placeholder(tf.float32, shape=batch_shape)
x_max = tf.clip_by_value(x_input + eps, -1.0, 1.0)
x_min = tf.clip_by_value(x_input - eps, -1.0, 1.0)
with tf.variable_scope('model_a'):
with slim.arg_scope(inception.inception_v3_arg_scope()):
inception.inception_v3(
x_input, num_classes=num_classes, is_training=False)
with tf.variable_scope('model_b'):
with slim.arg_scope(inception.inception_v3_arg_scope()):
inception.inception_v3(
x_input, num_classes=num_classes, is_training=False)
with tf.variable_scope('model_c'):
with slim.arg_scope(inception_resnet_v2.inception_resnet_v2_arg_scope()):
inception_resnet_v2.inception_resnet_v2(
x_input, num_classes=num_classes, is_training=False)
x_adv = x_input
target_class_input = tf.placeholder(tf.int32, shape=[FLAGS.batch_size])
one_hot_target_class = tf.one_hot(target_class_input, num_classes)
for i_iter in range(num_iter):
model_mode = i_iter % 4
if i_iter >= 16:
model_mode = 3
if i_iter == 0:
label_smoothing = 0.1
coeff_mul_alpha = 0.019
elif i_iter == 10:
label_smoothing = 0
coeff_mul_alpha = 0.031
if model_mode == 1:
with tf.variable_scope('model_a'):
with slim.arg_scope(inception.inception_v3_arg_scope()):
logits, end_points = inception.inception_v3(
x_adv, num_classes=num_classes, is_training=False, reuse=True)
elif model_mode == 0:
with tf.variable_scope('model_b'):
with slim.arg_scope(inception.inception_v3_arg_scope()):
logits, end_points = inception.inception_v3(
x_adv, num_classes=num_classes, is_training=False, reuse=True)
elif model_mode == 2:
with tf.variable_scope('model_a'):
with slim.arg_scope(inception.inception_v3_arg_scope()):
logits, end_points = inception.inception_v3(
x_adv, num_classes=num_classes, is_training=False, reuse=True)
else:
with tf.variable_scope('model_c'):
with slim.arg_scope(inception_resnet_v2.inception_resnet_v2_arg_scope()):
logits, end_points = inception_resnet_v2.inception_resnet_v2(
x_adv, num_classes=num_classes, is_training=F | alse, reuse=True)
if model_mode == 3:
aux_weight = AUX_ENS_V2
else:
aux_weight = AUX_INC_V3
cross_en | tropy = tf.losses.softmax_cross_entropy(one_hot_target_class,
logits,
label_smoothing=label_smoothing,
weights=1.0)
cross_entropy += tf.losses.softmax_cross_entropy(one_hot_target_class,
end_points['AuxLogits'],
label_smoothing=label_smoothing,
weigh |
byron1655/spider | spider.py | Python | gpl-3.0 | 496 | 0.008065 | # -*- coding: utf-8 -*-
__author__ = 'byron'
import datetime
import traceback
import os
import re
import time
from spider_core import SpiderCore
url = "http://www.lagou.com/"
maxcount = 2000
maxlevel = 5
SpiderCore = SpiderCore(url, maxcount, maxlevel)
SpiderCore.start()
d | ef getAutomaticId | (name):
global db
item = db.ids.findAndModify({
'query':{'spider': name},
'update':{'$inc': 1}
})
if 'id' in item:
return item['id']
return None
|
jptomo/rpython-lang-scheme | rpython/rtyper/lltypesystem/rstr.py | Python | mit | 43,498 | 0.001494 | from weakref import WeakValueDictionary
from rpython.annotator import model as annmodel
from rpython.rlib import jit, types
from rpython.rlib.debug import ll_assert
from rpython.rlib.objectmodel import (malloc_zero_filled, we_are_translated,
_hash_string, keepalive_until_here, specialize, enforceargs)
from rpython.rlib.signature import signature
from rpython.rlib.rarithmetic import ovfcheck
from rpython.rtyper.error import TyperError
from rpython.rtyper.lltypesystem import ll_str, llmemory
from rpython.rtyper.lltypesystem.lltype import (GcStruct, Signed, Array, Char,
UniChar, Ptr, malloc, Bool, Void, GcArray, nullptr, cast_primitive,
typeOf, staticAdtMethod, GcForwardReference)
from rpython.rtyper.rmodel import inputconst, Repr
from rpython.rtyper.rint import IntegerRepr
from rpython.rtyper.rstr import (AbstractStringRepr, AbstractCharRepr,
AbstractUniCharRepr, AbstractStringIteratorRepr, AbstractLLHelpers,
AbstractUnicodeRepr)
from rpython.tool.sourcetools import func_with_new_name
# ____________________________________________________________
#
# Concrete implementation of RPython strings:
#
# struct str {
# hash: Signed
# chars: array of Char
# }
STR = GcForwardReference()
UNICODE = GcForwardReference()
def new_malloc(TP, name):
@enforceargs(int)
def mallocstr(length):
ll_assert(length >= 0, "negative string length")
r = malloc(TP, length)
if not we_are_translated() or not malloc_zero_filled:
r.hash = 0
return r
return func_with_new_name(mallocstr, name)
mallocstr = new_malloc(STR, 'mallocstr')
mallocunicode = new_malloc(UNICODE, 'mallocunicode')
def emptystrfun():
return emptystr
def emptyunicodefun():
return emptyunicode
def _new_copy_contents_fun(SRC_TP, DST_TP, CHAR_TP, name):
@specialize.arg(0)
def _str_ofs(TP, item):
return (llmemory.offsetof(TP, 'chars') +
llmemory.itemoffsetof(TP.chars, 0) +
llmemory.sizeof(CHAR_TP) * item)
@signature(types.any(), types.any(), types.int(), returns=types.any())
@specialize.arg(0)
def _get_raw_buf(TP, src, ofs):
assert typeOf(src).TO == TP
assert ofs >= 0
return llmemory.cast_ptr_to_adr(src) + _str_ofs(TP, ofs)
_get_raw_b | uf._always_inline_ = True
@jit.oopspec('stroruni.copy_contents(src, dst, srcstart, dststart, length)')
@signature(types.any(), types.any(), types.int(), types.int(), types.int(), returns=types.none())
def copy_string_contents(src | , dst, srcstart, dststart, length):
"""Copies 'length' characters from the 'src' string to the 'dst'
string, starting at position 'srcstart' and 'dststart'."""
# xxx Warning: don't try to do this at home. It relies on a lot
# of details to be sure that it works correctly in all cases.
# Notably: no GC operation at all from the first cast_ptr_to_adr()
# because it might move the strings. The keepalive_until_here()
# are obscurely essential to make sure that the strings stay alive
# longer than the raw_memcopy().
assert length >= 0
ll_assert(srcstart >= 0, "copystrc: negative srcstart")
ll_assert(srcstart + length <= len(src.chars), "copystrc: src ovf")
ll_assert(dststart >= 0, "copystrc: negative dststart")
ll_assert(dststart + length <= len(dst.chars), "copystrc: dst ovf")
# from here, no GC operations can happen
asrc = _get_raw_buf(SRC_TP, src, srcstart)
adst = _get_raw_buf(DST_TP, dst, dststart)
llmemory.raw_memcopy(asrc, adst, llmemory.sizeof(CHAR_TP) * length)
# end of "no GC" section
keepalive_until_here(src)
keepalive_until_here(dst)
copy_string_contents._always_inline_ = True
copy_string_contents = func_with_new_name(copy_string_contents,
'copy_%s_contents' % name)
@jit.oopspec('stroruni.copy_string_to_raw(src, ptrdst, srcstart, length)')
def copy_string_to_raw(src, ptrdst, srcstart, length):
"""
Copies 'length' characters from the 'src' string to the 'ptrdst'
buffer, starting at position 'srcstart'.
'ptrdst' must be a non-gc Array of Char.
"""
# xxx Warning: same note as above apply: don't do this at home
assert length >= 0
# from here, no GC operations can happen
asrc = _get_raw_buf(SRC_TP, src, srcstart)
adst = llmemory.cast_ptr_to_adr(ptrdst)
adst = adst + llmemory.itemoffsetof(typeOf(ptrdst).TO, 0)
llmemory.raw_memcopy(asrc, adst, llmemory.sizeof(CHAR_TP) * length)
# end of "no GC" section
keepalive_until_here(src)
copy_string_to_raw._always_inline_ = True
copy_string_to_raw = func_with_new_name(copy_string_to_raw, 'copy_%s_to_raw' % name)
@jit.dont_look_inside
@signature(types.any(), types.any(), types.int(), types.int(),
returns=types.none())
def copy_raw_to_string(ptrsrc, dst, dststart, length):
# xxx Warning: same note as above apply: don't do this at home
assert length >= 0
# from here, no GC operations can happen
adst = _get_raw_buf(SRC_TP, dst, dststart)
asrc = llmemory.cast_ptr_to_adr(ptrsrc)
asrc = asrc + llmemory.itemoffsetof(typeOf(ptrsrc).TO, 0)
llmemory.raw_memcopy(asrc, adst, llmemory.sizeof(CHAR_TP) * length)
# end of "no GC" section
keepalive_until_here(dst)
copy_raw_to_string._always_inline_ = True
copy_raw_to_string = func_with_new_name(copy_raw_to_string,
'copy_raw_to_%s' % name)
return copy_string_to_raw, copy_raw_to_string, copy_string_contents
copy_string_to_raw, copy_raw_to_string, copy_string_contents = _new_copy_contents_fun(STR, STR, Char, 'string')
copy_unicode_to_raw, copy_raw_to_unicode, copy_unicode_contents = _new_copy_contents_fun(UNICODE, UNICODE,
UniChar, 'unicode')
CONST_STR_CACHE = WeakValueDictionary()
CONST_UNICODE_CACHE = WeakValueDictionary()
class BaseLLStringRepr(Repr):
def convert_const(self, value):
if value is None:
return nullptr(self.lowleveltype.TO)
#value = getattr(value, '__self__', value) # for bound string methods
if not isinstance(value, self.basetype):
raise TyperError("not a str: %r" % (value,))
try:
return self.CACHE[value]
except KeyError:
p = self.malloc(len(value))
for i in range(len(value)):
p.chars[i] = cast_primitive(self.base, value[i])
p.hash = 0
self.ll.ll_strhash(p) # precompute the hash
self.CACHE[value] = p
return p
def make_iterator_repr(self, variant=None):
if variant is not None:
raise TyperError("unsupported %r iterator over a str/unicode" %
(variant,))
return self.repr.iterator_repr
def can_ll_be_null(self, s_value):
# XXX unicode
if self is string_repr:
return s_value.can_be_none()
else:
return True # for CharRepr/UniCharRepr subclasses,
# where NULL is always valid: it is chr(0)
def _list_length_items(self, hop, v_lst, LIST):
LIST = LIST.TO
v_length = hop.gendirectcall(LIST.ll_length, v_lst)
v_items = hop.gendirectcall(LIST.ll_items, v_lst)
return v_length, v_items
class StringRepr(BaseLLStringRepr, AbstractStringRepr):
lowleveltype = Ptr(STR)
basetype = str
base = Char
CACHE = CONST_STR_CACHE
def __init__(self, *args):
AbstractStringRepr.__init__(self, *args)
self.ll = LLHelpers
self.malloc = mallocstr
def ll_decode_latin1(self, value):
lgt = len(value.chars)
s = mallocunicode(lgt)
for i in range(lgt):
s.chars[i] = cast_primitive(UniChar, value.chars[i])
return s
class UnicodeRepr(BaseLLStringRepr, AbstractUnicodeRepr):
lowleveltype |
tzhaoredhat/automation | pdc/apps/release/views.py | Python | mit | 29,999 | 0.0008 | #
# Copyright (c) 2015 Red Hat
# Licensed under The MIT License (MIT)
# http://opensource.org/licenses/MIT
#
import json
from django.shortcuts import render, get_object_or_404
from django.conf import settings
from kobo.django.views.generic import DetailView, SearchView
from rest_framework import viewsets, mixins, status
from rest_framework.response import Response
from . import filters
from . import signals
from . import models
from .forms import (ReleaseSearchForm, BaseProductSearchForm,
ProductSearchForm, ProductVersionSearchForm)
from .serializers import (ProductSerializer, ProductVersionSerializer,
ReleaseSerializer, BaseProductSerializer,
ReleaseTypeSerializer, ReleaseVariantSerializer,
VariantTypeSerializer)
from pdc.apps.repository import models as repo_models
from pdc.apps.common.viewsets import (ChangeSetModelMixin,
ChangeSetCreateModelMixin,
ChangeSetUpdateModelMixin,
MultiLookupFieldMixin,
StrictQueryParamMixin)
from . import lib
class ReleaseListView(SearchView):
form_class = ReleaseSearchForm
queryset = models.Release.objects.select_related('release_type', 'product_version', 'base_product').order_by('id')
allow_empty = True
template_name = "release_list.html"
context_object_name = "release_list"
paginate_by = settings.ITEMS_PER_PAGE
class ReleaseDetailView(DetailView):
queryset = models.Release.objects.select_related('release_type') \
.prefetch_related('variant_set__variant_type',
'variant_set__variantarch_set__arch')
pk_url_kwarg = "id"
template_name = "release_detail.html"
def get_context_data(self, **kwargs):
context = super(ReleaseDetailView, self).get_context_data(**kwargs)
context['repos'] = repo_models.Repo.objects.filter(
variant_arch__variant__release=self.object
).select_related('variant_arch', 'variant_arch__arch',
'content_category', 'content_format', 'repo_family', 'service')
return context
class BaseProductListView(SearchView):
form_class = BaseProductSearchForm
queryset = models.BaseProduct.objects.all().order_by('id')
allow_empty = True
template_name = "base_product_list.html"
context_object_name = "base_product_list"
paginate_by = settings.ITEMS_PER_PAGE
class BaseProductDetailView(DetailView):
model = models.BaseProduct
pk_url_kwarg = "id"
template_name = "base_product_detail.html"
context_object_name = "base_product"
def get_context_data(self, **kwargs):
context = super(BaseProductDetailView, self).get_context_data(**kwargs)
context["release_list"] = models.Release.objects.filter(
base_product=self.object.id
).select_related('product_version', 'base_product', 'release_type')
return context
class ProductListView(SearchView):
form_class = ProductSearchForm
queryset = models.Product.objects.prefetch_related('productversion_set__release_set').order_by('id')
allow_empty = True
template_name = "product_list.html"
context_object_name = "product_list"
paginate_by = settings.ITEMS_PER_PAGE
class ProductDetailView(DetailView):
queryset = models.Product.objects.prefetch_related('productversion_set__release_set')
pk_url_kwarg = "id"
template_name = "product_detail.html"
context_object_name = "product"
class ProductViewSet(ChangeSetCreateModelMixin,
ChangeSetUpdateModelMixin,
StrictQueryParamMixin,
mixins.RetrieveModelMixin,
mixins.ListModelMixin,
viewsets.GenericViewSet):
"""
API endpoint that allows products to be viewed or edited.
Each product can have multiple version. Their identifiers are provided in
the form of `product_version_id` (both in requests and responses).
"""
queryset = models.Product.objects.prefetch_related('productversion_set').order_by('id')
serializer_class = ProductSerializer
lookup_field = 'short'
filter_class = filters.ProductFilter
def create(self, *args, **kwargs):
"""
__Method__: POST
__URL__: $LINK:product-list$
__Data__:
%(WRITABLE_SERIALIZER)s
__Response__:
%(SERIALIZER)s
"""
return super(ProductViewSet, self).create(*args, **kwargs)
def retrieve(self, *args, **kwargs):
"""
__Method__: GET
__URL__: $LINK:product-detail:short$
__Response__:
%(SERIALIZER)s
"""
return super(ProductViewSet, self).retrieve(*args, **kwargs)
def list(self, *args, **kwargs):
"""
__Method__: GET
__URL__: $LINK:product-list$
__Query params__:
%(FILTERS)s
__Response__: a paged list of following objects
%(SERIALIZER)s
"""
return super(ProductViewSet, self).list(*args, **kwargs)
def update(self, *args, **kwargs):
"""
__Method__: PUT, PATCH
__URL__: $LINK:product-detail:short$
__Data__:
| %(WRITABLE_SERIALIZER)s
Please note that if you update the `short` field, the URL of this
product will change. The change of short name is *not* propagated to
product | versions nor releases.
__Response__:
%(SERIALIZER)s
"""
return super(ProductViewSet, self).update(*args, **kwargs)
class ProductVersionViewSet(ChangeSetCreateModelMixin,
ChangeSetUpdateModelMixin,
StrictQueryParamMixin,
mixins.RetrieveModelMixin,
mixins.ListModelMixin,
viewsets.GenericViewSet):
"""
API endpoint that allows product versions to be viewed or edited.
Product versions always refer to a product by means of a human readable
`short` name. Similarly releases are referenced by `release_id`. This
applies to both requests and responses.
"""
queryset = models.ProductVersion.objects.select_related('product').prefetch_related('release_set').order_by('id')
serializer_class = ProductVersionSerializer
lookup_field = 'product_version_id'
lookup_value_regex = '[^/]+'
filter_class = filters.ProductVersionFilter
def create(self, *args, **kwargs):
"""
__Method__: POST
__URL__: $LINK:productversion-list$
__Data__:
%(WRITABLE_SERIALIZER)s
If `short` is not specified, the short name of associated product will
be used.
__Response__:
%(SERIALIZER)s
"""
return super(ProductVersionViewSet, self).create(*args, **kwargs)
def retrieve(self, *args, **kwargs):
"""
__Method__: GET
__URL__: $LINK:productversion-detail:product_version_id$
__Response__:
%(SERIALIZER)s
The list of releases is ordered by short and version.
"""
return super(ProductVersionViewSet, self).retrieve(*args, **kwargs)
def list(self, *args, **kwargs):
"""
__Method__: GET
__URL__: $LINK:productversion-list$
__Query params__:
%(FILTERS)s
__Response__: a paged list of following objects
%(SERIALIZER)s
The list of releases for each product version is ordered by short and
version.
"""
return super(ProductVersionViewSet, self).list(*args, **kwargs)
def update(self, *args, **kwargs):
"""
__Method__: PUT, PATCH
__URL__: $LINK:productversion-detail:product_version_id$
__Data__:
%(WRITABLE_SERIALIZER)s
Please note that if you change the `short` or `version` field, the
`product_version_id` will be modified accordingly, and the URL of the
object will be changed. All changes are local to the updated model |
projecthamster/hamster-dbus | tests/storage/test_dbus_store.py | Python | gpl-3.0 | 1,981 | 0 | # -*- coding: utf-8 -*-
"""
Unittests for DBusStore.
Please refer to ``__init__.py`` for general details.
"""
from __future__ import absolute_import, unicode_literals
import subprocess
from hamster_dbus import storage
from . import common
class TestDBusStore(common.HamsterDBusManagerTestCase):
def setUp(self):
"""Setup test environment."""
# We have to launch a mocked service in order to provide the
# 'org.projecthamster.HamsterDBus' namespace.
# Which object and interface does not actually matter for this test.
self.service_mock = self.spawn_server(
'org.projecthamster.HamsterDBus',
'/org/projecthamster/HamsterDBus/FactManager',
'org.projecthamster.HamsterDBus.FactManager1',
stdout=subprocess.PIPE
)
# For our test purpose, an empty config test suffices.
self.store = storage.DBusStore({})
def test_categories_manager(self):
"""Make sure a ``storage.CategoryManager`` is instantiated."""
self.assertIsInstance(self.store.categories, storage.CategoryManager)
def test_activities_manager(self):
"""Make sure a ``storage.ActivityManager`` is instantiated."""
self.assertIsInstance(self.store.activities, storage.ActivityManager)
def test_t | ags_manager(self):
"""Make sure a ``storage.TagManager`` is instantiated."""
self.assertIsInstance(self.store.tags, storage.TagManager)
def test_facts_manager(self):
"""Make sure a ``storage.FactManager`` is instantiated."""
self.assertIsInstance(self.store.facts, storage.FactManager)
def test_cleanup(self):
"""Test the cleanup method."""
self.assertIs | None(self.store.cleanup())
def test_explicit_bus(self):
"""Make sure that an explicitly passed bus is really used."""
self.store = storage.DBusStore({}, bus=self.dbus_con)
self.assertEqual(self.store._bus, self.dbus_con)
|
thesgc/chembiohub_ws | cbh_core_model/migrations/0031_auto_20160112_0307.py | Python | gpl-3.0 | 2,204 | 0.006352 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.core.exceptions import ObjectDoesNotExist
def migrate_old_permissions_to_new_ones(apps, schema_editor):
"""Idempotent function to migrate the old permissions to the new ones. Old custom contentypes are not deleted for now
This will allow the app to be migrated to django 1.9 and keeps the permissions system in line with django's meaning
functions such as user.is_administrator dontate all permissions to the user properly
"""
Permission = apps.get_model("auth", "Permission")
Project = apps.get_model("cbh_core_model", "Project")
ContentType = apps.get_model("contenttypes", "ContentType")
try:
new_ct = ContentType.objects.get(app_label="cbh_core_model", model="project")
skin_ct = ContentType.objects.get(app_label="cbh_core_model", model="skinningconfig")
from cbh_core_model.models import get_permission_name, get_permission_codename
for perm in Permission.objects.all():
ct = perm.content_type
if ct.app_label.isdigit():
try:
project = Project.objects.get(id=int(ct.app_label))
if perm.codename == "admin":
perm.codename = "owner"
perm.name = get_permission_name(project.name, perm.codename)
perm.codename = get_permission_codename(project.id, perm.codename)
| perm.content_type = new_ct
perm.content_type_id = new_ct.id
perm.save()
except ObjectDoesNotExist:
pass
if ct.app_label == "_can_see":
perm.co | ntent_type = skin_ct
perm.content_type_id = skin_ct.id
perm.save()#
except ObjectDoesNotExist:
print("No contenttypes therefore nothing to migrate")
class Migration(migrations.Migration):
dependencies = [
('cbh_core_model', '0030_auto_20151215_0548'),
]
operations = [
migrations.RunPython(migrate_old_permissions_to_new_ones,)
]
|
ReproducibleBuilds/diffoscope | diffoscope/comparators/odt.py | Python | gpl-3.0 | 1,450 | 0 | # -*- coding: utf-8 -*-
#
# diffoscope: in-depth comparison of files, archives, and directories
#
# Copyright © 2017 | Chris Lamb <lamby@debian.org>
#
# diffoscope is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# diffoscope is distribu | ted in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with diffoscope. If not, see <https://www.gnu.org/licenses/>.
import re
from diffoscope.tools import tool_required
from diffoscope.difference import Difference
from .utils.file import File
from .utils.command import Command
class Odt2txt(Command):
@tool_required('odt2txt')
def cmdline(self):
return (
'odt2txt',
'--encoding=UTF-8',
self.path,
)
class OdtFile(File):
DESCRIPTION = "OpenOffice .odt files"
FILE_TYPE_RE = re.compile(r'^OpenDocument Text\b')
def compare_details(self, other, source=None):
return [Difference.from_command(
Odt2txt,
self.path,
other.path,
source='odt2txt',
)]
|
samuelmaudo/yepes | yepes/exceptions.py | Python | bsd-3-clause | 1,875 | 0.000533 | # -*- coding:utf-8 -*-
from __future__ import unicode_literals
class LookupTypeError(TypeError):
def __init__(self, lookup_type):
msg = "Lookup type '{0}' not supported"
super(LookupTypeError, self).__init__(msg.format(lookup_type))
self.lookup_type = lookup_type
class MissingAttributeError(AttributeError):
def __init__(self, obj, attr_name):
args = (
obj.__class__.__name__,
attr_name,
)
msg = "'{0}' object has no attribute '{1}'"
super(MissingAttributeError, self).__init__(msg.format(*args))
self.obj = obj
self.attr_name = attr_name
class ReadOnlyAttributeError(AttributeError):
def __init__(self, obj, attr_name):
args = (
obj.__class__.__name__,
attr_name,
)
msg = "'{0}.{1}' attribute cannot be assigned"
super(ReadOnlyAttributeError, self).__init__(msg.format(*args))
self.obj = obj
self.attr_name = attr_name
class ReadOnlyObjectError(AttributeError):
def __init__(self, obj, attr_name):
args = (
obj.__class__.__name__,
attr_name,
)
msg = "'{0}' does not accept attribute assignment"
super(ReadOnlyObjectError, self).__init__(msg.format(*args))
self.obj = obj
class UnexpectedTypeError( | TypeError):
def __init__(self, expected_type, received_object):
if not isinstance(expected_type, (tuple, list)):
expected_type = (expected_type, )
args = (
' or '.join(cls.__name__ for cls in expected_type),
received_object.__class__.__name__,
)
msg = '{0} was expected, got | {1}'
super(UnexpectedTypeError, self).__init__(msg.format(*args))
self.expected_type = expected_type
self.received_object = received_object
|
davidzchen/tensorflow | tensorflow/python/keras/layers/dense_attention_test.py | Python | apache-2.0 | 34,547 | 0.002403 | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests dense attention layers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
import numpy as np
from tensorflow.python import keras
from tensorflow.python.eager import context
from tensorflow.python.keras import combinations
from tensorflow.python.keras.layers import core
from tensorflow.python.keras.layers import dense_attention
from tensorflow.python.ops import array_ops
from tensorflow.python.platform import test
@combinations.generate(combinations.combine(mode=['graph', 'eager']))
class BaseDenseAttentionTest(test.TestCase, parameterized.TestCase):
def test_one_dim_with_mask(self):
# Scores tensor of shape [1, 1, 1]
scores = np.array([[[1.1]]], dtype=np.float32)
# Value tensor of shape [1, 1, 1]
v = np.array([[[1.6]]], dtype=np.float32)
# Scores mask tensor of shape [1, 1, 1]
scores_mask = np.array([[[True]]], dtype=np.bool_)
actual, actual_scores = dense_attention.BaseDenseAttention()._apply_scores(
scores=scores, value=v, scores_mask=scores_mask)
# Expected softmax_scores = [[[1]]]
expected_scores = np.array([[[1.]]], dtype=np.float32)
self.assertAllClose(expected_scores, actual_scores)
# Expected tensor of shape [1, 1, 1].
# expected000 = softmax_scores[0, 0] * 1.6 = 1.6
expected = np.array([[[1.6]]], dtype=np.float32)
self.assertAllClose(expected, actual)
def test_one_dim_no_mask(self):
# Scores tensor of shape [1, 1, 1]
scores = np.array([[[1.1]]], dtype=np.float32)
# Value tensor of shape [1, 1, 1]
v = np.array([[[1.6]]], dtype=np.float32)
actual, actual_scores = dense_attention.BaseDenseAttention()._apply_scores(
scores=scores, value=v)
# Expected softmax_scores = [[[1]]]
expected_scores = np.array([[[1.]]], dtype=np.float32)
self.assertAllClose(expected_scores, actual_scores)
# Expected tensor of shape [1, 1, 1].
# expected000 = softmax_scores[0, 0] * 1.6 = 1.6
expected = np.array([[[1.6]]], dtype=np.float32)
self.assertAllClose(expected, actual)
def test_multi_dim_with_mask(self):
# Scores tensor of shape [1, 1, 3]
scores = np.array([[[1., 0., 1.]]], dtype=np.float32)
# Value tensor of shape [1, 3, 1]
v = np.array([[[1.6], [0.7], [-0.8]]], dtype=np.float32)
# Scores mask tensor of shape [1, 1, 3]
scores_mask = np.array([[[True, True, False]]], dtype=np.bool_)
actual, actual_scores = dense_attention.BaseDenseAttention()._apply_scores(
scores=scores, value=v, scores_mask=scores_mask)
# Expected softmax scores = softmax(scores) with zeros in positions where
# v_mask == False.
# => softmax_scores000 = exp(1)/(exp(1) + exp(0)) = 0.73105857863
# softmax_scores001 = exp(0)/(exp(1) + exp(0)) = 0.268941 | 42137
# softmax_scores002 = 0
| expected_scores = np.array(
[[[0.73105857863, 0.26894142137, 0.]]], dtype=np.float32)
self.assertAllClose(expected_scores, actual_scores)
# Expected tensor of shape [1, 1, 1].
# expected000 = 0.73105857863 * 1.6 + 0.26894142137 * 0.7 - 0 * 0.8
# = 1.35795272077
expected = np.array([[[1.35795272077]]], dtype=np.float32)
self.assertAllClose(expected, actual)
def test_multi_dim_no_mask(self):
    """Unmasked three-position attention: plain softmax over all scores."""
    scores = np.array([[[1., 0., 1.]]], dtype=np.float32)          # [1, 1, 3]
    values = np.array([[[1.6], [0.7], [-0.8]]], dtype=np.float32)  # [1, 3, 1]
    result, weights = dense_attention.BaseDenseAttention()._apply_scores(
        scores=scores, value=values)
    # softmax([1, 0, 1]):
    #   exp(1)/(2*exp(1) + exp(0)) = 0.42231879825  (positions 0 and 2)
    #   exp(0)/(2*exp(1) + exp(0)) = 0.15536240349  (position 1)
    w_hi = 0.42231879825
    w_lo = 0.15536240349
    self.assertAllClose(
        np.array([[[w_hi, w_lo, w_hi]]], dtype=np.float32), weights)
    # Weighted sum of the values:
    #   0.42231879825*1.6 + 0.15536240349*0.7 - 0.42231879825*0.8
    #   = 0.44660872104, shape [1, 1, 1].
    self.assertAllClose(
        np.array([[[0.44660872104]]], dtype=np.float32), result)
def test_one_dim_batch_size_two(self):
    """Batched single-position attention: each batch row softmaxes to 1."""
    # Scores tensor of shape [2, 1, 1]
    scores = np.array([[[1.1]], [[2.1]]], dtype=np.float32)
    # Value tensor of shape [2, 1, 1]
    v = np.array([[[1.6]], [[2.6]]], dtype=np.float32)
    # Scores mask tensor of shape [2, 1, 1]
    scores_mask = np.array([[[True]], [[True]]], dtype=np.bool_)
    actual, actual_scores = dense_attention.BaseDenseAttention()._apply_scores(
        scores=scores, value=v, scores_mask=scores_mask)
    # Expected softmax_scores = [[[1]], [[1]]]
    expected_scores = np.array([[[1.]], [[1.]]], dtype=np.float32)
    self.assertAllClose(expected_scores, actual_scores)
    # Expected tensor of shape [2, 1, 1].
    # expected000 = softmax_scores[0, 0] * 1.6 = 1.6
    # expected100 = softmax_scores[1, 0] * 2.6 = 2.6
    expected = np.array([[[1.6]], [[2.6]]], dtype=np.float32)
    self.assertAllClose(expected, actual)
def test_shape_with_dropout(self):
    """Dropout must not change output shapes (checked in inference mode)."""
    # scores: Scores float tensor of shape `[batch_size, tq, tv]`.
    # value: Value tensor of shape `[batch_size, tv, dim]`.
    batch_size = 4
    tq = 5
    tv = 6
    dim = 7
    scores = np.ones((batch_size, tq, tv))
    value = np.ones((batch_size, tv, dim))
    # training=False, so the dropout layer is inactive; only shapes matter.
    actual, actual_scores = dense_attention.BaseDenseAttention(
        dropout=0.1)._apply_scores(
            scores=scores, value=value, training=False)
    # Expected Tensor of shape `[batch_size, tq, tv]`.
    expected_scores_shape = [batch_size, tq, tv]
    self.assertAllEqual(expected_scores_shape, array_ops.shape(actual_scores))
    # Expected Tensor of shape `[batch_size, tq, dim]`.
    expected_shape = [batch_size, tq, dim]
    self.assertAllEqual(expected_shape, array_ops.shape(actual))
def test_serialization(self):
    """The `causal` flag must survive both serialization round-trips."""
    original = dense_attention.BaseDenseAttention(causal=True)
    # Round-trip through the generic layer (de)serialization helpers.
    restored = keras.layers.deserialize(keras.layers.serialize(original))
    self.assertEqual(restored.causal, True)
    # Round-trip through get_config / from_config directly.
    restored = dense_attention.BaseDenseAttention.from_config(
        original.get_config())
    self.assertEqual(restored.causal, True)
@combinations.generate(combinations.combine(mode=['graph', 'eager']))
class AttentionTest(test.TestCase, parameterized.TestCase):
def test_calculate_scores_one_dim(self):
    """Dot-product score of two one-element tensors is their product."""
    # Query tensor of shape [1, 1, 1]
    q = np.array([[[1.1]]], dtype=np.float32)
    # Key tensor of shape [1, 1, 1]
    k = np.array([[[1.6]]], dtype=np.float32)
    attention_layer = dense_attention.Attention()
    attention_layer.build(input_shape=([1, 1, 1], [1, 1, 1]))
    actual = attention_layer._calculate_scores(query=q, key=k)
    # Expected tensor of shape [1, 1, 1].
    # expected000 = 1.1*1.6 = 1.76
    expected = np.array([[[1.76]]], dtype=np.float32)
    self.assertAllClose(expected, actual)
def test_calculate_scores_multi_dim(self):
# Query tensor of shape [1, 2, 4]
q = np.array(
[[[1., 1.1, 1.2, 1.3], [2., 2.1, 2.2, 2.3]]], dtype=np.float32)
# Key tensor of shape [1, 3, 4]
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2007 Donald N. Allingham
# Copyright (C) 2013-2014 Vassilii Khachaturov
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
""" Unittest for testing dates """
#-------------------------------------------------------------------------
#
# Standard python modules
#
#-------------------------------------------------------------------------
import unittest
#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from ...config import config
from ...datehandler import get_date_formats, set_format
from ...datehandler import parser as _dp
from ...datehandler import displayer as _dd
from ...datehandler._datedisplay import DateDisplayEn
from ...lib.date import Date, DateError, Today, calendar_has_fixed_newyear
# Registry mapping a test-set name -> list of Date objects to exercise.
date_tests = {}

# first the "basics".
# Every quality/modifier combination for one Gregorian date, every
# month-pair for ranges/spans, plus one text-only date per quality.
testset = "basic test"
dates = []
calendar = Date.CAL_GREGORIAN
for quality in (Date.QUAL_NONE, Date.QUAL_ESTIMATED, Date.QUAL_CALCULATED):
    for modifier in (Date.MOD_NONE, Date.MOD_BEFORE, Date.MOD_AFTER, Date.MOD_ABOUT):
        for month in range(1,13):
            d = Date()
            d.set(quality,modifier,calendar,(4,month,1789,False),"Text comment")
            dates.append( d)
    for modifier in (Date.MOD_RANGE, Date.MOD_SPAN):
        for month1 in range(1,13):
            for month2 in range(1,13):
                d = Date()
                d.set(quality,modifier,calendar,(4,month1,1789,False,5,month2,1876,False),"Text comment")
                dates.append( d)
    modifier = Date.MOD_TEXTONLY
    d = Date()
    d.set(quality,modifier,calendar,Date.EMPTY,"This is a textual date")
    dates.append( d)
date_tests[testset] = dates
# incomplete dates (day or month missing)
# A day or month value of 0 encodes "unknown"; every combination of
# missing parts is exercised for both single dates and range/span pairs.
testset = "partial date"
dates = []
calendar = Date.CAL_GREGORIAN
for quality in (Date.QUAL_NONE, Date.QUAL_ESTIMATED, Date.QUAL_CALCULATED):
    for modifier in (Date.MOD_NONE, Date.MOD_BEFORE, Date.MOD_AFTER, Date.MOD_ABOUT):
        d = Date()
        d.set(quality,modifier,calendar,(0,11,1789,False),"Text comment")
        dates.append( d)
        d = Date()
        d.set(quality,modifier,calendar,(0,0,1789,False),"Text comment")
        dates.append( d)
    for modifier in (Date.MOD_RANGE, Date.MOD_SPAN):
        d = Date()
        d.set(quality,modifier,calendar,(4,10,1789,False,0,11,1876,False),"Text comment")
        dates.append( d)
        d = Date()
        d.set(quality,modifier,calendar,(4,10,1789,False,0,0,1876,False),"Text comment")
        dates.append( d)
        d = Date()
        d.set(quality,modifier,calendar,(0,10,1789,False,5,11,1876,False),"Text comment")
        dates.append( d)
        d = Date()
        d.set(quality,modifier,calendar,(0,10,1789,False,0,11,1876,False),"Text comment")
        dates.append( d)
        d = Date()
        d.set(quality,modifier,calendar,(0,10,1789,False,0,0,1876,False),"Text comment")
        dates.append( d)
        d = Date()
        d.set(quality,modifier,calendar,(0,0,1789,False,5,11,1876,False),"Text comment")
        dates.append( d)
        d = Date()
        d.set(quality,modifier,calendar,(0,0,1789,False,0,11,1876,False),"Text comment")
        dates.append( d)
        d = Date()
        d.set(quality,modifier,calendar,(0,0,1789,False,0,0,1876,False),"Text comment")
        dates.append( d)
date_tests[testset] = dates
# slash-dates
# The fourth tuple element (True) marks a dual-year "slash" date such as
# 1789/90; single dates and both ends of range/span pairs are covered.
testset = "slash-dates"
dates = []
calendar = Date.CAL_GREGORIAN
for quality in (Date.QUAL_NONE, Date.QUAL_ESTIMATED, Date.QUAL_CALCULATED):
    for modifier in (Date.MOD_NONE, Date.MOD_BEFORE, Date.MOD_AFTER, Date.MOD_ABOUT):
        # normal date
        d = Date()
        d.set(quality,modifier,calendar,(4,11,1789,True),"Text comment")
        dates.append( d)
    for modifier in (Date.MOD_RANGE, Date.MOD_SPAN):
        d = Date()
        d.set(quality,modifier,calendar,(4,11,1789,True,5,10,1876,False),"Text comment")
        dates.append( d)
        d = Date()
        d.set(quality,modifier,calendar,(4,11,1789,False,5,10,1876,True),"Text comment")
        dates.append( d)
        d = Date()
        d.set(quality,modifier,calendar,(4,11,1789,True,5,10,1876,True),"Text comment")
        dates.append( d)
date_tests[testset] = dates
# BCE
# Negative years exercise dates before the common era.
testset = "B. C. E."
dates = []
calendar = Date.CAL_GREGORIAN
for quality in (Date.QUAL_NONE, Date.QUAL_ESTIMATED, Date.QUAL_CALCULATED):
    for modifier in (Date.MOD_NONE, Date.MOD_BEFORE, Date.MOD_AFTER, Date.MOD_ABOUT):
        # normal date
        d = Date()
        d.set(quality,modifier,calendar,(4,11,-90,False),"Text comment")
        dates.append( d)
    for modifier in (Date.MOD_RANGE, Date.MOD_SPAN):
        d = Date()
        d.set(quality,modifier,calendar,(5,10,-90,False,4,11,-90,False),"Text comment")
        dates.append( d)
        # NOTE(review): this trailing Date() is never .set() nor appended —
        # it looks like a truncated copy of the statements above; confirm
        # against the upstream file before relying on this test set.
        d = Date()
date_tests[testset] = dates
# test for all other different calendars
# One plain date and one range/span per quality/modifier for each
# supported non-Gregorian calendar.
testset = "Non-gregorian"
dates = []
for calendar in (Date.CAL_JULIAN,
                 Date.CAL_HEBREW,
                 Date.CAL_ISLAMIC,
                 Date.CAL_FRENCH,
                 Date.CAL_PERSIAN,
                 ):
    for quality in (Date.QUAL_NONE, Date.QUAL_ESTIMATED, Date.QUAL_CALCULATED):
        for modifier in (Date.MOD_NONE, Date.MOD_BEFORE, Date.MOD_AFTER, Date.MOD_ABOUT):
            d = Date()
            d.set(quality,modifier,calendar,(4,11,1789,False),"Text comment")
            dates.append( d)
        for modifier in (Date.MOD_RANGE, Date.MOD_SPAN):
            d = Date()
            d.set(quality,modifier,calendar,(4,10,1789,False,5,11,1876,False),"Text comment")
            dates.append( d)
# CAL_SWEDISH - Swedish calendar 1700-03-01 -> 1712-02-30!
class Context:
    """Minimal context manager that simply yields a pre-set value.

    Used below to scope a calendar constant with `with`-block syntax;
    there is no setup or teardown work to do.
    """

    def __init__(self, retval):
        # Value handed back by __enter__.
        self.retval = retval

    def __enter__(self):
        return self.retval

    def __exit__(self, *args, **kwargs):
        # Nothing to clean up; returning None lets exceptions propagate.
        pass
# CAL_SWEDISH - Swedish calendar 1700-03-01 -> 1712-02-30!
# Dates are restricted to the years the calendar was actually in force.
with Context(Date.CAL_SWEDISH) as calendar:
    for quality in (Date.QUAL_NONE, Date.QUAL_ESTIMATED, Date.QUAL_CALCULATED):
        for modifier in (Date.MOD_NONE, Date.MOD_BEFORE, Date.MOD_AFTER, Date.MOD_ABOUT):
            # Fixed extraction garbling here ("d = | Date()").
            d = Date()
            d.set(quality,modifier,calendar,(4,11,1700,False),"Text comment")
            dates.append( d)
        for modifier in (Date.MOD_RANGE, Date.MOD_SPAN):
            d = Date()
            d.set(quality,modifier,calendar,(4,10,1701,False,
                                             5,11,1702,False),"Text comment")
            dates.append( d)

# Exercise every month number per calendar (Hebrew and French have 13).
quality = Date.QUAL_NONE
modifier = Date.MOD_NONE
for calendar in (Date.CAL_JULIAN,
                 Date.CAL_ISLAMIC,
                 Date.CAL_PERSIAN,
                 ):
    for month in range(1,13):
        d = Date()
        d.set(quality,modifier,calendar,(4,month,1789,False),"Text comment")
        dates.append( d)
for calendar in (Date.CAL_HEBREW, Date.CAL_FRENCH):
    for month in range(1,14):
        d = Date()
        d.set(quality,modifier,calendar,(4,month,1789,False),"Text comment")
        dates.append( d)
date_tests[testset] = dates

# Separate set of every month in every Swedish-calendar year.
swedish_dates = []
# CAL_SWEDISH - Swedish calendar 1700-03-01 -> 1712-02-30!
with Context(Date.CAL_SWEDISH) as calendar:
    for year in range(1701, 1712):
        for month in range(1,13):
            d = Date()
            d.set(quality,modifier,calendar,(4,month,year,False),"Text comment")
            swedish_dates.append( d)
#-------------------------------------------------------------------------
#
# BaseDateTest
#
#-------------------------------------------------------------------------
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import models, api, fields
from odoo.tools.translate import _
class StockPicking(models.Model):
    """Expose the originating sale order's website on stock pickings."""
    _inherit = 'stock.picking'

    # Related to the sale order's website; stored so it can be searched
    # and grouped on. (Fixed leading "|" extraction garbling on the
    # store/readonly line.)
    website_id = fields.Many2one('website', related='sale_id.website_id', string='Website',
                                 help='Website this picking belongs to.',
                                 store=True, readonly=True)
# Copyright 2020 Nokia.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from django.utils.translation import ugettext_lazy as _
from horizon import base
from horizon import exceptions
from horizon import forms
from horizon import workflows
from openstack_dashboard.dashboards.project.networks import \
workflows as original
from nuage_horizon.api import neutron
LOG = logging.getLogger(__name__)
class UnsafeChoiceField(forms.ChoiceField):
    """
    This is an extension of the default choicefield with the exception that it
    will not validate that the value in the POST request matches the value
    during rendering of the Choicefield (In case Javascript alters the values
    client-side)
    """
    def validate(self, value):
        # Intentionally skip ChoiceField.validate(): the choice list is
        # rebuilt client-side, so any submitted value must be accepted here.
        pass
class CreateSubnetTypeAction(workflows.Action):
    """Workflow action letting the user choose how the subnet is managed.

    Offers an OpenStack-managed subnet, a VSD-managed one entered by UUID
    ("manual"), or a VSD-managed one picked by browsing organisation /
    domain / zone / subnet ("auto"). The dropdowns are populated
    client-side (hence UnsafeChoiceField) and mirrored in hidden fields.
    """
    with_subnet = forms.BooleanField(label=_("Create Subnet"),
                                     widget=forms.CheckboxInput(attrs={
                                         'class': 'switchable',
                                         'data-slug': 'with_subnet',
                                         'data-hide-tabs': 'create_network__'
                                                           'createsubnetdetail'
                                                           'action '
                                                           'create_network__'
                                                           'createsubnetinfo'
                                                           'action',
                                         'data-hide-on-checked': 'false'
                                     }),
                                     initial=True,
                                     required=False)
    subnet_type = forms.ChoiceField(label=_("Subnet type choice"),
                                    widget=forms.Select(attrs={
                                        'class': 'switched',
                                        'data-slug': 'nuage_id',
                                        'data-switch-on': 'with_subnet',
                                    }),
                                    help_text=_(
                                        "Optional Subnet ID from Nuage. "
                                        "This links the subnet to an "
                                        "existing Nuage one, making it "
                                        "VSD managed"),
                                    required=False)
    org_id = UnsafeChoiceField(label=_("Organisation choice"),
                               required=False)
    dom_id = UnsafeChoiceField(label=_("Domain choice"),
                               required=False)
    zone_id = UnsafeChoiceField(label=_("Zone choice"),
                                required=False)
    sub_id = UnsafeChoiceField(label=_("Subnet choice"),
                               required=False)
    ip_version_ = UnsafeChoiceField(label=_("Cidr choice"),
                                    required=False)
    # Hidden mirrors of the client-side selections, posted back verbatim.
    hidden_org = forms.CharField(widget=forms.HiddenInput,
                                 required=False)
    hidden_dom = forms.CharField(widget=forms.HiddenInput,
                                 required=False)
    hidden_zone = forms.CharField(widget=forms.HiddenInput,
                                  required=False)
    hidden_sub = forms.CharField(widget=forms.HiddenInput,
                                 required=False)
    hidden_ip_version_ = forms.CharField(widget=forms.HiddenInput,
                                         required=False)
    hidden_gateway_ = forms.CharField(widget=forms.HiddenInput,
                                      required=False)

    class Meta:
        name = _("Subnet Type")
        help_text = _('Choose the type of subnet you are about to create.')

    def __init__(self, request, context, *args, **kwargs):
        super(CreateSubnetTypeAction, self).__init__(request, context, *args,
                                                     **kwargs)
        # Only admins may link to VSD; seed placeholder entries, the real
        # choices are filled in by JavaScript.
        if request.user.is_superuser:
            self.fields['org_id'].choices = [('', _("Choose an Organization"))]
            self.fields['dom_id'].choices = [('', _("Choose a Domain"))]
            # Fixed extraction garbling here ("zone_ | id").
            self.fields['zone_id'].choices = [('', _("Choose a Zone"))]
            self.fields['sub_id'].choices = [('', _("Choose a Subnet"))]
            self.fields['ip_version_'].choices = [('', _("Choose a cidr"))]
            type_choices = [('os', _("OpenStack Managed Subnet")),
                            ('vsd_manual', _("VSD Managed Subnet (Manual)")),
                            ('vsd_auto', _("VSD Managed Subnet (Auto)"))]
            self.fields['subnet_type'].choices = type_choices

    def _org_to_choices(self, organisations):
        # Prefix each name with the first 6 chars of its UUID so entries
        # with duplicate names stay distinguishable in the dropdown.
        choices = []
        for org in organisations:
            display_name = '(' + org['id'][:6] + ') ' + org['name']
            choices.append((org['id'], display_name))
        return choices

    def is_valid(self):
        valid = super(CreateSubnetTypeAction, self).is_valid()
        # The extra cross-field checks below only apply to admins; regular
        # users never see the VSD fields.
        if not self.request.user.is_superuser:
            return valid
        # NOTE(review): self.data['subnet_type'] assumes the key is always
        # posted; a missing key would raise KeyError here — confirm.
        if self.data['subnet_type'] == 'vsd_auto':
            if not self.data['hidden_sub']:
                self._errors['__all__'] = self.error_class(
                    ['A subnet must be selected below.'])
                valid = False
        if ((self.data.get('with_subnet') or self.initial.get('network_id'))
                and not self.data['subnet_type']):
            self._errors['subnet_type'] = self.error_class(
                ['This is a required field.'])
            valid = False
        return valid
class CreateSubnetType(workflows.Step):
    """Wires CreateSubnetTypeAction into the network-creation workflow."""
    action_class = CreateSubnetTypeAction
    # Context keys exported to later workflow steps.
    # NOTE(review): 'dom_id' and 'ip_version_' fields exist on the action
    # but are not contributed — presumably the hidden_* mirrors carry
    # their values; confirm.
    contributes = ("with_subnet", "subnet_type", "org_id", "zone_id", "sub_id",
                   "hidden_org", "hidden_dom", "hidden_zone", "hidden_sub",
                   "hidden_ip_version_", "hidden_gateway_")
class CreateSubnetInfoAction(original.CreateSubnetInfoAction):
nuage_id = forms.CharField(max_length=255,
label=_("Nuage UUID"),
required=True,
initial='.')
net_partition = forms.CharField(max_length=255,
label=_("Nuage Net Partition"),
required=True,
initial='.')
def __init__(self, request, context, *args, **kwargs):
super(CreateSubnetInfoAction, self).__init__(request, context, *args,
**kwargs)
if 'with_subnet' in self.fields:
del self.fields['with_subnet']
def clean(self):
cleaned_data = super(workflows.Action, self) \
.clean()
if 'cidr' in cleaned_data.keys() \
and cleaned_data['cidr']:
self._check_subnet_data(cleaned_data)
return cleaned_data
def get_hidden_fields(self, context):
hidden = True
shown = False
if context['subnet_type'] == 'os':
return {'id_nuage_id': hidden,
'id_net_partition': hidden,
'subnet_name': shown,
'id_cidr': shown,
'id_ip_version': shown,
'id_gateway_ip': shown,
'id_no_gateway': shown}
elif context['subnet_type'] == 'vsd_manual':
return {'id_nuage_id': shown,
'id_net_partition': shown,
'subnet_name': shown,
'id_cidr': shown,
# coding=utf-8
from tvdb_api.tvdb_api import Tvdb
from sickbeard import helpers
# Static configuration handed to the indexer implementations.
initConfig = {
    # ISO-639-1 codes theTVDB can return results in.
    'valid_languages': [
        "da", "fi", "nl", "de", "it", "es", "fr", "pl", "hu", "el", "tr",
        "ru", "he", "ja", "pt", "zh", "cs", "sl", "hr", "ko", "en", "sv", "no"
    ],
    # ISO-639-1 code -> theTVDB numeric language id.
    'langabbv_to_id': {
        'el': 20, 'en': 7, 'zh': 27,
        'it': 15, 'cs': 28, 'es': 16, 'ru': 22, 'nl': 13, 'pt': 26, 'no': 9,
        'tr': 21, 'pl': 18, 'fr': 17, 'hr': 31, 'de': 14, 'da': 10, 'fi': 11,
        'hu': 19, 'ja': 25, 'he': 24, 'ko': 32, 'sv': 8, 'sl': 30
    }
}

# Numeric indexer ids persisted in the database.
INDEXER_TVDB = 1
INDEXER_TVRAGE = 2  # Must keep: legacy shows may still reference this id.
# Per-indexer settings keyed by indexer id; only theTVDB is active.
indexerConfig = {
    INDEXER_TVDB: {
        'id': INDEXER_TVDB,
        'name': 'theTVDB',
        # Client class used to talk to this indexer.
        'module': Tvdb,
        'api_params': {
            'apikey': 'F9C450E78D99172E',
            'language': 'en',
            'useZip': True,
        },
        # Shared requests session reused across API calls.
        'session': helpers.make_session(),
        'trakt_id': 'tvdb_id',
        'xem_origin': 'tvdb',
        'icon': 'thetvdb16.png',
        'scene_loc': 'https://cdn.pymedusa.com/scene_exceptions/scene_exceptions.json',
        'show_url': 'http://thetvdb.com/?tab=series&id=',
        # Template; the API key is interpolated immediately below.
        'base_url': 'http://thetvdb.com/api/%(apikey)s/series/'
    }
}
indexerConfig[INDEXER_TVDB]['base_url'] %= indexerConfig[INDEXER_TVDB]['api_params'] # insert API key into base url
# -*- coding: utf-8 -*-
import httplib as http
import logging
from bs4 import BeautifulSoup
from flask import request
from framework.mongo.utils import to_mongo_key
from framework.exceptions import HTTPError
from framework.auth.utils import privacy_info_handle
from framework.auth.decorators import must_be_logged_in
from framework.flask import redirect
from addons.wiki import settings
from addons.wiki import utils as wiki_utils
from website.profile.utils import get_gravatar
from website.project.views.node import _view_project
from website.project.model import has_anonymous_link
from website.project.decorators import (
must_be_contributor_or_public,
must_have_addon, must_not_be_registration,
must_be_valid_project,
must_have_permission,
must_have_write_permission_or_public_wiki,
)
from website.exceptions import NodeStateError
from osf.exceptions import ValidationError
from .exceptions import (
NameEmptyError,
NameInvalidError,
NameMaximumLengthError,
PageCannotRenameError,
PageConflictError,
PageNotFoundError,
InvalidVersionError,
)
from .models import NodeWikiPage
logger = logging.getLogger(__name__)
# Canned HTTP errors reused by the wiki view functions below.
WIKI_NAME_EMPTY_ERROR = HTTPError(http.BAD_REQUEST, data=dict(
    message_short='Invalid request',
    message_long='The wiki page name cannot be empty.'
))
WIKI_NAME_MAXIMUM_LENGTH_ERROR = HTTPError(http.BAD_REQUEST, data=dict(
    message_short='Invalid request',
    message_long='The wiki page name cannot be more than 100 characters.'
))
WIKI_PAGE_CANNOT_RENAME_ERROR = HTTPError(http.BAD_REQUEST, data=dict(
    message_short='Invalid request',
    message_long='The wiki page cannot be renamed.'
))
WIKI_PAGE_CONFLICT_ERROR = HTTPError(http.CONFLICT, data=dict(
    message_short='Page conflict',
    message_long='A wiki page with that name already exists.'
))
WIKI_PAGE_NOT_FOUND_ERROR = HTTPError(http.NOT_FOUND, data=dict(
    message_short='Not found',
    message_long='A wiki page could not be found.'
))
WIKI_INVALID_VERSION_ERROR = HTTPError(http.BAD_REQUEST, data=dict(
    message_short='Invalid request',
    message_long='The requested version of this wiki page does not exist.'
))
def _get_wiki_versions(node, name, anonymous=False):
    """Return version-metadata dicts for a wiki page, newest first.

    Each dict carries the version number, the (possibly anonymized)
    author name, and a human-readable UTC timestamp.
    """
    key = to_mongo_key(name)
    # New projects have no history yet — the default "home" page is
    # created lazily.
    if key not in node.wiki_pages_versions:
        return []

    def _describe(page):
        stamp = page.date.replace(microsecond=0).isoformat().replace('T', ' ')
        return {
            'version': page.version,
            'user_fullname': privacy_info_handle(page.user.fullname,
                                                 anonymous, name=True),
            'date': '{} UTC'.format(stamp),
        }

    pages = [NodeWikiPage.load(wiki_id)
             for wiki_id in node.wiki_pages_versions[key]]
    return [_describe(page) for page in reversed(pages)]
def _get_wiki_pages_current(node):
    """Return name/url/id/content dicts for every current wiki page,
    ordered by page key."""
    return [
        {
            'name': sorted_page.page_name,
            'url': node.web_url_for('project_wiki_view', wname=sorted_page.page_name, _guid=True),
            'wiki_id': sorted_page._primary_key,
            'wiki_content': wiki_page_content(sorted_page.page_name, node=node)
        }
        for sorted_page in [
            node.get_wiki_page(sorted_key)
            for sorted_key in sorted(node.wiki_pages_current)
        ]
        # TODO: remove after forward slash migration
        if sorted_page is not None
    ]
def _get_wiki_api_urls(node, name, additional_urls=None):
    """Build the API URL map for a wiki page.

    :param node: project/component exposing ``api_url_for``.
    :param name: wiki page name used for page-specific endpoints.
    :param additional_urls: optional dict merged over the defaults.
    :return: dict of endpoint-name -> URL.
    """
    urls = {
        'base': node.api_url_for('project_wiki_home'),
        'delete': node.api_url_for('project_wiki_delete', wname=name),
        'rename': node.api_url_for('project_wiki_rename', wname=name),
        'content': node.api_url_for('wiki_page_content', wname=name),
        # Fixed extraction garbling here ("'s | ettings'").
        'settings': node.api_url_for('edit_wiki_settings'),
        'grid': node.api_url_for('project_wiki_grid_data', wname=name)
    }
    if additional_urls:
        urls.update(additional_urls)
    return urls
def _get_wiki_web_urls(node, key, version=1, additional_urls=None):
    """Build the browser-facing URL map for a wiki page.

    NOTE(review): `version` is accepted but never used in this body;
    presumably kept for caller compatibility — confirm before removing.
    """
    urls = {
        'base': node.web_url_for('project_wiki_home', _guid=True),
        'edit': node.web_url_for('project_wiki_view', wname=key, _guid=True),
        'home': node.web_url_for('project_wiki_home', _guid=True),
        'page': node.web_url_for('project_wiki_view', wname=key, _guid=True),
    }
    if additional_urls:
        urls.update(additional_urls)
    return urls
@must_be_contributor_or_public
@must_have_addon('wiki', 'node')
def wiki_widget(**kwargs):
    """Serialize the wiki 'home' page for the project-overview widget.

    Truncates the rendered HTML to 400 characters and signals via `more`
    whether a "Read more" link should be shown.
    """
    node = kwargs['node'] or kwargs['project']
    wiki = node.get_addon('wiki')
    wiki_page = node.get_wiki_page('home')

    # Show "Read more" link if there are multiple pages or has > 400 characters
    more = len(node.wiki_pages_current.keys()) >= 2
    MAX_DISPLAY_LENGTH = 400
    rendered_before_update = False
    if wiki_page and wiki_page.html(node):
        wiki_html = wiki_page.html(node)
        if len(wiki_html) > MAX_DISPLAY_LENGTH:
            wiki_html = BeautifulSoup(wiki_html[:MAX_DISPLAY_LENGTH] + '...', 'html.parser')
            more = True
        else:
            # Fix: pass an explicit parser, matching the truncated branch
            # above; without it BeautifulSoup picks a platform-dependent
            # parser (and warns on every call).
            wiki_html = BeautifulSoup(wiki_html, 'html.parser')
        rendered_before_update = wiki_page.rendered_before_update
    else:
        wiki_html = None

    ret = {
        'complete': True,
        'wiki_content': unicode(wiki_html) if wiki_html else None,
        'wiki_content_url': node.api_url_for('wiki_page_content', wname='home'),
        'rendered_before_update': rendered_before_update,
        'more': more,
        'include': False,
    }
    ret.update(wiki.config.to_json())
    return ret
@must_be_valid_project
@must_have_write_permission_or_public_wiki
@must_have_addon('wiki', 'node')
def wiki_page_draft(wname, **kwargs):
    """Return a page's saved content plus its live (sharejs) draft."""
    node = kwargs['node'] or kwargs['project']
    wiki_page = node.get_wiki_page(wname)

    return {
        'wiki_content': wiki_page.content if wiki_page else None,
        # Fall back to the collaborative-editor buffer when the page has
        # never been saved.
        'wiki_draft': (wiki_page.get_draft(node) if wiki_page
                       else wiki_utils.get_sharejs_content(node, wname)),
    }
@must_be_valid_project
@must_be_contributor_or_public
@must_have_addon('wiki', 'node')
def wiki_page_content(wname, wver=None, **kwargs):
    """Return a wiki page's raw content, optionally at a given version."""
    node = kwargs['node'] or kwargs['project']
    wiki_page = node.get_wiki_page(wname, version=wver)
    # NOTE(review): rendered_before_update presumably tells the client the
    # cached render predates a renderer change — confirm its semantics.
    rendered_before_update = wiki_page.rendered_before_update if wiki_page else False

    return {
        'wiki_content': wiki_page.content if wiki_page else '',
        'rendered_before_update': rendered_before_update
    }
@must_be_valid_project  # injects project
@must_have_permission('write')  # injects user, project
@must_not_be_registration
@must_have_addon('wiki', 'node')
def project_wiki_delete(auth, wname, **kwargs):
    """Delete a wiki page and tear down its collaborative-edit session.

    Raises 404 when the page does not exist.
    """
    node = kwargs['node'] or kwargs['project']
    wiki_name = wname.strip()
    wiki_page = node.get_wiki_page(wiki_name)
    sharejs_uuid = wiki_utils.get_sharejs_uuid(node, wiki_name)

    if not wiki_page:
        raise HTTPError(http.NOT_FOUND)
    node.delete_node_wiki(wiki_name, auth)
    # Notify sharejs so open editor sessions drop the deleted document.
    wiki_utils.broadcast_to_sharejs('delete', sharejs_uuid, node)
    return {}
@must_be_valid_project # returns project
@must_be_contributor_or_public
@must_have_addon('wiki', 'node')
def project_wiki_view(auth, wname, path=None, **kwargs):
node = kwargs['node'] or kwargs['project']
anonymous = has_anonymous_link(node, auth)
wiki_name = (wname or '').strip()
wiki_key = to_mongo_key(wiki_name)
wiki_page = node.get_wiki_page(wiki_name)
wiki_settings = node.get_addon('wiki')
can_edit = (
auth.logged_in
and not node.is_registration
and (
node.has_permission(auth.user, 'write')
or wiki_settings.is_publicly_editable
)
)
versions = _get_wiki_versions(node, wiki_name, anonymous=anonymous)
# Determine panels used in view
panels = {'view', 'edit', 'compare', 'menu'}
if request.args and set(request.args).intersection(panels):
panels_used = [panel for panel in request.args if panel in panels]
num_columns = len(set(panels_used).intersection({'view', 'edit', 'compare'}))
if num_c |
#!/usr/bin/env python
import sys
import numpy as np
from matplotlib import pyplot as plt
def main():
    """Scatter-plot a 2-D, two-class dataset read from a TSV file.

    Usage: plot.py <datafile>. Each line holds tab-separated floats; the
    last column is the 0/1 class label, and the first two feature columns
    are plotted (blue dots for class 0, red dots for class 1).
    """
    # `with` guarantees the file is closed even if parsing fails
    # (the original left it open on error).
    with open(sys.argv[1]) as infile:
        data = np.array([[float(v) for v in line.strip('\n').split('\t')]
                         for line in infile])
    X = data[:, 0:-1]
    (N, D) = X.shape
    Y = data[:, -1].reshape((N, 1))
    # Boolean masks are clearer than np.where + fancy indexing.
    neg = (Y == 0).ravel()
    pos = (Y == 1).ravel()
    plt.plot(X[neg][:, 0], X[neg][:, 1], 'b.')
    plt.plot(X[pos][:, 0], X[pos][:, 1], 'r.')
    plt.show()


if __name__ == '__main__':
    main()
# Copyright (c) 2012-2013, Mark Peek &lt;mark@peek.org&gt;
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty, If, Tags
from .validators import (
elb_name, exactly_one, network_port,
tg_healthcheck_port, integer,
one_of, boolean
)
class LoadBalancerAttributes(AWSProperty):
    # Key/value entry of a load balancer's Attributes list.
    props = {
        'Key': (basestring, False),
        'Value': (basestring, False)
    }


class Certificate(AWSProperty):
    # A single certificate ARN attached to a listener.
    props = {
        'CertificateArn': (basestring, False)
    }
class AuthenticateCognitoConfig(AWSProperty):
    # Payload for an 'authenticate-cognito' listener action.
    props = {
        "AuthenticationRequestExtraParams": (dict, False),
        "OnUnauthenticatedRequest": (basestring, False),
        "Scope": (basestring, False),
        "SessionCookieName": (basestring, False),
        "SessionTimeout": (integer, False),
        "UserPoolArn": (basestring, True),
        "UserPoolClientId": (basestring, True),
        "UserPoolDomain": (basestring, True)
    }


class AuthenticateOidcConfig(AWSProperty):
    # Payload for an 'authenticate-oidc' listener action.
    props = {
        "AuthenticationRequestExtraParams": (dict, False),
        "AuthorizationEndpoint": (basestring, True),
        "ClientId": (basestring, True),
        "ClientSecret": (basestring, True),
        "Issuer": (basestring, True),
        "OnUnauthenticatedRequest": (basestring, False),
        "Scope": (basestring, False),
        "SessionCookieName": (basestring, False),
        "SessionTimeout": (integer, False),
        "TokenEndpoint": (basestring, True),
        "UserInfoEndpoint": (basestring, True)
    }
class RedirectConfig(AWSProperty):
    # https://docs.aws.amazon.com/
    # AWSCloudFormation/latest/UserGuide/
    # aws-properties-elasticloadbalancingv2-listener-redirectconfig.html
    props = {
        'Host': (basestring, False),
        'Path': (basestring, False),
        'Port': (basestring, False),
        'Protocol': (basestring, False),
        'Query': (basestring, False),
        'StatusCode': (basestring, True),
    }

    def validate(self):
        # Only permanent (301) and temporary (302) redirects are allowed.
        one_of(self.__class__.__name__,
               self.properties,
               'StatusCode',
               ['HTTP_301', 'HTTP_302'])
class FixedResponseConfig(AWSProperty):
    # Payload for a 'fixed-response' listener action.
    props = {
        'ContentType': (basestring, False),
        'MessageBody': (basestring, False),
        'StatusCode': (basestring, True),
    }

    def validate(self):
        # ContentType is optional (None) or one of the MIME types ELBv2
        # accepts for fixed responses.
        one_of(self.__class__.__name__,
               self.properties,
               'ContentType',
               [None, 'text/plain', 'text/css', 'text/html',
                'application/javascript', 'application/json'])
class Action(AWSProperty):
    # A listener (default) action or rule action.
    props = {
        "AuthenticateCognitoConfig": (AuthenticateCognitoConfig, False),
        "AuthenticateOidcConfig": (AuthenticateOidcConfig, False),
        "FixedResponseConfig": (FixedResponseConfig, False),
        "Order": (integer, False),
        "RedirectConfig": (RedirectConfig, False),
        "TargetGroupArn": (basestring, False),
        "Type": (basestring, True)
    }

    def validate(self):
        # 'Type' must name one of the five ELBv2 action kinds.
        one_of(self.__class__.__name__,
               self.properties,
               'Type',
               ['forward', 'redirect', 'fixed-response',
                'authenticate-cognito', 'authenticate-oidc'])

        def requires(action_type, prop):
            # Enforce that `prop` is present if and only if `Type` equals
            # `action_type` (each action kind has its own payload field).
            if self.properties.get('Type') == action_type and \
                    prop not in self.properties:
                raise ValueError(
                    'Type "%s" requires definition of "%s"' % (
                        action_type, prop
                    )
                )
            if prop in self.properties and \
                    self.properties.get('Type') != action_type:
                raise ValueError(
                    'Definition of "%s" allowed only with '
                    'type "%s", was: "%s"' % (
                        prop, action_type, self.properties.get('Type')
                    )
                )

        requires('forward', 'TargetGroupArn')
        requires('redirect', 'RedirectConfig')
        requires('fixed-response', 'FixedResponseConfig')
# Per-field condition payloads for ListenerRule conditions — one class
# per 'Field' value of the ELBv2 RuleCondition API.
class HostHeaderConfig(AWSProperty):
    props = {
        'Values': ([basestring], False),
    }


class HttpHeaderConfig(AWSProperty):
    props = {
        'HttpHeaderName': (basestring, False),
        'Values': ([basestring], False),
    }


class HttpRequestMethodConfig(AWSProperty):
    props = {
        'Values': ([basestring], False),
    }


class PathPatternConfig(AWSProperty):
    props = {
        'Values': ([basestring], False),
    }


class QueryStringKeyValue(AWSProperty):
    props = {
        'Key': (basestring, False),
        'Value': (basestring, False),
    }


class QueryStringConfig(AWSProperty):
    props = {
        'Values': ([QueryStringKeyValue], False),
    }


class SourceIpConfig(AWSProperty):
    props = {
        'Values': ([basestring], False),
    }
class Condition(AWSProperty):
    # A ListenerRule condition: 'Field' selects which of the *Config
    # payloads applies; 'Values' is the legacy shorthand form.
    props = {
        'Field': (basestring, False),
        'HostHeaderConfig': (HostHeaderConfig, False),
        'HttpHeaderConfig': (HttpHeaderConfig, False),
        'HttpRequestMethodConfig': (HttpRequestMethodConfig, False),
        'PathPatternConfig': (PathPatternConfig, False),
        'QueryStringConfig': (QueryStringConfig, False),
        'SourceIpConfig': (SourceIpConfig, False),
        'Values': ([basestring], False),
    }


class Matcher(AWSProperty):
    # HTTP codes a health check counts as healthy, e.g. "200-299".
    props = {
        'HttpCode': (basestring, True)
    }


class SubnetMapping(AWSProperty):
    # Subnet + Elastic IP allocation pair for a network load balancer.
    props = {
        'AllocationId': (basestring, True),
        'SubnetId': (basestring, True)
    }


class TargetGroupAttribute(AWSProperty):
    # Key/value entry of a target group's TargetGroupAttributes list.
    props = {
        'Key': (basestring, False),
        'Value': (basestring, False)
    }


class TargetDescription(AWSProperty):
    # A single registered target (instance id, IP, or Lambda ARN).
    props = {
        'AvailabilityZone': (basestring, False),
        'Id': (basestring, True),
        'Port': (network_port, False)
    }
class Listener(AWSObject):
    resource_type = "AWS::ElasticLoadBalancingV2::Listener"

    props = {
        'Certificates': ([Certificate], False),
        'DefaultActions': ([Action], True),
        'LoadBalancerArn': (basestring, True),
        'Port': (network_port, True),
        'Protocol': (basestring, True),
        'SslPolicy': (basestring, False)
    }


class ListenerCertificate(AWSObject):
    resource_type = "AWS::ElasticLoadBalancingV2::ListenerCertificate"

    props = {
        'Certificates': ([Certificate], True),
        'ListenerArn': (basestring, True),
    }


class ListenerRule(AWSObject):
    resource_type = "AWS::ElasticLoadBalancingV2::ListenerRule"

    props = {
        'Actions': ([Action], True),
        'Conditions': ([Condition], True),
        'ListenerArn': (basestring, True),
        'Priority': (integer, True)
    }


# Valid values of a target group's 'TargetType' property.
TARGET_TYPE_INSTANCE = 'instance'
TARGET_TYPE_IP = 'ip'
TARGET_TYPE_LAMBDA = 'lambda'
class TargetGroup(AWSObject):
    """AWS::ElasticLoadBalancingV2::TargetGroup resource."""
    resource_type = "AWS::ElasticLoadBalancingV2::TargetGroup"
    props = {
        'HealthCheckEnabled': (boolean, False),
        'HealthCheckIntervalSeconds': (integer, False),
        'HealthCheckPath': (basestring, False),
        'HealthCheckPort': (tg_healthcheck_port, False),
        'HealthCheckProtocol': (basestring, False),
        'HealthCheckTimeoutSeconds': (integer, False),
        'HealthyThresholdCount': (integer, False),
        'Matcher': (Matcher, False),
        'Name': (basestring, False),
        'Port': (network_port, False),
        'Protocol': (basestring, False),
        'Tags': ((Tags, list), False),
        'TargetGroupAttributes': ([TargetGroupAttribute], False),
        'Targets': ([TargetDescription], False),
        'TargetType': (basestring, False),
        'UnhealthyThresholdCount': (integer, False),
        'VpcId': (basestring, False),
    }
    def validate(self):
        # TargetType, when set, must be one of the known constants;
        # None is accepted because the property is optional.
        one_of(self.__class__.__name__,
               self.properties,
               'TargetType',
               [
                   None,
                   TARGET_TYPE_INSTANCE,
                   TARGET_TYPE_IP,
                   TARGET_TYPE_LAMBDA
               ])
def check_properties(action_types, props_to_check, required): |
# Create your views here.
from danibraz.bolsa.models import PlanoDeContas
from django.core.files.storage import FileSystemStorage
from django.http import HttpResponseRedirect, HttpResponse
from django.shortcuts import render
import xlrd
# def simple_upload(request):
# if request.method == 'POST' and request.FILES['excelfile']:
# myfile = request.FILES['excelfile']
# fs = FileSystemStorage()
# filename = fs.save(myfile.name, myfile)
# uploaded_file_url = fs.url(filename)
# return render(request, '/bolsa/importar/', {
# 'uploaded_file_url': uploaded_file_url
# })
# return render(request, 'bolsa/import_form.html')
def simple_upload(request):
    """Handle the spreadsheet-import form.

    On POST with an 'excelfile' upload: persist it via the default storage,
    import its rows into PlanoDeContas, then redirect to the listing page.
    On GET (or POST without a truthy file) just render the upload form.
    """
    if request.method == 'POST' and request.FILES['excelfile']:
        upload = request.FILES['excelfile']
        storage = FileSystemStorage()
        storage.save(upload.name, upload)
        # Resolve the on-disk path by the original name, matching the save above.
        importaPlanilha(storage.path(upload.name))
        return HttpResponseRedirect('/bolsa/planodecontas/listar/')
    return render(request, 'bolsa/import_form.html')
def remove(field):
    """Normalize a spreadsheet cell value to a plain string of digits.

    Numeric cells arrive as e.g. '123.0': the trailing '.0' is cut off.
    Any other value has its '-' and '.' separators stripped (so an account
    classification like '1.2-3' becomes '123').
    """
    text = str(field)
    if text.endswith('.0'):
        return text[:-2]
    return text.replace("-", "").replace(".", "")
def importaPlanilha(dir):
    """Load a chart-of-accounts spreadsheet into PlanoDeContas.

    Reads the first worksheet of the *.xls/*.xlsx file at `dir`, skips the
    header row, and bulk-inserts one PlanoDeContas per data row.
    """
    # Funciona com *.xls e *.xlsx
    workbook = xlrd.open_workbook(dir)
    # First sheet holds the data; sheet_by_name("Plan1") would also work.
    worksheet = workbook.sheet_by_index(0)
    lista = []
    for r in range(1, worksheet.nrows):  # row 0 is the header
        lista.append(
            PlanoDeContas(classification=remove(str(worksheet.cell(r, 0).value)),
                          name=remove(str(worksheet.cell(r, 1).value)),
                          reduced_account=remove(str(worksheet.cell(r, 2).value)),
                          # NOTE(review): account_type reads column 4 while
                          # source reads column 3 -- confirm this matches the
                          # spreadsheet layout.
                          account_type=remove(str(worksheet.cell(r, 4).value)),
                          source=remove(str(worksheet.cell(r, 3).value)),
                          )
        )
    PlanoDeContas.objects.bulk_create(lista)
    # Kept for compatibility although the view caller ignores this value.
    return HttpResponseRedirect('/bolsa/planodecontas/listar/')
def planodecontas_list(request):
    """Render the chart-of-accounts list, optionally filtered by ?search_box=.

    Leftover debug print() calls from the original were removed; they leaked
    request data and querysets to stdout on every request.
    """
    q = request.GET.get('search_box')
    if q:
        planodecontas = PlanoDeContas.objects.filter(name__icontains=q)
    else:
        planodecontas = PlanoDeContas.objects.all()
    context = {'planodecontas': planodecontas}
    return render(request, 'bolsa/bolsa_list.html', context)
def pcontas_export(request):
    """Return a fixed plain-text attachment (placeholder export endpoint)."""
    response = HttpResponse('any string generated by django',
                            content_type='text/plain')
    response['Content-Disposition'] = 'attachment; filename=my-file.txt'
    return response
def planodecontas_export(request):
    """Stream the chart of accounts as a fixed-width text attachment.

    Each record is a fixed-width line: padded classification, reduced
    account, account type, name and source followed by literal filler
    columns. str.ljust(n) is byte-identical to the original
    `s + ' ' * (n - len(s))` padding, including the overflow case.
    """
    response = HttpResponse(content_type='text/plain')
    response['Content-Disposition'] = 'attachment; filename="PlanoDeContas.txt"'
    for conta in PlanoDeContas.objects.all():
        record = [
            conta.classification.ljust(30),
            ' ' * 30,
            conta.reduced_account.ljust(10),
            conta.account_type[:1],
            conta.name[:50].ljust(50),
            conta.source[:1].ljust(1),
            "INN",
            ' ' * 15,
            ' ' * 15,
            '0' * 10,
            "N",
            ' ' * 10,
            "NNN",
            ' ' * 50,
            ' ' * 15,
            "NN",
            '0' * 10,
            ' ' * 30,
            '0' * 10,
            ' ' * 12,
            ' ' * 10,
            ' ' * 30,
            ' ' * 30,
            ' ' * 20,
            "\n",
        ]
        response.write(''.join(record))
    return response
def planodecontas_export1(request):
    """Append the chart of accounts to a fixed local file, then redirect.

    NOTE(review): the output path is hard-coded to a developer desktop and
    the file is opened in append mode, so repeated calls duplicate rows --
    confirm whether this debug variant is still needed.
    """
    planodecontas = PlanoDeContas.objects.all()
    # Context manager guarantees the handle is closed even if a write fails
    # (the original leaked the handle on error: close() was unconditional
    # but not protected by try/finally).
    with open("/home/eliaspai/Área de Trabalho/PlanoDeContas.txt", "a") as out:
        for conta in planodecontas:
            # str.ljust(n) == s + ' ' * (n - len(s)) for every length,
            # so the emitted bytes are unchanged.
            out.write(''.join([
                conta.classification.ljust(30),
                ' ' * 30,
                conta.reduced_account.ljust(10),
                conta.account_type[:1],
                conta.name[:50].ljust(50),
                conta.source[:1].ljust(1),
                "INN",
                ' ' * 15,
                ' ' * 15,
                '0' * 10,
                "N",
                ' ' * 10,
                "NNN",
                ' ' * 50,
                ' ' * 15,
                "NN",
                '0' * 10,
                ' ' * 30,
                '0' * 10,
                ' ' * 12,
                ' ' * 10,
                ' ' * 30,
                ' ' * 30,
                ' ' * 20,
                "\n",
            ]))
    return HttpResponseRedirect('/bolsa/planodecontas/listar/')
#!/usr/bin/env python
# Copyright 2014 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function

import os

import setuptools

base_dir = os.path.dirname(__file__)

# Load version/author metadata from the package's __about__.py without
# importing the package itself.
about = {}
with open(os.path.join(base_dir, "sessions", "__about__.py")) as f:
    exec(f.read(), about)

# The README doubles as the long description on PyPI.
with open(os.path.join(base_dir, "README.rst")) as f:
    long_description = f.read()

setuptools.setup(
    name=about["__title__"],
    version=about["__version__"],

    description=about["__summary__"],
    long_description=long_description,
    license=about["__license__"],
    url=about["__uri__"],

    author=about["__author__"],
    author_email=about["__email__"],

    classifiers=[
        "License :: OSI Approved :: Apache Software License",
        "Operating System :: MacOS :: MacOS X",
        "Operating System :: POSIX",
        "Operating System :: POSIX :: BSD",
        "Operating System :: POSIX :: Linux",
        "Operating System :: Microsoft :: Windows",
        "Programming Language :: Python",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.6",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.2",
        "Programming Language :: Python :: 3.3",
        "Programming Language :: Python :: 3.4",
        "Programming Language :: Python :: Implementation :: CPython",
        "Programming Language :: Python :: Implementation :: PyPy",
    ],

    packages=setuptools.find_packages(exclude=["tests", "tests.*"]),
)
|
def g(x):
    """Scale a uniform random float in [0, 1) by x and truncate to int.

    For non-negative x the result lies in [0, x); g(0) is always 0.
    """
    import random
    return int(x * random.random())
def h(x):
    # Sum of one random draw (via g) per element of x, mapped with `tmap`.
    # NOTE(review): `tmap` is only bound at module level inside the
    # __main__ guard below, so h() works only when this file runs as a
    # script -- late binding resolves the global at call time.
    return sum(tmap(g, x))
def f(x, y):
    """Return the product of x and y (worker function for the pool demos)."""
    product = x * y
    return product
# Sample inputs for the pool demos below.
x = range(10)
y = range(5)
if __name__ == '__main__':
from pathos.helpers import freeze_support
freeze_support()
from pathos.pools import ProcessPool, ThreadPool
amap = ProcessPool().amap
tma | p = ThreadPool().map
print amap(f, [h(x),h(x),h(x),h(x),h(x)], y).get()
def _f(m, g, x, y):
return sum(m(g,x))*y
print amap(_f, [tmap]*len(y), [g]*len(y), [x]*len(y), y).get()
from math import sin | , cos
print amap(tmap, [sin,cos], [range(10),range(10)]).get()
|
from behave import when, then
from hamcrest import assert_that, calling, raises
from veryfay import *
from fixtures import *
# --- Steps: action-target resolution and single-match deny roles. ---
# behave injects `context`; `context.ae` is presumably the authorization
# engine fixture set up by the suite's environment hooks -- confirm there.
@when('action target not found')
def step_impl(context):
    pass
@then('it should fail when action target not found')
def step_impl(context):
    result = context.ae(Create(SomeClass)).is_allowing(PrincipalClass('commiter'))
    assert_that(calling(context.ae(Create(SomeClass)).verify).with_args(PrincipalClass('commiter')), raises(AuthorizationException))
    assert_that(result.is_failure)
@when('action target found')
def step_impl(context):
    pass
@then('it should fail when target type not matching')
def step_impl(context):
    result = context.ae(Read(OtherSomeOtherClass)).is_allowing(PrincipalClass('supervisor'))
    assert_that(calling(context.ae(Read(OtherSomeOtherClass)).verify).with_args(PrincipalClass('supervisor')), raises(AuthorizationException))
    assert_that(result.is_failure)
@when('deny role found')
def step_impl(context):
    pass
@when('deny role found once')
def step_impl(context):
    pass
@then('it should fail when principal match the deny role definition')
def step_impl(context):
    result = context.ae(Read()).is_allowing(OtherPrincipalClass('contributor'))
    assert_that(calling(context.ae(Read()).verify).with_args(OtherPrincipalClass('contributor')), raises(AuthorizationException))
    assert_that(result.is_failure)
@then('it should fail when principal and extra info match the deny role definition')
def step_impl(context):
    result = context.ae(Read(SomeClass)).is_allowing(PrincipalClass('reader'), 1234)
    assert_that(calling(context.ae(Read(SomeClass)).verify).with_args(PrincipalClass('reader'), 1234), raises(AuthorizationException))
    assert_that(result.is_failure)
@then('it should succeed when principal does not match every deny role definition in a set')
def step_impl(context):
    result = context.ae(Create()).is_allowing(PrincipalClass('commiter'))
    context.ae(Create()).verify(PrincipalClass('commiter'))
    assert_that(result.is_success)
# --- Steps: deny-role sets, malformed "contains" hooks, multiple matches. ---
@then('it should fail when principal match every deny role definition in a set')
def step_impl(context):
    result = context.ae(Create()).is_allowing(PrincipalClass('supervisor-commiter'))
    assert_that(calling(context.ae(Create()).verify).with_args(PrincipalClass('supervisor-commiter')), raises(AuthorizationException))
    assert_that(result.is_failure)
@then('it should fail when a matching deny role definition does not define a "contains" method')
def step_impl(context):
    assert_that(calling(context.ae(Update(OtherSomeOtherClass)).is_allowing).with_args(PrincipalClass('supervisor')), raises(NotImplementedError))
    assert_that(calling(context.ae(Update(OtherSomeOtherClass)).verify).with_args(PrincipalClass('supervisor')), raises(NotImplementedError))
@then('it should fail when a matching deny role definition has a "contains" method that does not take at least one parameter')
def step_impl(context):
    assert_that(calling(context.ae(Update(SomeOtherClass)).is_allowing).with_args(PrincipalClass('commiter')), raises(NotImplementedError))
    assert_that(calling(context.ae(Update(SomeOtherClass)).verify).with_args(PrincipalClass('commiter')), raises(NotImplementedError))
@then('it should fail when a matching deny role definition has a "contains" method that does not return a boolean value')
def step_impl(context):
    assert_that(calling(context.ae(Update(OtherClass)).is_allowing).with_args(PrincipalClass('commiter')), raises(NotImplementedError))
    assert_that(calling(context.ae(Update(OtherClass)).verify).with_args(PrincipalClass('commiter')), raises(NotImplementedError))
@when('deny role found more than once')
def step_impl(context):
    pass
@then('it should fail when principal and any extra info match any deny role definition')
def step_impl(context):
    result = context.ae(Read()).is_allowing(OtherPrincipalClass('contributor'))
    assert_that(calling(context.ae(Read()).verify).with_args(OtherPrincipalClass('contributor')), raises(AuthorizationException))
    assert_that(result.is_failure)
@then('it should fail when principal and any extra info match any contained deny role definition')
def step_impl(context):
    result = context.ae(Patch()).is_allowing(OtherPrincipalClass('contributor'))
    assert_that(calling(context.ae(Patch()).verify).with_args(OtherPrincipalClass('contributor')), raises(AuthorizationException))
    assert_that(result.is_failure)
@then('it should fail when principal and any extra info match any deny role definition in an embedded container action')
def step_impl(context):
    result = context.ae(Delete()).is_allowing(OtherPrincipalClass('contributor'))
    assert_that(calling(context.ae(Delete()).verify).with_args(OtherPrincipalClass('contributor')), raises(AuthorizationException))
    assert_that(result.is_failure)
# --- Steps: allow-role matching (single, sets, malformed "contains"). ---
@when('deny role not found')
def step_impl(context):
    pass
@when('allow role not found')
def step_impl(context):
    pass
@then('it should fail when allow role not found')
def step_impl(context):
    result = context.ae(Read(SomeClass)).is_allowing(PrincipalClass('laura'))
    assert_that(calling(context.ae(Read(SomeClass)).verify).with_args(PrincipalClass('laura')), raises(AuthorizationException))
    assert_that(result.is_failure)
@when('allow role found')
def step_impl(context):
    pass
@when('allow role found once')
def step_impl(context):
    pass
@then('it should succeed when principal match an allow role definition')
def step_impl(context):
    result = context.ae(Read(SomeOtherClass)).is_allowing(OtherPrincipalClass('contributor'))
    context.ae(Read(SomeOtherClass)).verify(OtherPrincipalClass('contributor'))
    assert_that(result.is_success)
@then('it should succeed when principal and extra info match an allow role definition')
def step_impl(context):
    result = context.ae(Read(OtherSomeOtherClass)).is_allowing(OtherPrincipalClass('reader'), 1234, "1234")
    context.ae(Read(OtherSomeOtherClass)).verify(OtherPrincipalClass('reader'), 1234, "1234")
    assert_that(result.is_success)
@then('it should fail when principal does not match every allow role definition in a set')
def step_impl(context):
    result = context.ae(Read(SomeClass)).is_allowing(PrincipalClass('commiter'))
    assert_that(calling(context.ae(Read(SomeClass)).verify).with_args(PrincipalClass('commiter')), raises(AuthorizationException))
    assert_that(result.is_failure)
@then('it should succeed when principal does match every allow role definition in a set')
def step_impl(context):
    result = context.ae(Read(SomeClass)).is_allowing(PrincipalClass('supervisor-commiter'))
    context.ae(Read(SomeClass)).verify(PrincipalClass('supervisor-commiter'))
    assert_that(result.is_success)
@then('it should fail when a matching allow role definition does not define a "contains" method')
def step_impl(context):
    assert_that(calling(context.ae(Delete(OtherSomeOtherClass)).is_allowing).with_args(PrincipalClass('supervisor')), raises(NotImplementedError))
    assert_that(calling(context.ae(Delete(OtherSomeOtherClass)).verify).with_args(PrincipalClass('supervisor')), raises(NotImplementedError))
@then('it should fail when a matching allow role definition has a "contains" method that does not take at least one parameter')
def step_impl(context):
    # Fixed corrupted principal name ('comm | iter' -> 'commiter'), matching
    # the second assertion and every other step in this file.
    assert_that(calling(context.ae(Delete(SomeOtherClass)).is_allowing).with_args(PrincipalClass('commiter')), raises(NotImplementedError))
    assert_that(calling(context.ae(Delete(SomeOtherClass)).verify).with_args(PrincipalClass('commiter')), raises(NotImplementedError))
@then('it should fail when a matching allow role definition has a "contains" method that does not return a boolean value')
def step_impl(context):
    assert_that(calling(context.ae(Delete(OtherClass)).is_allowing).with_args(PrincipalClass('commiter')), raises(NotImplementedError))
    assert_that(calling(context.ae(Delete(OtherClass)).verify).with_args(PrincipalClass('commiter')), raises(NotImplementedError))
# Setup stub: the behaviour is exercised entirely by the @then steps.
@when('allow role found more than once')
def step_impl(context):
    pass
@then('it should succeed when principal and any |
from __future__ import absolute_import
__author__ = 'noe'
from pyemma._base.model import SampledModel
from pyemma.msm.models.msm import MSM
from pyemma.util.types import is_iterable
class SampledMSM(MSM, SampledModel):
    # A Markov state model backed by an ensemble of sampled MSMs, combining
    # the MSM interface with SampledModel's statistics over the samples.
    def __init__(self, samples, ref=None, conf=0.95):
        r""" Constructs a sampled MSM
        Parameters
        ----------
        samples : list of MSM
            Sampled MSM objects
        ref : EstimatedMSM
            Single-point estimator, e.g. containing a maximum likelihood or mean MSM
        conf : float, optional, default=0.95
            Confidence interval. By default two-sigma (95.4%) is used. Use 95.4% for two sigma or 99.7% for three sigma.
        """
        # validate input
        assert is_iterable(samples), 'samples must be a list of MSM objects, but is not.'
        assert isinstance(samples[0], MSM), 'samples must be a list of MSM objects, but is not.'
        # construct superclass 1
        SampledModel.__init__(self, samples, conf=conf)
        # construct superclass 2
        if ref is None:
            # No reference given: use the sample mean transition matrix.
            Pref = self.sample_mean('P')
            MSM.__init__(self, Pref, dt_model=samples[0].dt_model, neig=samples[0].neig, ncv=samples[0].ncv)
        else:
            # NOTE(review): relies on `ref.Pref`; estimated MSMs commonly
            # expose their transition matrix as `P` -- confirm this attribute
            # exists and is not a typo.
            MSM.__init__(self, ref.Pref, pi=ref.pi, reversible=ref.reversible, dt_model=ref.dt_model,
                         neig=ref.neig, ncv=ref.ncv)
    # TODO: maybe rename to parametrize in order to avoid confusion with set_params that has a different behavior?
    def set_model_params(self, samples=None, conf=0.95,
                         P=None, pi=None, reversible=None, dt_model='1 step', neig=None):
        """
        Parameters
        ----------
        samples : list of MSM objects
            sampled MSMs
        conf : float, optional, default=0.68
            Confidence interval. By default one-sigma (68.3%) is used. Use 95.4% for two sigma or 99.7% for three sigma.
        """
        # set model parameters of superclass
        SampledModel.set_model_params(self, samples=samples, conf=conf)
        MSM.set_model_params(self, P=P, pi=pi, reversible=reversible, dt_model=dt_model, neig=neig)
#
# class SampledEstimatedMSM(EstimatedMSM, SampledModel):
#
# def __init__(self, samples, ref, Pref='mle', conf=0.95):
# r""" Constructs a sampled MSM
#
# Parameters
# ----------
# samples : list of MSM
# Sampled MSM objects
# ref : EstimatedMSM
# Single-point estimator, e.g. containing a maximum likelihood or mean MSM
# conf : float, optional, default=0.68
# Confidence interval. By default one-sigma (68.3%) is used. Use 95.4% for two sigma or 99.7% for three sigma.
#
# """
# # construct superclass 1
# SampledModel.__init__(self, samples, conf=conf)
# # use reference or mean MSM.
# if ref is None:
# Pref = self.sample_mean('P')
# else:
# Pref = ref.P
# # construct superclass 2
# EstimatedMSM.__init__(self, ref.discrete_trajectories_full, ref.timestep, ref.lagtime, ref.connectivity,
# ref.active_set, ref.connected_sets, ref.count_matrix_full, ref.count_matrix_active, Pref)
# def _do_sample_eigendecomposition(self, k, ncv=None):
# """Conducts the eigenvalue decompositions for all sampled matrices.
#
# Stores k eigenvalues, left and right eigenvectors for all sampled matrices
#
# Parameters
# ----------
# k : int
# The number of eigenvalues / eigenvectors to be kept
# ncv : int (optional)
# Relevant for eigenvalue d | ecomposition of reversible transition matrices.
# ncv is the number of Lanczos vectors generated, `ncv` must be greater than k;
# it is recommended that ncv > 2*k
#
# | """
# from msmtools.analysis import rdl_decomposition
# from pyemma.util import linalg
#
# # left eigenvectors
# self.sample_Ls = np.empty((self._nsample), dtype=object)
# # eigenvalues
# self.sample_eigenvalues = np.empty((self._nsample), dtype=object)
# # right eigenvectors
# self.sample_Rs = np.empty((self._nsample), dtype=object)
# # eigenvector assignments
# self.sample_eig_assignments = np.empty((self._nsample), dtype=object)
#
# for i in range(self._nsample):
# if self._reversible:
# R, D, L = rdl_decomposition(self.sample_Ps[i], k=k, norm='reversible', ncv=ncv)
# # everything must be real-valued
# R = R.real
# D = D.real
# L = L.real
# else:
# R, D, L = rdl_decomposition(self.sample_Ps[i], k=k, norm='standard', ncv=ncv)
# # assign ordered
# I = linalg.match_eigenvectors(self.eigenvectors_right(), R,
# w_ref=self.stationary_distribution, w=self.sample_mus[i])
# self.sample_Ls[i] = L[I,:]
# self.sample_eigenvalues[i] = np.diag(D)[I]
# self.sample_Rs[i] = R[:,I]
#
# def _ensure_sample_eigendecomposition(self, k=None, ncv=None):
# """Ensures that eigendecomposition has been performed with at least k eigenpairs
#
# k : int
# number of eigenpairs needed. This setting is mandatory for sparse transition matrices
# (if you set sparse=True in the initialization). For dense matrices, k will be ignored
# as all eigenvalues and eigenvectors will be computed and stored.
# ncv : int (optional)
# Relevant for eigenvalue decomposition of reversible transition matrices.
# ncv is the number of Lanczos vectors generated, `ncv` must be greater than k;
# it is recommended that ncv > 2*k
#
# """
# # check input?
# if self._sparse:
# if k is None:
# raise ValueError(
# 'You have requested sparse=True, then the number of eigenvalues neig must also be set.')
# else:
# # override setting - we anyway have to compute all eigenvalues, so we'll also store them.
# k = self._nstates
# # ensure that eigenvalue decomposition with k components is done.
# try:
# m = len(self.sample_eigenvalues[0]) # this will raise and exception if self._eigenvalues doesn't exist yet.
# if m < k:
# # not enough eigenpairs present - recompute:
# self._do_sample_eigendecomposition(k, ncv=ncv)
# except:
# # no eigendecomposition yet - compute:
# self._do_sample_eigendecomposition(k, ncv=ncv)
#
# @property
# def stationary_distribution_mean(self):
# """Sample mean for the stationary distribution on the active set.
#
# See also
# --------
# MSM.stationary_distribution
#
# """
# return np.mean(self.sample_mus, axis=0)
#
# @property
# def stationary_distribution_std(self):
# """Sample standard deviation for the stationary distribution on the active set.
#
# See also
# --------
# MSM.stationary_distribution
#
# """
# return np.std(self.sample_mus, axis=0)
#
# @property
# def stationary_distribution_conf(self):
# """Sample confidence interval for the stationary distribution on the active set.
#
# See also
# --------
# MSM.stationary_distribution
#
# """
# return stat.confidence_interval(self.sample_mus, alpha=self._confidence)
#
# def eigenvalues_mean(self, k=None, ncv=None):
# """Sample mean for the eigenvalues.
#
# See also
# --------
# MSM.eigenvalues
#
# """
# self._ensure_sample_eigendecomposition(k=k, ncv=ncv)
# return np.mean(self.sample_eigenvalues, axis=0)
#
# def eigenvalues_std(self, k=None, ncv=None):
# """Sample standard deviation for the eigenvalues.
#
# See also
# --------
# MSM.eigenvalues
#
# |
#!/usr/bin/env python
"""A flow to run checks for a host."""
from grr.lib import aff4
from grr.lib import flow
from grr.lib import rdfvalue
from grr.lib.checks import checks
from grr.proto import flows_pb2
class CheckFlowArgs(rdfvalue.RDFProtoStruct):
  # RDF wrapper binding this flow's arguments to the CheckFlowArgs proto.
  protobuf = flows_pb2.CheckFlowArgs
class CheckRunner(flow.GRRFlow):
  """This flow runs checks on a host.

  CheckRunner:
  - Identifies what checks should be run for a host.
  - Identifies the artifacts that need to be collected to perform those checks.
  - Orchestrates collection of the host data.
  - Routes host data to the relevant checks.
  - Returns check data ready for reporting.
  """
  friendly_name = "Run Checks"
  category = "/Checks/"
  behaviours = flow.GRRFlow.behaviours + "BASIC"

  @flow.StateHandler(next_state=["MapArtifactData"])
  def Start(self):
    """Initialize flow state from the client's AFF4 object."""
    client = aff4.FACTORY.Open(self.client_id, token=self.token)
    self.state.Register("knowledge_base",
                        client.Get(client.Schema.KNOWLEDGE_BASE))
    self.state.Register("labels", client.GetLabels())
    self.state.Register("artifacts_wanted", set())
    self.state.Register("artifacts_fetched", set())
    self.state.Register("checks_run", [])
    self.state.Register("checks_with_findings", [])
    self.state.Register("results_store", None)
    self.state.Register("host_data", {})
    self.CallState(next_state="MapArtifactData")

  @flow.StateHandler(next_state=["AddResponses", "RunChecks"])
  def MapArtifactData(self, responses):
    """Get processed data, mapped to artifacts."""
    self.state.artifacts_wanted = checks.CheckRegistry.SelectArtifacts(
        os_type=self.state.knowledge_base.os)
    # Fetch Artifacts and map results to the artifacts that generated them.
    # This is an inefficient collection, but necessary because results need to
    # be mapped to the originating artifact. An alternative would be to have
    # rdfvalues labeled with originating artifact ids.
    for artifact_id in self.state.artifacts_wanted:
      self.CallFlow("ArtifactCollectorFlow", artifact_list=[artifact_id],
                    request_data={"artifact_id": artifact_id},
                    next_state="AddResponses")
    self.CallState(next_state="RunChecks")

  @flow.StateHandler()
  def AddResponses(self, responses):
    """Store one artifact's collected results, keyed by artifact id."""
    artifact_id = responses.request_data["artifact_id"]
    # TODO(user): Check whether artifact collection succeeded.
    self.state.host_data[artifact_id] = list(responses)

  # NOTE(review): next_state=["Done"] but no Done state method is defined in
  # this class -- confirm the flow framework tolerates a terminal state here.
  @flow.StateHandler(next_state=["Done"])
  def RunChecks(self, responses):
    """Run all checks over the collected host data and reply with findings."""
    if not responses.success:
      raise RuntimeError("Checks did not run successfully.")
    # Hand host data across to checks. Do this after all data has been
    # collected in case some checks require multiple artifacts/results.
    for finding in checks.CheckHost(self.state.host_data,
                                    os_type=self.state.knowledge_base.os):
      self.state.checks_run.append(finding.check_id)
      if finding.anomaly:
        self.state.checks_with_findings.append(finding.check_id)
      self.SendReply(finding)
|
from glob import glob
import subprocess
import vagrant
from fabric.api import execute, env, quiet
from fabric.state import connections
from logger import init_logger, debug, info
VM_NAME = "default"
def clear_fabric_cache():
    """
    Fabric caches it's connections, so it won't have to re-connect every time you use it.
    But, when working with VMs whose connections are getting reset, we can't use a cache.
    Use this function to reset fabric's cache
    """
    # Snapshot the keys first: deleting entries while iterating a live
    # keys() view raises RuntimeError on Python 3.
    for host_string in list(connections.keys()):
        connections[host_string].close()
        del connections[host_string]
def get_all_test_functions():
    """
    Get all the tests from the current directory
    Looking for python files starting with "test", and within, functions that start with "test"
    """
    tasks = []
    for file_name in glob("test*.py"):
        module = __import__(file_name[:-3])  # strip the ".py" suffix
        for attr_name in dir(module):
            if attr_name.startswith("test"):
                tasks.append(module.__dict__[attr_name])
    return tasks
def vagrant_run_command(command):
    """
    Run the given command in a shell, after preceding it with "vagrant"
    """
    # NOTE(review): shell=True with string concatenation is injection-prone
    # if `command` ever comes from user input; current callers pass fixed
    # literals. stdout=PIPE (never read) just suppresses vagrant's output.
    subprocess.call("vagrant " + command, shell=True, stdout=subprocess.PIPE)
def vagrant_take_snapshot():
    """
    Take a snapshot from the running machine, and name it "snapshot"
    """
    # Uses the vagrant-snapshot plugin's CLI.
    vagrant_run_command("snapshot take snapshot")
def vagrant_revert_to_snapshot():
    """
    In the running machine, revert to the last snapshot
    """
    vagrant_run_command("snapshot back")
def init_fabric(vclient):
    """
    init all the required environment for fabric
    """
    # Point fabric at the VM's SSH endpoint and key, then make runs
    # non-interactive: ignore known_hosts (VM host keys churn on rebuilds),
    # suppress output, and warn instead of aborting on non-zero exit codes.
    env.host_string = vclient.user_hostname_port(vm_name=VM_NAME)
    env.key_filename = vclient.keyfile(vm_name=VM_NAME)
    env.disable_known_hosts = True
    env.quiet = True
    env.warn_only = True
def main():
    """
    Gobi's main function.
    Finds the test functions, runs the machine, connects to them, and runs the tests
    """
    init_logger()
    info("Welcome to gobi. Sit back and relax :)")
    vclient = vagrant.Vagrant()
    test_funcs = get_all_test_functions()
    # BUG FIX: the original asserted `test_funcs > 0`, comparing a list with
    # an int (always truthy on Python 2, TypeError on Python 3). Assert on
    # the actual count instead.
    assert len(test_funcs) > 0, "No tests found. What do you want me to run?"
    info("Found %d tests to run" % len(test_funcs))
    info("Setting up the environment...")
    vclient.up()
    info("Environment is up and ready")
    debug("Taking snapshot...")
    vagrant_take_snapshot()
    debug("Snapshot taken")
    init_fabric(vclient)
    for counter, task in enumerate(test_funcs, start=1):
        # After the first test, clean - delete cache and revert to snapshot
        if counter != 1:
            clear_fabric_cache()
            debug("Reverting to snapshot...")
            vagrant_revert_to_snapshot()
            debug("Reverted!")
        info("Running test number %d - %s" % (counter, task.__name__))
        execute(task)
    info("All tests finished")
    info("Destroying environment...")
    vclient.destroy()
    info("Environment has been destroyed...")
    info("Gobi, out")
if __name__ == "__main__":
    main()
# -*- coding: utf-8 -*-
#
# Copyright (C) 2009 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of volunta | ry contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
from trac.core import *
from trac.config import ConfigSection
from trac.perm import IPermissionRequestor
class ExtraPermissionsProvider(Component):
    """Extra permission provider."""
    implements(IPermissionRequestor)
    extra_permissions_section = ConfigSection('extra-permissions',
        doc="""This section provides a way to add arbitrary permissions to a
        Trac environment. This can be useful for adding new permissions to use
        for workflow actions, for example.
        To add new permissions, create a new section `[extra-permissions]` in
        your `trac.ini`. Every entry in that section defines a meta-permission
        and a comma-separated list of permissions. For example:
        {{{
        [extra-permissions]
        extra_admin = extra_view, extra_modify, extra_delete
        }}}
        This entry will define three new permissions `EXTRA_VIEW`,
        `EXTRA_MODIFY` and `EXTRA_DELETE`, as well as a meta-permissions
        `EXTRA_ADMIN` that grants all three permissions.
        If you don't want a meta-permission, start the meta-name with an
        underscore (`_`):
        {{{
        [extra-permissions]
        _perms = extra_view, extra_modify
        }}}
        """)
    def get_permission_actions(self):
        # Build {PERM: [granted perms]}: every listed permission gets an
        # (initially empty) entry; each non-underscore meta name accumulates
        # its member permissions. NOTE(review): a meta appearing in several
        # option lines extends the same list, so duplicates are possible --
        # presumably harmless to Trac's permission system, but confirm.
        permissions = {}
        for meta, perms in self.extra_permissions_section.options():
            perms = [each.strip().upper() for each in perms.split(',')]
            for perm in perms:
                permissions.setdefault(perm, [])
            meta = meta.strip().upper()
            if meta and not meta.startswith('_'):
                permissions.setdefault(meta, []).extend(perms)
        # Plain permissions are returned as bare strings, metas as
        # (name, [members]) tuples, matching IPermissionRequestor.
        return [(k, v) if v else k for k, v in permissions.iteritems()]
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
from grpc.framework.common import cardinality
from grpc.framework.interfaces.face import utilities as face_utilities
import google.cloud.proto.logging.v2.logging_metrics_pb2 as google_dot_cloud_dot_proto_dot_logging_dot_v2_dot_logging__metrics__pb2
import google.protobuf.empty_pb2 as google_dot_protobuf_dot_empty__pb2
# NOTE: auto-generated gRPC stub -- prefer regenerating with grpc_tools over
# hand-editing. Each attribute is a unary-unary callable bound to one RPC of
# google.logging.v2.MetricsServiceV2.
class MetricsServiceV2Stub(object):
  """Service for configuring logs-based metrics.
  """

  def __init__(self, channel):
    """Constructor.

    Args:
      channel: A grpc.Channel.
    """
    self.ListLogMetrics = channel.unary_unary(
        '/google.logging.v2.MetricsServiceV2/ListLogMetrics',
        request_serializer=google_dot_cloud_dot_proto_dot_logging_dot_v2_dot_logging__metrics__pb2.ListLogMetricsRequest.SerializeToString,
        response_deserializer=google_dot_cloud_dot_proto_dot_logging_dot_v2_dot_logging__metrics__pb2.ListLogMetricsResponse.FromString,
        )
    self.GetLogMetric = channel.unary_unary(
        '/google.logging.v2.MetricsServiceV2/GetLogMetric',
        request_serializer=google_dot_cloud_dot_proto_dot_logging_dot_v2_dot_logging__metrics__pb2.GetLogMetricRequest.SerializeToString,
        response_deserializer=google_dot_cloud_dot_proto_dot_logging_dot_v2_dot_logging__metrics__pb2.LogMetric.FromString,
        )
    self.CreateLogMetric = channel.unary_unary(
        '/google.logging.v2.MetricsServiceV2/CreateLogMetric',
        request_serializer=google_dot_cloud_dot_proto_dot_logging_dot_v2_dot_logging__metrics__pb2.CreateLogMetricRequest.SerializeToString,
        response_deserializer=google_dot_cloud_dot_proto_dot_logging_dot_v2_dot_logging__metrics__pb2.LogMetric.FromString,
        )
    self.UpdateLogMetric = channel.unary_unary(
        '/google.logging.v2.MetricsServiceV2/UpdateLogMetric',
        request_serializer=google_dot_cloud_dot_proto_dot_logging_dot_v2_dot_logging__metrics__pb2.UpdateLogMetricRequest.SerializeToString,
        response_deserializer=google_dot_cloud_dot_proto_dot_logging_dot_v2_dot_logging__metrics__pb2.LogMetric.FromString,
        )
    # DeleteLogMetric returns google.protobuf.Empty.
    self.DeleteLogMetric = channel.unary_unary(
        '/google.logging.v2.MetricsServiceV2/DeleteLogMetric',
        request_serializer=google_dot_cloud_dot_proto_dot_logging_dot_v2_dot_logging__metrics__pb2.DeleteLogMetricRequest.SerializeToString,
        response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        )
class MetricsServiceV2Servicer(object):
  """Service for configuring logs-based metrics.

  Subclasses override the RPC methods; every handler here reports
  UNIMPLEMENTED on the gRPC context and raises, as generated code does.
  """

  def _unimplemented(self, context):
    # Shared body of all generated placeholder handlers: flag the call as
    # unimplemented on the context, then raise the matching Python error.
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')

  def ListLogMetrics(self, request, context):
    """Lists logs-based metrics.
    """
    self._unimplemented(context)

  def GetLogMetric(self, request, context):
    """Gets a logs-based metric.
    """
    self._unimplemented(context)

  def CreateLogMetric(self, request, context):
    """Creates a logs-based metric.
    """
    self._unimplemented(context)

  def UpdateLogMetric(self, request, context):
    """Creates or updates a logs-based metric.
    """
    self._unimplemented(context)

  def DeleteLogMetric(self, request, context):
    """Deletes a logs-based metric.
    """
    self._unimplemented(context)
def add_MetricsServiceV2Servicer_to_server(servicer, server):
  """Register every MetricsServiceV2 RPC handler of *servicer* on *server*."""
  metrics_pb2 = google_dot_cloud_dot_proto_dot_logging_dot_v2_dot_logging__metrics__pb2
  # (method name, request deserializer, response serializer) per RPC.
  method_specs = (
      ('ListLogMetrics',
       metrics_pb2.ListLogMetricsRequest.FromString,
       metrics_pb2.ListLogMetricsResponse.SerializeToString),
      ('GetLogMetric',
       metrics_pb2.GetLogMetricRequest.FromString,
       metrics_pb2.LogMetric.SerializeToString),
      ('CreateLogMetric',
       metrics_pb2.CreateLogMetricRequest.FromString,
       metrics_pb2.LogMetric.SerializeToString),
      ('UpdateLogMetric',
       metrics_pb2.UpdateLogMetricRequest.FromString,
       metrics_pb2.LogMetric.SerializeToString),
      ('DeleteLogMetric',
       metrics_pb2.DeleteLogMetricRequest.FromString,
       google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString),
  )
  rpc_method_handlers = {
      name: grpc.unary_unary_rpc_method_handler(
          getattr(servicer, name),
          request_deserializer=request_deserializer,
          response_serializer=response_serializer,
      )
      for name, request_deserializer, response_serializer in method_specs
  }
  generic_handler = grpc.method_handlers_generic_handler(
      'google.logging.v2.MetricsServiceV2', rpc_method_handlers)
  server.add_generic_rpc_handlers((generic_handler,))
|
david-farrar/exaproxy | lib/exaproxy/html/menu.py | Python | bsd-2-clause | 3,102 | 0.039329 | # encoding: utf-8
"""
menu.py
Created by Thomas Mangin on 2012-02-25.
Copyright (c) 2011-2013 Exa Networks. All rights reserved.
"""
import sys
import time
from .img import png
from .images import logo
python = '%s %s' % ('Pypy' if 'PyPy' in sys.version else 'Python', sys.version.split()[0])
def html (title,header,color='#FF0000',image=png(logo)):
    # Render the common page chrome for the ExaProxy monitoring UI.
    #
    # Six %-substitutions happen here, in order: page title, header bar
    # background color, header text, logo image, build timestamp, and the
    # interpreter version string.  The returned markup still contains the
    # *menu*, *text* and *time* placeholders, which Menu() substitutes
    # later.  CSS percent signs are written as %%%% because the template
    # goes through TWO %-formatting passes: this one, and the final
    # "% page" pass inside the closure Menu() returns.
    #
    # NOTE(review): the image default is evaluated once at import time,
    # which is fine as long as png(logo) is constant.
    if header: header += '<br/>'
    return """\
<html>
<head>
<title>%s</title>
<meta http-equiv="cache-control" content="no-cache">
</head>
<!-- If you are so curious look at /humans.txt or /humans.html -->
<style type="text/css" media="screen">
.vmenu {
font-family: Verdana, Arial, Helvetica, sans-serif;
font-size: 100%%%%;
width: 160px;
padding: 0px;
margin: 0px 10px 0px 0px;
border-left: 1px solid #000000;
float: right;
}
.vmenu h1 {
display: block;
background-color:#F4F4F4;
font-size: 90%%%%;
padding: 13px 0px 5px 3px;
color: #333333;
margin: 0px;
border-bottom: 1px solid #000000;
width:159px;
}
.vmenu h1 a, .vmenu h1 a:hover, .vmenu h1 a:focus {
color: #0000C3;
text-decoration: none;
}
.vmenu ul {
list-style: none;
margin: 0px;
padding: 0px;
border: none;
}
.vmenu ul li {
margin: 0px;
padding: 0px;
}
.vmenu ul li a {
font-size: 80%%%%;
display: block;
border-bottom: 1px dashed #004E9C;
padding: 1px 0px 2px 20px;
text-decoration: none;
color: #666666;
width: 142px;
}
.vmenu ul li a:hover, .vmenu ul li a:focus {
color: #000000;
background-color: #EEEEEE;
}
</style>
<body leftmargin="0" topmargin="0" rightmargin="0" bgcolor="#FFFFFF" text="#000000" link="#0000FF" alink="#0000FF" vlink="#0000FF">
<div style="padding:15px; color: #FFFFFF; background: %s; font-size: 10px; font-family: verdana,sans-serif,arial; font-weight: bold; border-bottom: solid 1px #A0A0A0;">
<center>
%s
%s
</center>
<br/>
<center>
<span style="float: left;">
%s
</span>
<span style="font-size: 10px">
%s
</span>
<span style="float: right;">
*time*
</span>
</center>
</div>
*menu*
*text*
<br/>
<br/>
</div>
</body>
</html>
""" % (title,color,header,image,time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime()),python)
# Title shown on every monitoring page, and the clickable logo placed in
# the page header by Menu().
_title = 'ExaProxy Monitoring'
# Reconstructed: the 'exa-networks' target attribute was corrupted in the
# source ("exa-ne | tworks").
_image = '<a href="http://www.exa-networks.co.uk/" target="exa-networks">%s</a>' % png(logo)
def Menu (options):
    """Build the left-hand navigation menu and return a page renderer.

    `options` is a sequence of (name, url, section) tuples; each section is
    a sequence of (name, url, new_window) entries.  The returned callable
    takes a page body and produces the final HTML, stamping the current
    time on every call.
    """
    menu = '<div class="vmenu">\n'
    menu += '\t<h1><a href="/index.html">Home</a></h1>\n'
    for name, url, section in options:
        menu += '\t<h1>%s</h1>\n' % name
        if section:
            menu += '\t<ul>\n'
            for name, url, new in section:
                if new:
                    # Reconstructed line (source had a corrupted '+='):
                    # open the link in a named window/tab when requested.
                    menu += '\t\t<li><a href="%s" target="%s">%s</a></li>\n' % (url,name,name)
                else:
                    menu += '\t\t<li><a href="%s">%s</a></li>\n' % (url,name)
            menu += '\t</ul>\n'
    menu += '</div>\n'
    # Substitute the menu now; leave '%s' where the page body will go and
    # keep '*time*' for per-request substitution in the closure below.
    _html = html(_title,'','#9999FF',_image).replace('*text*','%s').replace('*menu*',menu)
    def _lambda (page):
        return _html.replace('*time*',time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime())) % page
    return _lambda
|
shodimaggio/SaivDr | appendix/torch_nsolt/nsoltLayerExceptions.py | Python | bsd-2-clause | 819 | 0.030525 | class InvalidDirection(Exception):
def __init__(self,msg):
super().__init__(self,msg)
class InvalidTargetChannels(Exception):
    """Error for an invalid target-channel specification."""

    def __init__(self, msg):
        # Pass only the message; the previous code forwarded `self` as well,
        # so exc.args was (self, msg) and str(exc) included the object repr.
        super().__init__(msg)
class InvalidMode(Exception):
    """Error for an unsupported mode argument."""

    def __init__(self, msg):
        # Pass only the message; the previous code forwarded `self` as well,
        # so exc.args was (self, msg) and str(exc) included the object repr.
        super().__init__(msg)
class InvalidMus(Exception):
    """Error for an invalid mus (sign) parameter."""

    def __init__(self, msg):
        # Pass only the message; the previous code forwarded `self` as well,
        # so exc.args was (self, msg) and str(exc) included the object repr.
        super().__init__(msg)
class InvalidNumberOfChannels(Exception):
    """Error for an invalid number-of-channels parameter."""

    def __init__(self, msg):
        # Pass only the message; the previous code forwarded `self` as well,
        # so exc.args was (self, msg) and str(exc) included the object repr.
        super().__init__(msg)
class InvalidPolyPhaseOrder(Exception):
    """Error for an invalid polyphase-order parameter."""

    def __init__(self, msg):
        # Pass only the message; the previous code (corrupted in the source
        # to "super().__init__(s | elf,msg)") forwarded `self` as well, so
        # exc.args was (self, msg) and str(exc) included the object repr.
        super().__init__(msg)
class InvalidNumberOfVanishingMoments(Exception):
    """Error for an invalid number-of-vanishing-moments parameter."""

    def __init__(self, msg):
        # Pass only the message; the previous code forwarded `self` as well,
        # so exc.args was (self, msg) and str(exc) included the object repr.
        super().__init__(msg)
class InvalidNumberOfLevels(Exception):
    """Error for an invalid number-of-levels parameter."""

    def __init__(self, msg):
        # Pass only the message; the previous code forwarded `self` as well,
        # so exc.args was (self, msg) and str(exc) included the object repr.
        super().__init__(msg)
ihacklog/osdlyrics | daemon/player.py | Python | gpl-3.0 | 19,960 | 0.002455 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2011 Tiger Soldier
#
# This file is part of OSD Lyrics.
#
# OSD Lyrics is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OSD Lyrics is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with OSD Lyrics. If not, see <http://www.gnu.org/licenses/>.
#/
import logging
import dbus.service
import osdlyrics
import osdlyrics.dbusext
import osdlyrics.timer
import glib
import config
from osdlyrics.consts import \
MPRIS2_PLAYER_INTERFACE, \
MPRIS2_ROOT_INTERFACE, \
MPRIS2_OBJECT_PATH
class PlayerSupport(dbus.service.Object):
""" Implement org.osdlyrics.Player Interface
"""
DETECT_PLAYER_TIMEOUT = 1000
def __init__(self, conn):
"""
Arguments:
- `conn`: DBus connection of the object
"""
dbus.service.Object.__init__(self,
conn=conn,
object_path=osdlyrics.PLAYER_OBJECT_PATH)
self._active_player = None
self._player_proxies = {}
self._connect_player_proxies()
self._start_detect_player()
self._mpris2_player = Mpris2Player(conn)
def _start_detect_player(self):
self._detect_timer = glib.timeout_add(self.DETECT_PLAYER_TIMEOUT,
lambda : not self._detect_player())
def _detect_player(self):
"""
Detects active players.
This is a callback function of a timer. If active player detected, try to
connect to the player and remove the timer.
"""
detected = False
for proxy in self._player_proxies.values():
try:
active_players = proxy.ListActivePlayers()
for player_info in active_players:
if self._connect_player(proxy, player_info):
detected = True
| break
if detected:
break
except:
pass
if detected and self._detect_timer:
glib.source_remove(self._detect_timer)
self._detect_timer = None
return detected
def _connect_proxy(self, bus_name, activate):
if not bus_name.startswith(osdlyrics.PLAYER_PROXY_BUS_NAME | _PREFIX):
return
logging.info('Connecting to player proxy %s', bus_name)
proxy_name = bus_name[len(osdlyrics.PLAYER_PROXY_BUS_NAME_PREFIX):]
if activate:
try:
self.connection.activate_name_owner(bus_name)
except Exception, e:
logging.warning('Cannot activate proxy %s: %s' % (bus_name, e))
self.connection.watch_name_owner(bus_name,
lambda name: self._proxy_name_changed(proxy_name, len(name) == 0))
def _connect_player_proxies(self):
"""
Activates all player proxy services
"""
active_names = self.connection.list_names()
for bus_name in active_names:
self._connect_proxy(bus_name, False)
activatable_names = self.connection.list_activatable_names()
for bus_name in activatable_names:
self._connect_proxy(bus_name, True)
def _connect_player(self, proxy, player_info):
"""
Tries to connect player through proxy
Return True if connect successful
"""
try:
path = proxy.ConnectPlayer(player_info['name'])
player = self.connection.get_object(proxy.bus_name,
path)
self._active_player = {'info': player_info,
'player': player,
'proxy': proxy}
self._mpris2_player.connect_player(player)
self.PlayerConnected(player_info)
return True
except Exception, e:
return False
def _player_lost_cb(self, player_name):
if self._active_player and self._active_player['info']['name'] == player_name:
logging.info('Player %s lost', player_name)
self._active_player = None
self._mpris2_player.disconnect_player()
self.PlayerLost()
self._start_detect_player()
def _proxy_name_changed(self, proxy_name, lost):
bus_name = osdlyrics.PLAYER_PROXY_BUS_NAME_PREFIX + proxy_name
if not lost:
logging.info('Get player proxy %s' % proxy_name)
proxy = self.connection.get_object(bus_name,
osdlyrics.PLAYER_PROXY_OBJECT_PATH_PREFIX + proxy_name)
proxy.connect_to_signal('PlayerLost',
self._player_lost_cb)
self._player_proxies[proxy_name] = dbus.Interface(proxy, osdlyrics.PLAYER_PROXY_INTERFACE)
else:
if not proxy_name in self._player_proxies:
return
logging.info('Player proxy %s lost' % proxy_name)
proxy = self._player_proxies[proxy_name]
# If current player is provided by the proxy, it is lost.
if self._active_player and self._active_player['proxy'] == proxy:
self._player_lost_cb(self._active_player['info']['name'])
del self._player_proxies[proxy_name]
# Try to reactivate proxy
try:
self.connection.activate_name_owner(bus_name)
except:
pass
@dbus.service.method(dbus_interface=osdlyrics.PLAYER_INTERFACE,
in_signature='',
out_signature='aa{sv}')
def ListSupportedPlayers(self):
ret = []
for proxy in self._player_proxies.values():
try:
ret = ret + proxy.ListSupportedPlayers()
except:
pass
return ret
@dbus.service.method(dbus_interface=osdlyrics.PLAYER_INTERFACE,
in_signature='',
out_signature='aa{sv}')
def ListActivatablePlayers(self):
ret = []
for proxy in self._player_proxies.values():
try:
ret = ret + proxy.ListActivatablePlayers()
except:
pass
return ret
@dbus.service.method(dbus_interface=osdlyrics.PLAYER_INTERFACE,
in_signature='',
out_signature='ba{sv}')
def GetCurrentPlayer(self):
if not self._active_player and not self._detect_player():
return False, {}
return True, self._active_player['info']
@dbus.service.signal(dbus_interface=osdlyrics.PLAYER_INTERFACE,
signature='')
def PlayerLost(self):
pass
@dbus.service.signal(dbus_interface=osdlyrics.PLAYER_INTERFACE,
signature='a{sv}')
def PlayerConnected(self, player_info):
pass
@property
def current_player(self):
return self._mpris2_player
class Mpris2Player(osdlyrics.dbusext.Object):
    def __init__(self, conn):
        # Export this object on the bus at the well-known MPRIS2 path.
        super(Mpris2Player, self).__init__(conn=conn,
                                           object_path=MPRIS2_OBJECT_PATH)
        self._signals = []  # signal-match handles for the proxied player
        self._player = None  # proxy of the currently connected player, if any
        self._timer = osdlyrics.timer.Timer()  # tracks playback position
        self._clear_properties()
    def _clear_properties(self):
        """Reset the exported MPRIS2 properties to their idle defaults."""
        self.LoopStatus = 'None'
        self.PlaybackStatus = 'Stopped'
        self.Metadata = dbus.Dictionary(signature='sv')
        self.Shuffle = False
        # Stop and zero the position timer as well.
        self._timer.stop()
        self._timer.time = 0
def connect_player(self, player_proxy):
if self._player == player_proxy:
return
|
chandler14362/panda3d | direct/src/distributed/DistributedSmoothNode.py | Python | bsd-3-clause | 22,204 | 0.004909 | """DistributedSmoothNode module: contains the DistributedSmoothNode class"""
from panda3d.core import *
from panda3d.direct import *
from .ClockDelta import *
from . import DistributedNode
from . import DistributedSmoothNodeBase
from direct.task.Task import cont
from direct.showbase import DConfig as config
# This number defines our tolerance for out-of-sync telemetry packets.
# If a packet appears to have originated from more than MaxFuture
# seconds in the future, assume we're out of sync with the other
# avatar and suggest a resync for both.
# This number defines our tolerance for out-of-sync telemetry packets.
# If a packet appears to have originated from more than MaxFuture
# seconds in the future, assume we're out of sync with the other
# avatar and suggest a resync for both.
MaxFuture = config.GetFloat("smooth-max-future", 0.2)

# How frequently can we suggest a resynchronize with another client?
MinSuggestResync = config.GetFloat("smooth-min-suggest-resync", 15)

# These flags indicate whether global smoothing and/or prediction is
# allowed or disallowed.
EnableSmoothing = config.GetBool("smooth-enable-smoothing", 1)
EnablePrediction = config.GetBool("smooth-enable-prediction", 1)

# These values represent the amount of time, in seconds, to delay the
# apparent position of other avatars, when non-predictive and
# predictive smoothing is in effect, respectively.  This is in
# addition to the automatic delay of the observed average latency from
# each avatar, which is intended to compensate for relative clock
# skew.
Lag = config.GetDouble("smooth-lag", 0.2)
PredictionLag = config.GetDouble("smooth-prediction-lag", 0.0)

# Current runtime smoothing state; flipped at runtime by
# globalActivateSmoothing() below (both off by default).
GlobalSmoothing = 0
GlobalPrediction = 0
def globalActivateSmoothing(smoothing, prediction):
    """ Globally activates or deactivates smoothing and prediction on
    all DistributedSmoothNodes currently in existence, or yet to be
    generated. """
    global GlobalSmoothing, GlobalPrediction
    GlobalSmoothing, GlobalPrediction = smoothing, prediction
    # Push the new settings to every node that has already been generated;
    # future nodes pick them up from the module globals in generate().
    for node in base.cr.getAllOfType(DistributedSmoothNode):
        node.activateSmoothing(smoothing, prediction)
# For historical reasons, we temporarily define
# DistributedSmoothNode.activateSmoothing() to be the global function.
# We'll remove this soon, so it won't get confused with the instance
# method, below.  (Module-level alias kept only for backward
# compatibility with old callers.)
activateSmoothing = globalActivateSmoothing
class DistributedSmoothNode(DistributedNode.DistributedNode,
| DistributedSmoothNodeBase.DistributedSmoothNodeBase):
"""
This specializes DistributedNode to add functionality to smooth
motion over time, via the SmoothMover C++ object defined in
DIRECT.
"""
    def __init__(self, cr):
        # Guard against double-initialization: distributed objects can have
        # __init__ invoked more than once across generate/disable cycles, so
        # probe for the marker attribute and only initialize the first time.
        try:
            self.DistributedSmoothNode_initialized
        except:
            self.DistributedSmoothNode_initialized = 1
            DistributedNode.DistributedNode.__init__(self, cr)
            DistributedSmoothNodeBase.DistributedSmoothNodeBase.__init__(self)
            self.smoothStarted = 0
            # Set this True to assert that the local process has
            # complete authority over the position of this object when
            # smoothing is not in effect.  When this is True, position
            # reports received over the wire will not be applied to
            # this node's position, unless those position reports are
            # received between startSmooth() and endSmooth().
            self.localControl = False
            # flag set when we receive a stop message
            self.stopped = False
    def generate(self):
        """Set up per-instance smoothing state when the object enters the world."""
        self.smoother = SmoothMover()
        self.smoothStarted = 0
        self.lastSuggestResync = 0
        self._smoothWrtReparents = False
        DistributedNode.DistributedNode.generate(self)
        DistributedSmoothNodeBase.DistributedSmoothNodeBase.generate(self)
        self.cnode.setRepository(self.cr, 0, 0)
        # Apply whatever global smoothing/prediction state is in effect.
        self.activateSmoothing(GlobalSmoothing, GlobalPrediction)
        # clear stopped flag for re-generate
        self.stopped = False
    def disable(self):
        # Tear down in the reverse order of generate(); drop the smoother
        # so a later re-generate starts from a fresh SmoothMover.
        DistributedSmoothNodeBase.DistributedSmoothNodeBase.disable(self)
        DistributedNode.DistributedNode.disable(self)
        del self.smoother

    def delete(self):
        # Final cleanup; both base classes get their delete hook.
        DistributedSmoothNodeBase.DistributedSmoothNodeBase.delete(self)
        DistributedNode.DistributedNode.delete(self)
### Methods to handle computing and updating of the smoothed
### position.
    def smoothPosition(self):
        """
        This function updates the position of the node to its computed
        smoothed position.  This may be overridden by a derived class
        to specialize the behavior.
        """
        # Delegates to the SmoothMover, which both computes and applies
        # the smoothed pos/hpr to this node.
        self.smoother.computeAndApplySmoothPosHpr(self, self)
    def doSmoothTask(self, task):
        # Per-frame task body: apply the smoothed position and keep running.
        self.smoothPosition()
        return cont

    def wantsSmoothing(self):
        # Override this function to return 0 if this particular kind
        # of smooth node doesn't really want to be smoothed.
        return 1
    def startSmooth(self):
        """
        This function starts the task that ensures the node is
        positioned correctly every frame.  However, while the task is
        running, you won't be able to lerp the node or directly
        position it.
        """
        # Local or disabled nodes are never smoothed.
        if not self.wantsSmoothing() or self.isDisabled() or self.isLocal():
            return
        if not self.smoothStarted:
            taskName = self.taskName("smooth")
            # Remove any stale task of the same name before re-adding.
            taskMgr.remove(taskName)
            self.reloadPosition()
            taskMgr.add(self.doSmoothTask, taskName)
            self.smoothStarted = 1
    def stopSmooth(self):
        """
        This function stops the task spawned by startSmooth(), and
        allows show code to move the node around directly.
        """
        if self.smoothStarted:
            taskName = self.taskName("smooth")
            taskMgr.remove(taskName)
            # Snap to the latest known position before handing control back.
            self.forceToTruePosition()
            self.smoothStarted = 0
    def setSmoothWrtReparents(self, flag):
        # Setter for the _smoothWrtReparents flag (stored verbatim).
        self._smoothWrtReparents = flag

    def getSmoothWrtReparents(self):
        # Getter for the _smoothWrtReparents flag.
        return self._smoothWrtReparents
    def forceToTruePosition(self):
        """
        This forces the node to reposition itself to its latest known
        position.  This may result in a pop as the node skips the last
        of its lerp points.
        """
        #printStack()
        # Only non-local nodes with at least one received position report
        # are snapped; afterwards the stale reports are discarded.
        if (not self.isLocal()) and \
           self.smoother.getLatestPosition():
            self.smoother.applySmoothPosHpr(self, self)
        self.smoother.clearPositions(1)
    def reloadPosition(self):
        """
        This function re-reads the position from the node itself and
        clears any old position reports for the node.  This should be
        used whenever show code bangs on the node position and expects
        it to stick.
        """
        self.smoother.clearPositions(0)
        # Seed the smoother with the node's current transform under a
        # locally generated (phony) timestamp.
        self.smoother.setPosHpr(self.getPos(), self.getHpr())
        self.smoother.setPhonyTimestamp()
        self.smoother.markPosition()
def _checkResume(self,timestamp):
"""
Determine if we were previously stopped and now need to
resume movement by making sure any old stored positions
reflect the node's current position
"""
if (self.stopped):
currTime = globalClock.getFrameTime()
now = currTime - self.smoother.getExpectedBroadcastPeriod()
last = self.smoother.getMostRecentTimestamp()
if (now > last):
# only set a new timestamp postion if we still have
# a position being smoothed to (so we don't interrupt
# any current smoothing and only do this if the object
# is actually locally stopped)
if (timestamp == None):
# no timestamp, use current time
local = 0.0
else:
local = globalClockDelta.networkToLocalTime(
timestamp, currTime)
self.smoother.setPhonyTimestamp(local,True)
self.smoother.markPosition()
self.stopped = False
# distributed set pos and hpr functions
# 'send' versions are inherited from DistributedSmoothNodeBase
    def setSmStop(self, timestamp=None):
        # Distributed "stop" message: apply the timestamped component update,
        # then flag the node as stopped so _checkResume() can resynchronize
        # stored positions when movement resumes.
        self.setComponentTLive(timestamp)
        self.stopped = True
def setSmH(self, h, |
kylerbrown/pyjack | setup.py | Python | lgpl-2.1 | 1,661 | 0.020482 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Distutils installer for PyJack
# Test for Jack2
#---------------------------------------------------#
import os

# Locate the Jack headers; prefer a /usr/local install over the distro one.
if os.path.exists("/usr/local/include/jack/jack.h"):
    path = "/usr/local/include/jack/jack.h"
elif os.path.exists("/usr/include/jack/jack.h"):
    path = "/usr/include/jack/jack.h"
else:
    print("You don't seem to have the jack headers installed.\nPlease install them first")
    exit(-1)

# Read the header inside a context manager so the file handle is closed
# promptly instead of leaking until interpreter exit.
with open(path) as header_file:
    test = header_file.read()

pyjack_macros = []
# Jack2 introduced jack_get_version_string(); its presence in the header
# distinguishes the two API generations at build time.
if "jack_get_version_string" in test:
    pyjack_macros += [('JACK2', '1')]
else:
    pyjack_macros += [('JACK1', '1')]
from distutils.core import setup, Extension
import numpy.distutils
# The C extension needs NumPy's headers on its include path.
numpy_include_dirs = numpy.distutils.misc_util.get_numpy_include_dirs()

setup(
    name = "pyjack",
    version = "0.5.1",
    description = "Python bindings for the Jack Audio Server",
    author = "Andrew W. Schmeder, falkTX, IOhannes m zmölnig",
    author_email = "andy@a2hd.com",
    url = "http://sourceforge.net/projects/py-jack",
    long_description = '''PyJack is a module written in C which exposes the Jack API to Python.
For information about Jack see http://jackaudio.org.  This
enables a Python program to connect to and interact with pro-audio
applications which use the Jack Audio Server''',
    license = "GNU LGPL2.1",
    # Single C extension; define_macros carries the JACK1/JACK2 selection
    # computed from the header scan above.
    ext_modules = [Extension("jack",
                             ["pyjack.c"],
                             libraries=["jack", "dl"],
                             include_dirs=numpy_include_dirs,
                             define_macros=pyjack_macros,
                             )],
    )
|
atagh/gavel-clone | gavel/controllers/admin.py | Python | agpl-3.0 | 6,919 | 0.001734 | from gavel import app
from gavel.models import *
from gavel.constants import *
import gavel.settings as settings
import gavel.utils as utils
from flask import (
redirect,
render_template,
request,
url_for,
)
import urllib.parse
@app.route('/admin/')
@utils.requires_auth
def admin():
    """Render the admin dashboard with per-annotator and per-item stats."""
    annotators = Annotator.query.order_by(Annotator.id).all()
    items = Item.query.order_by(Item.id).all()
    decisions = Decision.query.all()
    # counts: votes cast per annotator; item_counts: how often each item
    # appeared in a decision (as winner or loser).
    counts = {}
    item_counts = {}
    for d in decisions:
        a = d.annotator_id
        w = d.winner_id
        l = d.loser_id
        counts[a] = counts.get(a, 0) + 1
        item_counts[w] = item_counts.get(w, 0) + 1
        item_counts[l] = item_counts.get(l, 0) + 1
    # For each item, the set of annotator ids that actually viewed it.
    viewed = {i.id: {a.id for a in i.viewed} for i in items}
    # skipped: items an annotator chose to ignore without ever viewing.
    skipped = {}
    for a in annotators:
        for i in a.ignore:
            if a.id not in viewed[i.id]:
                skipped[i.id] = skipped.get(i.id, 0) + 1
    # settings
    setting_closed = Setting.value_of(SETTING_CLOSED) == SETTING_TRUE
    return render_template(
        'admin.html',
        annotators=annotators,
        counts=counts,
        item_counts=item_counts,
        skipped=skipped,
        items=items,
        votes=len(decisions),
        setting_closed=setting_closed,
    )
@app.route('/admin/item', methods=['POST'])
@utils.requires_auth
def item():
    """Handle admin actions on items: bulk add, prioritize, enable/disable, delete."""
    action = request.form['action']
    if action == 'Submit':
        # Bulk-add items from pasted CSV; one Item per row.
        csv = request.form['data']
        data = utils.data_from_csv_string(csv)
        for row in data:
            _item = Item(*row)
            db.session.add(_item)
        db.session.commit()
    elif action == 'Prioritize' or action == 'Cancel':
        item_id = request.form['item_id']
        target_state = action == 'Prioritize'
        Item.by_id(item_id).prioritized = target_state
        db.session.commit()
    elif action == 'Disable' or action == 'Enable':
        item_id = request.form['item_id']
        target_state = action == 'Enable'
        Item.by_id(item_id).active = target_state
        db.session.commit()
    elif action == 'Delete':
        item_id = request.form['item_id']
        try:
            # Remove ignore-table rows first so the item row can go away
            # without violating the association's foreign key.
            db.session.execute(ignore_table.delete(ignore_table.c.item_id == item_id))
            Item.query.filter_by(id=item_id).delete()
            db.session.commit()
        except IntegrityError as e:
            return render_template('error.html', message=str(e))
    return redirect(url_for('admin'))
@app.route('/admin/item_patch', methods=['POST'])
@utils.requires_auth
def item_patch():
    """Update the editable fields of an item from the posted form."""
    item = Item.by_id(request.form['item_id'])
    if not item:
        return render_template('error.html', message='Item not found.')
    # Copy over exactly the fields present in the submitted form.
    for field in ('location', 'name', 'description'):
        if field in request.form:
            setattr(item, field, request.form[field])
    db.session.commit()
    return redirect(url_for('item_detail', item_id=item.id))
@app.route('/admin/annotator', methods=['POST'])
@utils.requires_auth
def annotator():
    """Handle admin actions on annotators: bulk add, email, enable/disable, delete."""
    action = request.form['action']
    if action == 'Submit':
        # Bulk-add annotators from pasted CSV, then email each a login link.
        csv = request.form['data']
        data = utils.data_from_csv_string(csv)
        added = []
        for row in data:
            annotator = Annotator(*row)
            added.append(annotator)
            db.session.add(annotator)
        db.session.commit()
        try:
            email_invite_links(added)
        except Exception as e:
            return render_template('error.html', message=str(e))
    elif action == 'Email':
        annotator_id = request.form['annotator_id']
        try:
            email_invite_links(Annotator.by_id(annotator_id))
        except Exception as e:
            return render_template('error.html', message=str(e))
    elif action == 'Disable' or action == 'Enable':
        annotator_id = request.form['annotator_id']
        target_state = action == 'Enable'
        Annotator.by_id(annotator_id).active = target_state
        db.session.commit()
    elif action == 'Delete':
        annotator_id = request.form['annotator_id']
        try:
            # Remove ignore-table rows first to satisfy the foreign key.
            db.session.execute(ignore_table.delete(ignore_table.c.annotator_id == annotator_id))
            Annotator.query.filter_by(id=annotator_id).delete()
            db.session.commit()
        except IntegrityError as e:
            return render_template('error.html', message=str(e))
    # Reconstructed: the source line was corrupted ("redirect( | url_for").
    return redirect(url_for('admin'))
@app.route('/admin/setting', methods=['POST'])
@utils.requires_auth
def setting():
    """Toggle global settings; currently only opening/closing judging.

    The route decorator was reconstructed: the source line was corrupted
    ("'/admin | /setting'").
    """
    key = request.form['key']
    if key == 'closed':
        action = request.form['action']
        # 'Close' sets the flag; any other action ('Open') clears it.
        new_value = SETTING_TRUE if action == 'Close' else SETTING_FALSE
        Setting.set(SETTING_CLOSED, new_value)
        db.session.commit()
    return redirect(url_for('admin'))
@app.route('/admin/item/<item_id>/')
@utils.requires_auth
def item_detail(item_id):
    """Show one item with the annotators assigned to it and those who skipped it."""
    item = Item.by_id(item_id)
    if not item:
        return render_template('error.html', message='Item not found.')
    else:
        # Annotators whose next comparison involves this item.
        assigned = Annotator.query.filter(Annotator.next == item).all()
        viewed_ids = {i.id for i in item.viewed}
        # "Skipped" = ignoring the item without ever having viewed it; the
        # in_() clause is only valid when the viewed set is non-empty.
        if viewed_ids:
            skipped = Annotator.query.filter(
                Annotator.ignore.contains(item) & ~Annotator.id.in_(viewed_ids)
            )
        else:
            skipped = Annotator.query.filter(Annotator.ignore.contains(item))
        return render_template(
            'admin_item.html',
            item=item,
            assigned=assigned,
            skipped=skipped
        )
@app.route('/admin/annotator/<annotator_id>/')
@utils.requires_auth
def annotator_detail(annotator_id):
    """Show one annotator with the items they have seen and those they skipped."""
    annotator = Annotator.by_id(annotator_id)
    if not annotator:
        return render_template('error.html', message='Annotator not found.')
    else:
        seen = Item.query.filter(Item.viewed.contains(annotator)).all()
        ignored_ids = {i.id for i in annotator.ignore}
        # "Skipped" = ignored without ever viewing; only query when the
        # ignore set is non-empty, since in_() needs at least one id.
        if ignored_ids:
            skipped = Item.query.filter(
                Item.id.in_(ignored_ids) & ~Item.viewed.contains(annotator)
            )
        else:
            skipped = []
        return render_template(
            'admin_annotator.html',
            annotator=annotator,
            seen=seen,
            skipped=skipped
        )
def email_invite_links(annotators):
    """Email every given annotator their personal login link.

    Accepts a single annotator or a list of them; does nothing when email
    is disabled in settings or no annotator is given.
    """
    if settings.DISABLE_EMAIL or annotators is None:
        return
    recipients = annotators if isinstance(annotators, list) else [annotators]
    messages = []
    for person in recipients:
        login_url = urllib.parse.urljoin(settings.BASE_URL, '/login/%s' % person.secret)
        body_source = settings.EMAIL_BODY.format(name=person.name, link=login_url)
        body = '\n\n'.join(utils.get_paragraphs(body_source))
        messages.append((person.email, settings.EMAIL_SUBJECT, body))
    utils.send_emails(messages)
|
rapidpro/tracpro | tracpro/polls/maps.py | Python | bsd-3-clause | 2,550 | 0.001569 | from __future__ import unicode_literals
from collections import Counter
from itertools import groupby
from operator import itemgetter
import numpy
from django.db.models import F
from tracpro.charts.formatters import format_number
from .utils import get_numeric_values
from . import rules
def get_map_data(responses, question):
    """Build the map-display payload for a question over the given responses.

    Returns None for unsupported question types or when no mappable data
    exists; otherwise a dict with 'map-data' and 'all-categories' keys.
    """
    answers = get_answers(responses, question)
    # Dispatch on question type; unsupported types produce no map data.
    builders = {
        question.TYPE_NUMERIC: numeric_map_data,
        question.TYPE_MULTIPLE_CHOICE: multiple_choice_map_data,
    }
    builder = builders.get(question.question_type)
    map_data = builder(answers, question) if builder else None
    if not map_data:
        return None
    return {
        'map-data': map_data,
        'all-categories': rules.get_all_categories(question, answers),
    }
def get_answers(responses, question):
    """Return answers to the question from the responses, annotated with `boundary`.

    Excludes answers that are not associated with a boundary.
    """
    return (question.answers
            .filter(response__in=responses)
            .annotate(boundary=F('response__contact__region__boundary'))
            .exclude(boundary=None))
def numeric_map_data(answers, question):
    """For each boundary, display the category of the average answer value."""
    map_data = {}
    # itertools.groupby only groups consecutive rows, so the queryset must
    # be ordered by the grouping key ('boundary') first.
    answer_data = [
        {
            'boundary': answer.boundary,
            'value_to_use': answer.value_to_use
        }
        for answer in answers.order_by('boundary')
    ]
    for boundary_id, _answers in groupby(answer_data, itemgetter('boundary')):
        values = get_numeric_values(a['value_to_use'] for a in _answers)
        # Boundaries with no numeric answers are simply omitted.
        if len(values) > 0:
            average = round(numpy.mean(values), 2)
            map_data[boundary_id] = {
                'average': format_number(average, digits=2),
                'category': question.categorize(average),
            }
    return map_data
def multiple_choice_map_data(answers, question):
    """For each boundary, display the most common answer category."""
    # Drop uncategorized answers and order by boundary so groupby() sees
    # each boundary's rows contiguously.
    categorized = (answers.exclude(category=None)
                          .exclude(category="")
                          .order_by('boundary')
                          .values('boundary', 'category'))
    return {
        boundary_id: {
            'category': Counter(
                row['category'] for row in rows).most_common(1)[0][0],
        }
        for boundary_id, rows in groupby(categorized, itemgetter('boundary'))
    }
|
Lotterleben/desvirt | desvirt/vm.py | Python | gpl-3.0 | 5,090 | 0.005894 | import sys
import os
import pty, shlex
import signal
import subprocess
import socket
import time
import atexit
import re
import string
import logging
import random
from .vif import VirtualInterface
from .vnet import VirtualNet
from .riotnative import RIOT
from string import Template
import libvirt
import hashlib
all_domains = None
class VMException(Exception):
    """Base error raised for VM management failures."""

    def __init__(self, msg=None):
        if not msg:
            msg = "Unknown VM Error."
        # Keep the legacy .message attribute for existing callers, but also
        # initialize Exception properly: the old code never called the base
        # __init__, so str(exc) was always the empty string.
        self.message = msg
        super(VMException, self).__init__(msg)
class VM():
    """A single emulated node: either a libvirt/KVM domain ("meshrouter")
    or a RIOT native process ("riot_native"), selected by ``nodeType``."""

    def __init__(self, name, nodeType, nics=None, binary=None, vmgroup_name=""):
        self.name = name
        self.nodeType = nodeType
        # Path to the RIOT native binary; only meaningful for riot_native.
        self.binary = binary
        self.nics = nics
        if not nics:
            self.nics = []
        self.vmgroup_name = vmgroup_name
| self.vm_instance = None
        # NOTE(review): the stray "|" above looks like copy/paste corruption
        # of `self.vm_instance = None` — confirm against the original source.
        # fullname is prefixed with the group name so domains from different
        # groups do not collide in libvirt.
        self.fullname = self.name
        if self.vmgroup_name:
            self.fullname = "%s_%s" % (self.vmgroup_name, name)
    def lookup(self, | conn=None):
        # NOTE(review): the stray "|" in the signature above looks like
        # copy/paste corruption of `def lookup(self, conn=None):`.
        """Locate an existing instance for this node.

        For meshrouter nodes, search the (module-level, lazily built) cache
        of all libvirt domains; returns True and stores the domain in
        self.vm_instance when found, False otherwise. For riot_native nodes
        a fresh RIOT wrapper is created and True is always returned.
        """
        global all_domains
        if self.nodeType == "meshrouter":
            if not all_domains:
                all_domains = {}
                # Running domains are listed by numeric id ...
                for id in conn.listDomainsID():
                    dom = conn.lookupByID(id)
                    all_domains[dom.name()] = dom
                # ... defined-but-stopped domains by name.
                for id in conn.listDefinedDomains():
                    all_domains[id] = conn.lookupByName(id)
            try:
                self.vm_instance = all_domains[self.fullname]
                logging.getLogger("").debug("Domain %s already defined." % self.fullname)
                self.conn = conn
                return True
            except libvirt.libvirtError:
                return False
            except KeyError:
                return False
        elif self.nodeType == "riot_native":
            # NOTE(review): this creates a logger named "Looking up this
            # node" but logs nothing — probably meant to be a debug() call.
            logging.getLogger("Looking up this node")
            self.vm_instance = RIOT(self.fullname, self.binary, self.vmgroup_name, self.nics[0].tap)
            return True
    def define(self, conn=None):
        """Define the node: register the libvirt domain XML (meshrouter)
        or create the RIOT native process wrapper."""
        if self.nodeType == "meshrouter":
            if not self.lookup(conn):
                logging.getLogger("").info("Defining VM %s" %(self.fullname))
                self.vm_instance = conn.defineXML(self.create_vm_xml())
        else:
            logging.getLogger("").info("Defining RIOT native process %s" % (self.fullname))
            if not self.binary:
                logging.getLogger("").error("No binary for RIOT native given. Exiting...")
                sys.exit(1)
            self.vm_instance = RIOT(self.fullname, self.binary, self.vmgroup_name, self.nics[0].tap)
    def undefine(self, conn=None):
        """Remove the libvirt domain definition, looking it up first if needed."""
        # TODO: needs here anything to be done for RIOT native?
        if self.nodeType == "meshrouter":
            if self.vm_instance or self.lookup(conn):
                self.vm_instance.undefine()
    def start(self):
        """Start the underlying instance unless it is already active."""
        if self.vm_instance:
            if not self.vm_instance.isActive():
                self.vm_instance.create()
    def stop(self):
        """Forcefully stop (destroy) the underlying instance if active."""
        if self.vm_instance:
            logging.getLogger("").debug("stopping %s (%s)" % (self.name, self.vm_instance.pid))
            if self.vm_instance.isActive():
                logging.getLogger("").debug("destroying %s" % self.vm_instance.pid)
                self.vm_instance.destroy()
    def getType(self):
        """Return the node type string ("meshrouter" or "riot_native")."""
        return self.nodeType
    def create_interfaces_xml(self):
        """Render the <interface> XML fragments for all configured NICs."""
        if len(self.nics)<1:
            return ""
        ifxml = ""
        # NOTE(review): nic_options is assigned but never used.
        nic_options = ''
        for nic in self.nics:
            # Only emit a <mac> element when a MAC address was configured.
            macaddr = ""
            if nic.macaddr:
                macaddr = macaddr_template.substitute(mac=nic.macaddr)
            ifxml = ifxml + if_tmpl.substitute(mac=macaddr,tap=nic.tap)
        return ifxml
    def create_vm_xml(self):
        """Render the full libvirt domain XML for this node (256 MiB RAM)."""
        ifxml = self.create_interfaces_xml()
        return vm_xml_tmpl.substitute(name=self.fullname,memory=262144,interfaces=ifxml)
# libvirt domain XML for a KVM "meshrouter" node; $name, $memory and
# $interfaces are substituted by VM.create_vm_xml().
vm_xml_tmpl = Template('''
<domain type='kvm'>
<name>$name</name>
<memory>$memory</memory>
<vcpu>1</vcpu>
<os>
<type arch='i686'>hvm</type>
<boot dev='hd'/>
</os>
<features>
<acpi/>
<pae/>
</features>
<clock offset='utc'/>
<on_poweroff>restart</on_poweroff>
<on_reboot>restart</on_reboot>
<on_crash>restart</on_crash>
<devices>
<disk type='file' device='disk'>
<source file='/usr/local/share/qemu/gpxe-serial.bin'/>
<target dev='hda' bus='ide'/>
</disk>
<controller type='ide' index='0'/>
$interfaces
<serial type='pty'>
<target port='0'/>
</serial>
<console type='pty'>
<target port='0'/>
</console>
</devices>
</domain>
''')
# Per-NIC <interface> fragment (tap device, e1000 model); $mac is the
# optional <mac .../> element produced from macaddr_template below.
if_tmpl = Template('''
<interface type='ethernet'>
$mac
<target dev='$tap'/>
<model type='e1000'/>
<script path='/bin/true'/>
</interface>
''');
# Alternative bridge-mode interface template, kept for reference.
# if_tmpl = Template('''
# <interface type='bridge'>
# <source bridge='$bridge'/>
# $mac
# <target dev='$tap'/>
# <model type='e1000'/>
# </interface>
# ''');
# Optional MAC-address element embedded into if_tmpl via $mac.
macaddr_template = Template('''
<mac address='$mac'/>
''');
|
saltstack/salt | tests/pytests/unit/modules/test_pdbedit.py | Python | apache-2.0 | 4,278 | 0.001403 | from textwrap import dedent
import pytest
import salt.modules.pdbedit as pdbedit
from tests.support.mock import MagicMock, patch
@pytest.fixture(autouse=True)
def setup_loader(request):
    # Give the pdbedit module an (initially empty) loader-module mapping so
    # each test can patch pdbedit.__salt__; autouse applies it to every test.
    setup_loader_modules = {pdbedit: {}}
    with pytest.helpers.loader_mock(request, setup_loader_modules) as loader_mock:
        yield loader_mock
@pytest.mark.parametrize("verbose", [True, False])
def test_when_no_users_returned_no_data_should_be_returned(verbose):
    """Empty pdbedit output should yield an empty dict/list."""
    # verbose=True returns a dict keyed by user; verbose=False a plain list.
    expected_users = {} if verbose else []
    with patch.dict(
        pdbedit.__salt__,
        {
            "cmd.run_all": MagicMock(
                return_value={"stdout": "", "stderr": "", "retcode": 0}
            )
        },
    ):
        actual_users = pdbedit.list_users(verbose=verbose)
    assert actual_users == expected_users
def test_when_verbose_and_retcode_is_nonzero_output_should_be_had():
    """A failing pdbedit run should log its stderr via log.error."""
    expected_stderr = "this is something fnord"
    with patch.dict(
        pdbedit.__salt__,
        {
            "cmd.run_all": MagicMock(
                return_value={"stdout": "", "stderr": expected_stderr, "retcode": 1}
            )
        },
    ), patch("salt.modules.pdbedit.log.error", autospec=True) as fake_error_log:
        pdbedit.list_users(verbose=True)
    # The first log.error() call should forward stderr verbatim.
    actual_error = fake_error_log.mock_calls[0].args[0]
    assert actual_error == expected_stderr
def test_when_verbose_and_single_good_output_expected_data_should_be_parsed():
    """A single well-formed record should be parsed into lower-cased keys."""
    expected_data = {
        "roscivs": {
            "unix username": "roscivs",
            "nt username": "bottia",
            "full name": "Roscivs Bottia",
            "user sid": "42",
            "primary group sid": "99",
            "home directory": r"\\samba\roscivs",
            "account desc": "separators! xxx so long and thanks for all the fish",
            "logoff time": "Sat, 14 Aug 2010 15:06:39 UTC",
            "kickoff time": "Sat, 14 Aug 2010 15:06:39 UTC",
            "password must change": "never",
        }
    }
    # Raw string because the pdbedit output contains backslash paths.
    pdb_output = dedent(
        r"""
        Unix username: roscivs
        NT username: bottia
        User SID: 42
        Primary Group SID: 99
        Full Name: Roscivs Bottia
        Home Directory: \\samba\roscivs
        Account desc: separators! xxx so long and thanks for all the fish
        Logoff time: Sat, 14 Aug 2010 15:06:39 UTC
        Kickoff time: Sat, 14 Aug 2010 15:06:39 UTC
        Password must change: never
        """
    ).strip()
    with patch.dict(
        pdbedit.__salt__,
        {
            "cmd.run_all": MagicMock(
                return_value={"stdout": pdb_output, "stderr": "", "retcode": 0}
            )
        },
    ):
        actual_data = pdbedit.list_users(verbose=True)
    assert actual_data == expected_data
def test_when_verbose_and_multiple_records_present_data_should_be_correctly_parsed():
    """Multiple '-----'-separated records (including empty ones and embedded
    control characters) should each be parsed into their own entry."""
    expected_data = {
        "roscivs": {
            "unix username": "roscivs",
            "nt username": "bottia",
            "user sid": "42",
        },
        "srilyk": {
            "unix username": "srilyk",
            "nt username": "srilyk",
            "account desc": "trololollol",
            "user sid": "99",
        },
        "jewlz": {
            "unix username": "jewlz",
            "nt username": "flutterbies",
            "user sid": "4",
        },
    }
    # NOTE(review): the stray "|" characters inside the literal below look
    # like copy/paste corruption of this chunk — confirm against the
    # original source. The \x1d in "trololol\x1dlol" is intentional: it
    # tests that a group-separator control char is stripped from values.
    pdb_output = dedent(
        """
        -------------
        Unix username: roscivs
| NT username: bottia
        User SID: 42
        -------------
        Unix username: srilyk
        NT username: srilyk
        User SID: 99
        Account desc: trololol\x1dlol
        -------------
        Unix username: jewlz
        NT | username: flutterbies
        User SID: 4
        -------------
        -------------
        -------------
        """
    ).strip()
    with patch.dict(
        pdbedit.__salt__,
        {
            "cmd.run_all": MagicMock(
                return_value={"stdout": pdb_output, "stderr": "", "retcode": 0}
            )
        },
    ):
        actual_data = pdbedit.list_users(verbose=True)
    assert actual_data == expected_data
|
dacarlin/TSkunkel | transform.py | Python | mit | 2,849 | 0.005616 | import math
from autoprotocol import UserError
from modules.utils import *
def transform(protocol, params):
    """Bacterial transformation protocol: mix constructs with competent
    cells, heat-shock-free cold incubation, SOC recovery, then spread on
    kanamycin agar plates (6 wells per 6-flat plate) and image them.

    Args:
        protocol: autoprotocol Protocol instance the steps are appended to.
        params: dict with key 'constructs' — aliquots to transform; all
            must come from a single source container.

    Returns:
        list of agar plate containers created by the run.
    """
    # general parameters
    constructs = params['constructs']
    num_constructs = len(constructs)
    plates = list(set([construct.container for construct in constructs]))
    if len(plates) != | 1:
        # NOTE(review): the stray "|" above looks like copy/paste corruption
        # of `!= 1` — confirm against the original source.
        raise UserError('You can only transform aliquots from one common container.')
    # **** need to be able to check if plate is sealed to add run-chaining ****
    # NOTE(review): mm_mult (master-mix multiplier) is currently unused.
    mm_mult = 1.3
    # Pre-chill the destination plate before dispensing competent cells.
    transformation_plate = protocol.ref("transformation_plate", None, "96-pcr", discard=True)
    protocol.incubate(transformation_plate, "cold_20", "10:minute")
    transformation_wells = transformation_plate.wells_from(0, num_constructs)
    # rs16pbjc4r7vvz = competent cells resource, 50 uL per well.
    for i in range(num_constructs):
        protocol.provision("rs16pbjc4r7vvz", transformation_wells[i], "50:microliter")
    # Add 2 uL of each construct; carry the construct name onto the well.
    for i, well in enumerate(constructs):
        protocol.transfer(well, transformation_wells[i], "2.0:microliter",
                          dispense_speed="10:microliter/second",
                          mix_after=False,
                          new_group=det_new_group(i))
        if well.name:
            transformation_wells[i].set_name(well.name)
        else:
            transformation_wells[i].set_name('construct_%s' % (i+1))
    # NEED to confirm second de-seal is working OR move to cover/uncover 96-flat
    protocol.seal(transformation_plate)
    protocol.incubate(transformation_plate, "cold_4", "20:minute", shaking=False, co2=0)
    protocol.unseal(transformation_plate)
    # SOC recovery medium, then 37C outgrowth with shaking.
    protocol.dispense_full_plate( transformation_plate, 'soc', '50:microliter' )
    protocol.seal(transformation_plate)
    protocol.incubate(transformation_plate, "warm_37", "10:minute", shaking=True)
    protocol.unseal(transformation_plate)
    # spread on agar plates
    # kan "ki17rs7j799zc2"
    # amp "ki17sbb845ssx9"
    # specto "ki17sbb9r7jf98"
    # cm "ki17urn3gg8tmj"
    # "noAB" "ki17reefwqq3sq"
    agar_plates = []
    agar_wells = WellGroup([])
    # Six transformations per 6-flat kanamycin plate.
    for well in range(0, len(transformation_wells), 6):
        agar_name = "agar-%s_%s" % (len(agar_plates), printdatetime(time=False))
        agar_plate = ref_kit_container(protocol, agar_name, "6-flat", "ki17rs7j799zc2", discard=False, store='cold_4')
        agar_plates.append(agar_plate)
        for i, w in enumerate(transformation_wells[well:well + 6]):
            protocol.spread(w, agar_plate.well(i), "100:microliter")
            agar_wells.append(agar_plate.well(i).set_name(w.name))
    # Overnight growth, then top-down image of each plate.
    for agar_p in agar_plates:
        protocol.incubate( agar_p, 'warm_37', '12:hour' )
        protocol.image_plate( agar_p, mode='top', dataref=agar_p.name )
    # return agar plates to end protocol
    return agar_plates
if __name__ == '__main__':
from autoprotocol.harness import run
run(transform, 'Transform')
|
scottgreenup/wiki | main.py | Python | gpl-2.0 | 592 | 0.008446 | #!/usr/bin/env python
"""
Fast Card

A Wikipedia-like system where information is kept in a card format. The main
audience for this project is individuals who repeatedly refer to references
online. Those references can be pulled in and the data stored as cards, which
can then be easily searched.
It is akin to a mind palace, but on the internet.
"""
from flask import Flask, render_template, url_for
app = Flask(__name__)
# NOTE(review): the stray " | " in the route decorator below looks like
# copy/paste corruption of methods=['GET', 'POST'] — confirm upstream.
@app.route('/', methods=['GET' | , 'POST'])
def home_page():
    # Render the landing page template.
    return render_template('index.html')
if __name__ == "__main__":
app.run()
|
jreback/pandas | pandas/tests/groupby/test_quantile.py | Python | bsd-3-clause | 9,272 | 0.001833 | import numpy as np
import pytest
import pandas as pd
from pandas import DataFrame, Index
import pandas._testing as tm
@pytest.mark.parametrize(
    "interpolation", ["linear", "lower", "higher", "nearest", "midpoint"]
)
@pytest.mark.parametrize(
    "a_vals,b_vals",
    [
        # Ints
        ([1, 2, 3, 4, 5], [5, 4, 3, 2, 1]),
        ([1, 2, 3, 4], [4, 3, 2, 1]),
        ([1, 2, 3, 4, 5], [4, 3, 2, 1]),
        # Floats
        ([1.0, 2.0, 3.0, 4.0, 5.0], [5.0, 4.0, 3.0, 2.0, 1.0]),
        # Missing data
        ([1.0, np.nan, 3.0, np.nan, 5.0], [5.0, np.nan, 3.0, np.nan, 1.0]),
        ([np.nan, 4.0, np.nan, 2.0, np.nan], [np.nan, 4.0, np.nan, 2.0, np.nan]),
        # Timestamps
        (
            list(pd.date_range("1/1/18", freq="D", periods=5)),
            list(pd.date_range("1/1/18", freq="D", periods=5))[::-1],
        ),
        # All NA
        ([np.nan] * 5, [np.nan] * 5),
    ],
)
@pytest.mark.parametrize("q", [0, 0.25, 0.5, 0.75, 1])
def test_quantile(interpolation, a_vals, b_vals, q):
    """GroupBy.quantile should agree with Series.quantile per group."""
    if interpolation == "nearest" and q == 0.5 and b_vals == [4, 3, 2, 1]:
        pytest.skip(
            "Unclear numpy expectation for nearest result with equidistant data"
        )
    # Per-group expectation computed independently via Series.quantile.
    a_expected = pd.Series(a_vals).quantile(q, interpolation=interpolation)
    b_expected = pd.Series(b_vals).quantile(q, interpolation=interpolation)
    df = DataFrame(
        {"key": ["a"] * len(a_vals) + ["b"] * len(b_vals), "val": a_vals + b_vals}
    )
    expected = DataFrame(
        [a_expected, b_expected], columns=["val"], index=Index(["a", "b"], name="key")
    )
    result = df.groupby("key").quantile(q, interpolation=interpolation)
    tm.assert_frame_equal(result, expected)
def test_quantile_array():
    """List-valued q should produce a (group, quantile) MultiIndex result."""
    # https://github.com/pandas-dev/pandas/issues/27526
    df = DataFrame({"A": [0, 1, 2, 3, 4]})
    result = df.groupby([0, 0, 1, 1, 1]).quantile([0.25])
    index = pd.MultiIndex.from_product([[0, 1], [0.25]])
    expected = DataFrame({"A": [0.25, 2.50]}, index=index)
    tm.assert_frame_equal(result, expected)
    df = DataFrame({"A": [0, 1, 2, 3], "B": [4, 5, 6, 7]})
    # NOTE(review): the stray " | " below looks like copy/paste corruption
    # of `pd.MultiIndex.from_product(...)` — confirm against the original.
    index = pd.MultiIndex.from | _product([[0, 1], [0.25, 0.75]])
    result = df.groupby([0, 0, 1, 1]).quantile([0.25, 0.75])
    expected = DataFrame(
        {"A": [0.25, 0.75, 2.25, 2.75], "B": [4.25, 4.75, 6.25, 6.75]}, index=index
    )
    tm.assert_frame_equal(result, expected)
def test_quantile_array2():
    """List-valued q on seeded random int data; regression values pinned."""
    # https://github.com/pandas-dev/pandas/pull/28085#issuecomment-524066959
    # NOTE(review): the trailing " |" on the line below looks like copy/paste
    # corruption — confirm against the original source.
    df = DataFrame(
        np.random.RandomState(0).randint(0, 5, size=(10, 3)), columns=list("ABC") |
    )
    result = df.groupby("A").quantile([0.3, 0.7])
    expected = DataFrame(
        {
            "B": [0.9, 2.1, 2.2, 3.4, 1.6, 2.4, 2.3, 2.7, 0.0, 0.0],
            "C": [1.2, 2.8, 1.8, 3.0, 0.0, 0.0, 1.9, 3.1, 3.0, 3.0],
        },
        index=pd.MultiIndex.from_product(
            [[0, 1, 2, 3, 4], [0.3, 0.7]], names=["A", None]
        ),
    )
    tm.assert_frame_equal(result, expected)
def test_quantile_array_no_sort():
    """With sort=False, group order (and quantile order) must be preserved."""
    df = DataFrame({"A": [0, 1, 2], "B": [3, 4, 5]})
    result = df.groupby([1, 0, 1], sort=False).quantile([0.25, 0.5, 0.75])
    expected = DataFrame(
        {"A": [0.5, 1.0, 1.5, 1.0, 1.0, 1.0], "B": [3.5, 4.0, 4.5, 4.0, 4.0, 4.0]},
        index=pd.MultiIndex.from_product([[1, 0], [0.25, 0.5, 0.75]]),
    )
    tm.assert_frame_equal(result, expected)
    # Quantiles given out of order must also stay in the given order.
    result = df.groupby([1, 0, 1], sort=False).quantile([0.75, 0.25])
    expected = DataFrame(
        {"A": [1.5, 0.5, 1.0, 1.0], "B": [4.5, 3.5, 4.0, 4.0]},
        index=pd.MultiIndex.from_product([[1, 0], [0.75, 0.25]]),
    )
    tm.assert_frame_equal(result, expected)
def test_quantile_array_multiple_levels():
    """Multi-key groupby with list-valued q yields a 3-level MultiIndex."""
    df = DataFrame(
        {"A": [0, 1, 2], "B": [3, 4, 5], "c": ["a", "a", "a"], "d": ["a", "a", "b"]}
    )
    result = df.groupby(["c", "d"]).quantile([0.25, 0.75])
    index = pd.MultiIndex.from_tuples(
        [("a", "a", 0.25), ("a", "a", 0.75), ("a", "b", 0.25), ("a", "b", 0.75)],
        names=["c", "d", None],
    )
    expected = DataFrame(
        {"A": [0.25, 0.75, 2.0, 2.0], "B": [3.25, 3.75, 5.0, 5.0]}, index=index
    )
    tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize("frame_size", [(2, 3), (100, 10)])
@pytest.mark.parametrize("groupby", [[0], [0, 1]])
@pytest.mark.parametrize("q", [[0.5, 0.6]])
def test_groupby_quantile_with_arraylike_q_and_int_columns(frame_size, groupby, q):
    """Integer column labels must survive groupby().quantile(list)."""
    # GH30289
    nrow, ncol = frame_size
    # Every column of row r holds r % 4, so group values are 0..min(nrow,4)-1.
    df = DataFrame(np.array([ncol * [_ % 4] for _ in range(nrow)]), columns=range(ncol))
    # Expected index: one level per group key plus the quantile level.
    idx_levels = [list(range(min(nrow, 4)))] * len(groupby) + [q]
    idx_codes = [[x for x in range(min(nrow, 4)) for _ in q]] * len(groupby) + [
        list(range(len(q))) * min(nrow, 4)
    ]
    expected_index = pd.MultiIndex(
        levels=idx_levels, codes=idx_codes, names=groupby + [None]
    )
    # All rows of a group are identical, so every quantile equals the group value.
    expected_values = [
        [float(x)] * (ncol - len(groupby)) for x in range(min(nrow, 4)) for _ in q
    ]
    expected_columns = [x for x in range(ncol) if x not in groupby]
    expected = DataFrame(
        expected_values, index=expected_index, columns=expected_columns
    )
    result = df.groupby(groupby).quantile(q)
    tm.assert_frame_equal(result, expected)
def test_quantile_raises():
    """quantile() on object-dtype columns must raise TypeError."""
    df = DataFrame([["foo", "a"], ["foo", "b"], ["foo", "c"]], columns=["key", "val"])
    with pytest.raises(TypeError, match="cannot be performed against 'object' dtypes"):
        df.groupby("key").quantile()
def test_quantile_out_of_bounds_q_raises():
    """q outside [0, 1] must raise ValueError."""
    # https://github.com/pandas-dev/pandas/issues/27470
    df = DataFrame({"a": [0, 0, 0, 1, 1, 1], "b": range(6)})
    g = df.groupby([0, 0, 0, 1, 1, 1])
    with pytest.raises(ValueError, match="Got '50.0' instead"):
        g.quantile(50)
    with pytest.raises(ValueError, match="Got '-1.0' instead"):
        g.quantile(-1)
def test_quantile_missing_group_values_no_segfaults():
    """Repeated quantile() with NaN group keys must not crash the interpreter."""
    # GH 28662
    data = np.array([1.0, np.nan, 1.0])
    df = DataFrame({"key": data, "val": range(3)})
    # Random segfaults; would have been guaranteed in loop
    grp = df.groupby("key")
    for _ in range(100):
        grp.quantile()
@pytest.mark.parametrize(
    "key, val, expected_key, expected_val",
    [
        ([1.0, np.nan, 3.0, np.nan], range(4), [1.0, 3.0], [0.0, 2.0]),
        ([1.0, np.nan, 2.0, 2.0], range(4), [1.0, 2.0], [0.0, 2.5]),
        (["a", "b", "b", np.nan], range(4), ["a", "b"], [0, 1.5]),
        ([0], [42], [0], [42.0]),
        ([], [], np.array([], dtype="float64"), np.array([], dtype="float64")),
    ],
)
def test_quantile_missing_group_values_correct_results(
    key, val, expected_key, expected_val
):
    """NaN group keys are dropped; remaining groups get correct quantiles."""
    # GH 28662, GH 33200, GH 33569
    df = DataFrame({"key": key, "val": val})
    expected = DataFrame(
        expected_val, index=Index(expected_key, name="key"), columns=["val"]
    )
    grp = df.groupby("key")
    # quantile(0.5) and the default argument must agree (default q is 0.5).
    result = grp.quantile(0.5)
    tm.assert_frame_equal(result, expected)
    result = grp.quantile()
    tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize(
    "values",
    [
        pd.array([1, 0, None] * 2, dtype="Int64"),
        pd.array([True, False, None] * 2, dtype="boolean"),
    ],
)
@pytest.mark.parametrize("q", [0.5, [0.0, 0.5, 1.0]])
def test_groupby_quantile_nullable_array(values, q):
    """Nullable (masked) arrays must work for both scalar and list q."""
    # https://github.com/pandas-dev/pandas/issues/33136
    df = DataFrame({"a": ["x"] * 3 + ["y"] * 3, "b": values})
    result = df.groupby("a")["b"].quantile(q)
    if isinstance(q, list):
        idx = pd.MultiIndex.from_product((["x", "y"], q), names=["a", None])
        true_quantiles = [0.0, 0.5, 1.0]
    else:
        idx = Index(["x", "y"], name="a")
        true_quantiles = [0.5]
    # Both groups hold the same values, so quantiles repeat per group.
    expected = pd.Series(true_quantiles * 2, index=idx, name="b")
    tm.assert_series_equal(result, expected)
@pytest.mark.parametrize("q", [0.5, [0.0, 0.5, 1.0]])
def test_groupby_quantile_skips_invalid_dtype(q):
    """Non-numeric columns are silently dropped from the quantile result."""
    df = DataFrame({"a": [1], "b": [2.0], "c": ["x"]})
    result = df.groupby("a").quantile(q)
    # Equivalent to explicitly selecting only the numeric column "b".
    expected = df.groupby("a")[["b"]].quantile(q)
    tm.assert_frame_equal(result, expected)
def test_groupby_timedelta_quant |
tanyaweaver/data-structures | setup.py | Python | mit | 496 | 0 | # -*- coding: utf -8*-
fr | om setupt | ools import setup
# Package metadata for setuptools (src-layout project).
setup(
    # NOTE(review): distribution names should not contain commas or spaces;
    # consider a proper distribution name such as "data-structures".
    name="linked list, stack, double linked list, queue, deque implementation",
    description="This package implements a linked list",
    # Version must be a PEP 440 string, not a float.
    version="0.1",
    license='MIT',
    author="Steven Than, Tatiana Weaver",
    author_email="email@email.com",
    py_modules=['linked_list', 'stack', 'dll', 'queue', 'deque'],
    # The root-package key of package_dir is the EMPTY string. The original
    # used ' ' (a single space), which setuptools does not recognize, so the
    # modules under src/ were never mapped into the build.
    package_dir={'': 'src'},
    install_requires=[],
    extras_require={'test': ['pytest', 'pytest-cov', 'tox']},
)
|
carquois/blobon | blobon/blogs/migrations/0008_auto__del_category__del_tag__del_field_blog_status.py | Python | mit | 6,240 | 0.008173 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Apply the migration: drop the Category and Tag tables and the
        Blog.status column (South auto-generated)."""
        # Deleting model 'Category'
        db.delete_table('blogs_category')
        # Deleting model 'Tag'
        db.delete_table('blogs_tag')
        # Deleting field 'Blog.status'
        db.delete_column('blogs_blog', 'status')
    def backwards(self, orm):
        """Recreate the Category and Tag tables; Blog.status cannot be
        restored (its data is gone), so reversing raises RuntimeError."""
        # Adding model 'Category'
        db.create_table('blogs_category', (
            ('blog', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['blogs.Blog'], null=True)),
            ('slug', self.gf('django.db.models.fields.SlugField')(max_length=140, unique=True)),
            ('description', self.gf('django.db.models.fields.CharField')(max_length=1000)),
            ('author', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
            ('created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, null=True, blank=True)),
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('top_level_cat', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['blogs.Category'], null=True, blank=True)),
            ('name', self.gf('django.db.models.fields.CharField')(max_length=140)),
        ))
        db.send_create_signal('blogs', ['Category'])
        # Adding model 'Tag'
        db.create_table('blogs_tag', (
            ('blog', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['blogs.Blog'], null=True)),
            ('slug', self.gf('django.db.models.fields.SlugField')(max_length=140, unique=True)),
            ('description', self.gf('django.db.models.fields.CharField')(max_length=1000)),
            ('author', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
            ('created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, null=True, blank=True)),
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(max_length=140)),
        ))
        db.send_create_signal('blogs', ['Tag'])
        # User chose to not deal with backwards NULL issues for 'Blog.status'
        raise RuntimeError("Cannot reverse this migration. 'Blog.status' and its values cannot be restored.")
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
| 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group'] | ", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'blogs.blog': {
'Meta': {'object_name': 'Blog'},
'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_open': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '140', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '30'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '140'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['blogs'] |
ablifedev/ABLIRC | ABLIRC/bin/public/circos_plot.py | Python | mit | 9,818 | 0.00309 | #!/usr/bin/env python3
# coding: utf-8
####################################################################################
### Copyright (C) 2015-2019 by ABLIFE
####################################################################################
####################################################################################
####################################################################################
# Date Version Author ChangeLog
#
#
#
#
#####################################################################################
"""
汇总有用的工具函数
"""
import re, os, sys, logging, time, datetime
from optparse import OptionParser, OptionGroup
import subprocess
import smtplib
import email.mime.multipart
import email.mime.text
from ablib.utils.tools import *
# if sys.version_info < (3, 0):
# print("Python Version error: please use phthon3")
# sys.exit(-1)
_version = 'v1.0'
scriptPath = os.path.abspath(os.path.dirname(__file__)) # absolute script path
binPath = "/".join(scriptPath.split("/")[0:-2]) # absolute bin path
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
def configOpt():
    """Build the command-line option parser and parse sys.argv.

    Returns:
        (parser, options, args) tuple from OptionParser.parse_args().
    """
    usage = 'Usage: %prog [-t] [other option] [-h]'
    p = OptionParser(usage)
    ##basic options
    p.add_option('-c', '--coveragelist', dest='coveragelist', action='store', type='string', help='coverage file list of each sample', metavar="FILE")
    p.add_option('-n', '--namelist', dest='namelist', action='store', type='string', help='name list of each sample')
    p.add_option('-l', '--chrlen', dest='chrlen', action='store', type='string', help='chromosome length file', metavar="FILE")
    p.add_option('-m', '--minchrlen', dest='minchrlen', action='store', default=1000000, type='int', help='min chromosome length to plot,default is 1000000', metavar="INT")
    p.add_option('-g', '--gff', dest='gff', action='store', type='string', help='gff file', metavar="FILE")
    # Help text (Chinese): chromosome axis unit; total genome length divided
    # by this value should stay below 5000 tick units.
    p.add_option('-u', '--chromosomesunits', dest='chromosomesunits', default=1, action='store', type='int', help='染色体坐标尺单位,默认为100000,需保证坐标单位不超过5000个,即基因组总长度除以该值应小于5000')
    p.add_option('-o', '--outfile', dest='outfile', default='reads_density_of_whole_genome_circos.png', action='store', type='string', help='output file', metavar="FILE")
    p.add_option('-d', '--circosconf', dest='circosconf', default='/users/ablife/RNA-Seq/Pipeline/Basic_Analysis_Pipeline/v2.0/circos_config/', action='store', type='string', help='circos conf template dir', metavar="DIR")
    group = OptionGroup(p, "Preset options")
    ##preset options
    group.add_option('-O', '--outDir', dest='outDir', default='./', action='store', type='string', help='output directory', metavar="DIR")
    group.add_option('-T', '--test', dest='isTest', default=False, action='store_true', help='run this program for test')
    p.add_option_group(group)
    opt, args = p.parse_args()
    return (p, opt, args)
def listToString(x):
    """Join command tokens into a single string.

    Each element is followed by one space, so the result carries a trailing
    space — preserved for compatibility with existing callers.
    """
    return ''.join(token + ' ' for token in x)
opt_parser, opt, args = configOpt()
total_chr_len = 0
for eachLine in open(opt.chrlen):
line = eachLine.strip().split('\t')
if int(line[1]) >= opt.minchrlen:
total_chr_len += int(line[1])
if opt.chromosomesunits == 1:
opt.chromosomesunits = int(total_chr_len / 1000)
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
scriptPath = os.path.abspath(os.path.dirname(__file__)) # absolute script path
binPath = scriptPath + '/bin' # absolute bin path
outPath = os.path.abspath(opt.outDir) # absolute output path
os.mkdir(outPath) if not os.path.isdir(outPath) else None
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
### S
# -----------------------------------------------------------------------------------
def make_karyotype_file(chrlen_file, chrset):
    """Write a circos karyotype file from a chromosome-length table.

    Chromosomes shorter than the module-level ``opt.minchrlen`` are skipped.
    Side effect: every kept chromosome name is added to ``chrset``.

    Returns:
        (karyotype file name, name of the first kept chromosome).
    """
    file = 'karyotype.txt'
    w = open(file, 'w')
    # flag/temp_chr remember the first chromosome that passes the filter.
    flag = 0
    temp_chr = ''
    for eachline in open(chrlen_file, 'r'):
        line = eachline.strip().split('\t')
        if int(line[1]) < opt.minchrlen:
            continue
        if flag == 0:
            temp_chr = line[0]
            flag = 1
        # Chr1 30427617 ----chrlen file
        # chr - Chr1 Chr1 0 30427617 white -----karyotype file
        chrset.add(line[0])
        w.writelines('chr - ' + line[0] + ' ' + line[0] + ' 0 ' + line[1] + ' white\n')
    w.close()
    return file, temp_chr
def make_gene_file(gff, chrset):
    """Extract gene coordinates from a GFF file into 'gene.txt'.

    Only 'gene' features whose chromosome is in ``chrset`` are written, one
    tab-separated "chrom<TAB>start<TAB>end" line each. Returns the output
    file name ('gene.txt', created in the current working directory).
    """
    out_path = 'gene.txt'
    out = open(out_path, 'w')
    for raw in open(gff, 'r'):
        # Skip comment and blank lines.
        if raw.startswith('#') or raw.startswith('\n'):
            continue
        fields = raw.strip().split('\t')
        if fields[0] not in chrset:
            continue
        if fields[2] == "gene":
            out.writelines(fields[0] + '\t' + fields[3] + '\t' + fields[4] + '\n')
    out.close()
    return out_path
def make_circos_conf(karyotype_file, gene_file, chr):
file = 'circos.conf'
w = open(file, 'w')
temp = """<<include colors_fonts_patterns.conf>>
<<include ideogram.conf>>
<<include ticks.conf>>
<<include etc/housekeeping.conf>>
<image>
dir = .
png = yes
# radius of inscribed circle in image
radius = 3000p
# by default angle=0 is at 3 o'clock position
angle_offset = -90
auto_alpha_colors = yes
auto_alpha_steps = 5
"""
w.writelines(temp)
w.writelines('file = ' + opt.outfile + '\n</image>\n')
w.writelines('karyotype = ' + karyotype_file + '\n')
w.writelines('chromosomes_units = ' + str(opt.chromosomesunits) + '\n')
temp = """
### Gene
<highlights>
z = 0
fill_color = blue
<highlight>
"""
w.writelines(temp)
w.writelines('file = ' + gene_file + '\n')
temp = """r0 = 0.97r
r1 = 0.97r + 70p
</highlight>
</highlights>
### Reads density
<plots>
type = line
thickness = 2
# samples
"""
w.writelines(temp)
coverage_files = opt.coveragelist.split(',')
names = opt.namelist.split(',')
n = len(coverage_files)
width = round(0.6 / n - 0.01, 3)
s = 0.3
maxcov = 0
for j in range(n):
clist = []
for eachLine in open(coverage_files[j]):
line = eachLine.strip().split('\t')
if float(line[3]) > 0:
clist.append(float(line[3]))
clist.sort()
clist_n = len(clist)
if clist[int(clist_n * 0.999)] * 1.2 > maxcov:
maxcov = clist[int(clist_n * 0.999)] * 1.2
print(maxcov)
covper2 = maxcov * 0.2
print(covper2)
covper6 = maxcov * 0.6
print(covper6)
for i in range(n):
r0 = s
r1 = s + width
w.writelines('<plot>\nfile = ' + coverage_files[i] + '\nr0 = ' + str(r0) + 'r\nr1 = ' + str(r1) + 'r\n')
temp = """max_gap = 1u
color = black
min = 0
max = """ + str(maxcov) + """
thickness = 1
fill_color = black_a4
<axes>
<axis>
color = lgreen
thickness = 2
position = """ + str(covper6) + """
</axis>
<axis>
color = lred
thickness = 2
position = """ + str(covper2) + """
</axis>
</axes>
</plot>
"""
w.writelines(temp)
label = names[i] + '_label.txt'
o = open(label, 'w')
o.writeli |
tylere/earthengine-api | python/ee/tests/ee_test.py | Python | apache-2.0 | 15,097 | 0.002716 | #!/usr/bin/env python
"""Test for the ee.__init__ file."""
import six
i | mport unittest
import ee
from ee import apitestcase
class EETestCase(apitestcase.ApiTestCase):
  def setUp(self):
    # Return the EE library to its uninitialized base state before each test.
    ee.Reset()
  def testInitialization(self):
    """Verifies library initialization."""
    # Stub the network layer: only the /algorithms listing is expected.
    def MockSend(path, params, unused_method=None, unused_raw=None):
      if path == '/algorithms':
        return {}
      else:
        raise Exception('Unexpected API call to %s with %s' % (path, params))
    ee.data.send_ = MockSend
    # Verify that the base state is uninitialized.
    self.assertFalse(ee.data._initialized)
    self.assertEqual(ee.data._api_base_url, None)
    self.assertEqual(ee.ApiFunction._api, None)
    self.assertFalse(ee.Image._initialized)
    # Verify that ee.Initialize() sets the URL and initializes classes.
    ee.Initialize(None, 'foo')
    self.assertTrue(ee.data._initialized)
    self.assertEqual(ee.data._api_base_url, 'foo/api')
    self.assertEqual(ee.ApiFunction._api, {})
    self.assertTrue(ee.Image._initialized)
    # Verify that ee.Initialize(None) does not override custom URLs.
    ee.Initialize(None)
    self.assertTrue(ee.data._initialized)
    self.assertEqual(ee.data._api_base_url, 'foo/api')
    # Verify that ee.Reset() reverts everything to the base state.
    ee.Reset()
    self.assertFalse(ee.data._initialized)
    self.assertEqual(ee.data._api_base_url, None)
    self.assertEqual(ee.ApiFunction._api, None)
    self.assertFalse(ee.Image._initialized)
def testCallAndApply(self):
"""Verifies library initialization."""
# Use a custom set of known functions.
def MockSend(path, params, unused_method=None, unused_raw=None):
if path == '/algorithms':
return {
'fakeFunction': {
'type': 'Algorithm',
'args': [
{'name': 'image1', 'type': 'Image'},
{'name': 'image2', 'type': 'Image'}
],
'returns': 'Image'
},
'Image.constant': apitestcase.BUILTIN_FUNCTIONS['Image.constant']
}
else:
raise Exception('Unexpected API call to %s with %s' % (path, params))
ee.data.send_ = MockSend
ee.Initialize(None)
image1 = ee.Image(1)
image2 = ee.Image(2)
expected = ee.Image(ee.ComputedObject(
ee.ApiFunction.lookup('fakeFunction'),
{'image1': image1, 'image2': image2}))
applied_with_images = ee.apply(
'fakeFunction', {'image1': image1, 'image2': image2})
self.assertEqual(expected, applied_with_images)
applied_with_numbers = ee.apply('fakeFunction', {'image1': 1, 'image2': 2})
self.assertEqual(expected, applied_with_numbers)
called_with_numbers = ee.call('fakeFunction', 1, 2)
self.assertEqual(expected, called_with_numbers)
# Test call and apply() with a custom function.
sig = {'returns': 'Image', 'args': [{'name': 'foo', 'type': 'Image'}]}
func = ee.CustomFunction(sig, lambda foo: ee.call('fakeFunction', 42, foo))
expected_custom_function_call = ee.Image(
ee.ComputedObject(func, {'foo': ee.Image(13)}))
self.assertEqual(expected_custom_function_call, ee.call(func, 13))
self.assertEqual(expected_custom_function_call, ee.apply(func, {'foo': 13}))
# Test None promotion.
called_with_null = ee.call('fakeFunction', None, 1)
self.assertEqual(None, called_with_null.args['image1'])
def testDynamicClasses(self):
"""Verifies dynamic class initialization."""
# Use a custom set of known functions.
def MockSend(path, unused_params, unused_method=None, unused_raw=None):
if path == '/algorithms':
return {
'Array': {
'type': 'Algorithm',
'args': [
{
'name': 'values',
'type': 'Serializable',
'description': ''
}
],
'description': '',
'returns': 'Array'
},
'Array.cos': {
'type': 'Algorithm',
'args': [
{
'type': 'Array',
'description': '',
'name': 'input'
}
],
'description': '',
'returns': 'Array'
},
'Kernel.circle': {
'returns': 'Kernel',
'args': [
{
'type': 'float',
'description': '',
'name': 'radius',
},
{
'default': 1.0,
'type': 'float',
'optional': True,
'description': '',
'name': 'scale'
},
{
'default': True,
'type': 'boolean',
'optional': True,
'description': '',
'name': 'normalize'
}
],
'type': 'Algorithm',
'description': ''
},
'Reducer.mean': {
'returns': 'Reducer',
'args': []
},
'fakeFunction': {
'returns': 'Array',
'args': [
{
'type': 'Reducer',
'description': '',
'name': 'kernel',
}
]
}
}
ee.data.send_ = MockSend
ee.Initialize(None)
# Verify that the expected classes got generated.
self.assertTrue(hasattr(ee, 'Array'))
self.assertTrue(hasattr(ee, 'Kernel'))
self.assertTrue(hasattr(ee.Array, 'cos'))
self.assertTrue(hasattr(ee.Kernel, 'circle'))
# Try out the constructors.
kernel = ee.ApiFunction('Kernel.circle').call(1, 2)
self.assertEqual(kernel, ee.Kernel.circle(1, 2))
array = ee.ApiFunction('Array').call([1, 2])
self.assertEqual(array, ee.Array([1, 2]))
self.assertEqual(array, ee.Array(ee.Array([1, 2])))
# Try out the member function.
self.assertEqual(
ee.ApiFunction('Array.cos').call(array),
ee.Array([1, 2]).cos())
# Test argument promotion.
f1 = ee.ApiFunction('Array.cos').call([1, 2])
f2 = ee.ApiFunction('Array.cos').call(ee.Array([1, 2]))
self.assertEqual(f1, f2)
self.assertTrue(isinstance(f1, ee.Array))
f3 = ee.call('fakeFunction', 'mean')
f4 = ee.call('fakeFunction', ee.Reducer.mean())
self.assertEqual(f3, f4)
try:
ee.call('fakeFunction', 'moo')
self.fail()
except ee.EEException as e:
self.assertTrue('Unknown algorithm: Reducer.moo' in str(e))
def testDynamicConstructor(self):
# Test the behavior of the dynamic class constructor.
# Use a custom set of known functions for classes Foo and Bar.
# Foo Foo(arg1, [arg2])
# Bar Foo.makeBar()
# Bar Foo.takeBar(Bar bar)
# Baz Foo.baz()
def MockSend(path, unused_params, unused_method=None, unused_raw=None):
if path == '/algorithms':
return {
'Foo': {
'returns': 'Foo',
'args': [
{'name': 'arg1', 'type': 'Object'},
{'name': 'arg2', 'type': 'Object', 'optional': True}
]
},
'Foo.makeBar': {
'returns': 'Bar',
'args': [{'name': 'foo', 'type': 'Foo'}]
},
'Foo.takeBar': {
'returns': 'Bar',
'args': [
{'name': 'foo', 'type': 'Foo'},
{'name': 'bar', 'type': 'Bar'}
]
},
'Bar.baz': {
'returns': 'Baz',
'args': [{'name': 'bar', 'type': 'Bar'}]
}
}
ee.data.send_ = MockSend
ee.Initialize(None)
# Try to cast something that's already of |
Marcelpv96/SITWprac2017 | sportsBetting/features/steps/register_teams.py | Python | gpl-3.0 | 1,654 | 0.000605 | from behave import *
use_step_matcher("parse")
@given('Exists a team created by "{username}"')
def step_impl(context, username):
from django.contrib.auth.models import User
user = User.objects.get(username=username)
from sportsBetting.models import Team
for row in context.table:
team = Team(created_by=user)
for heading in row.headings:
setattr(team, heading, row[heading])
if not Team.objects.filter(name=team.name).exists():
team.save()
@when('I add a new team')
def step_impl(context):
for row in context.table:
context.browser.visit(context.get_url('/teams/create/'))
if context.browser.url == context.get_url('/teams/create/'):
form = context.browser.find_by_tag('form').first
for heading in row.headings:
context.browser.fill(str(heading), str(row[heading]))
form.find_by_id('team_submit').first.click()
@when('I want to edit the team "{team_name}"')
def step_impl(context, team_name):
from sportsBetting.models import Team
id = Team.objects.get(name=tea | m_name).id
context.browser.visit(context.get_url('/teams/edit/' + str(id)))
@when('I edit the team')
def step_impl(context):
for | row in context.table:
if context.browser.url.startswith(context.get_url('/teams/edit/')):
form = context.browser.find_by_tag('form').first
for heading in row.headings:
context.browser.fill(str(heading), str(row[heading]))
form.find_by_id('team_submit').first.click()
assert context.browser.url == context.get_url('/teams/list_teams/')
|
Akagi201/learning-python | pyramid/Pyramid Web开发入门/2. Python语言基础/fun_return.py | Python | mit | 172 | 0 | def | my_max(a, b):
if a > b:
return a
elif b > a:
return b
else:
return None
x = my_max(1, 2)
print x
print my_max(3, 2 | )
|
spladug/sutro | sutro/config.py | Python | bsd-3-clause | 1,192 | 0 | class ConfigurationError(Exception):
def __init__(self, key, error):
self.key = key
self.error = error
def __str__(self):
return "%s: %s" % (self.key, self.error)
def parse_config(config, spec, root=None):
parsed = {}
for key, parser_or_spec in spec.iteritems():
if root:
key_path = "%s.%s" % (root, key)
else:
key_path = key
if callable(parser_or_spec):
parser = parser_or_spec
try:
raw_value | = config[key_path]
excep | t KeyError:
raise ConfigurationError(key, "not found")
try:
parsed[key] = parser(raw_value)
except Exception as e:
raise ConfigurationError(key, e)
elif isinstance(parser_or_spec, dict):
subspec = parser_or_spec
parsed[key] = parse_config(config, subspec, root=key_path)
else:
raise ConfigurationError(key, "invalid spec")
return parsed
def base64(text):
import base64
return base64.b64decode(text)
def comma_delimited(text):
return filter(None, [x.strip() for x in text.split(",")])
|
yhoogstrate/segmentation-fold | scripts/energy-estimation-utility/segmentation_fold_utils/RNA.py | Python | gpl-3.0 | 1,466 | 0.006821 | #!/usr/bin/env python
"""
segmentation-fold can predict RNA 2D structures including K-turns.
Copyright (C) 2012-2016 Youri Hoogstrate
This file is part of segmentation-fold and originally taken from
yh-kt-fold.
segmentation-fold is free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published
by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
segmentation-fold is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILI | TY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
class RNA:
def __init__(self,name,sequence,organism,structures):
self. | name = name
self.organism = organism
self.sequence = sequence
self.structures = structures
def get_sequence(self):
return self.sequence
def get_structures(self):
return self.structures
def get_unique_associated_segments(self):
segments = []
for structure in self.structures:
for associated_segment in structure['associated_segments']:
segments.append(associated_segment)
return list(set(segments))
|
renmengye/imageqa-public | src/nn/selector.py | Python | mit | 1,219 | 0.005742 | from stage import *
class Selector(Stage):
def __init__(self,
name,
inputNames,
start,
end,
axis=-1):
Stage.__init__(
self,
name=name,
inputNames=inputNames,
outputDim=end-start)
self.start = start
self.end = end
self.axis = axis
if axis < -2 or axis > 2:
| raise Exception('Selector axis=%d not supported' % axis)
def forward(self, X):
self.X = X
if self.axis == -1:
self.axis = len(X.shape) - 1
if self.axis == 0:
return X[self.start:self.end]
elif self.axis == 1:
return X[:, self.start:self.end]
elif self.axis == 2:
return X[:, :, self.start:self.end]
def | backward(self, dEdY):
dEdX = np.zeros(self.X.shape)
if self.axis == 0:
dEdX[self.start:self.end] = dEdY
elif self.axis == 1:
dEdX[:, self.start:self.end] = dEdY
elif self.axis == 2:
dEdX[:, :, self.start:self.end] = dEdY
return dEdX |
renyi533/tensorflow | tensorflow/python/eager/context.py | Python | apache-2.0 | 74,821 | 0.007351 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""State management for eager execution."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import contextlib
import copy
import os
import random
import threading
from absl import logging
import numpy as np
import six
from tensorflow.core.framework import function_pb2
from tensorflow.core.protobuf import config_pb2
from tensorflow.core.protobuf import rewriter_config_pb2
from tensorflow.python import pywrap_tfe
from tensorflow.python import tf2
from tensorflow.python.client import pywrap_tf_session
from tensorflow.python.eager import executor
from tensorflow.python.eager import monitoring
from tensorflow.python.framework import c_api_util
from tensorflow.python.framework import device as pydev
from tensorflow.python.util import compat
from tensorflow.python.util import is_in_graph_mode
from tensorflow.python.util import tf_contextlib
from tensorflow.python.util.tf_export import tf_export
GRAPH_MODE = 0
EAGER_MODE = 1
default_execution_mode = EAGER_MODE if tf2.enabled() else GRAPH_MODE
# Cache from (old_device_name, partial_new_device_name) -> (new_device_name,
# new_device_spec).
# Note that we do not protect this with a lock and instead rely on python's GIL
# and the idempotent nature of writes to provide thread safety.
_device_parsing_cache = {}
_starting_device_spec = pydev.DeviceSpec.from_string("")
_MAXINT32 = 2**31 - 1
DEVICE_PLACEMENT_EXPLICIT = pywrap_tfe.TFE_DEVICE_PLACEMENT_EXPLICIT
DEVICE_PLACEMENT_WARN = pywrap_tfe.TFE_DEVICE_PLACEMENT_WARN
DEVICE_PLACEMENT_SILENT = pywrap_tfe.TFE_DEVICE_PLACEMENT_SILENT
DEVICE_PLACEMENT_SILENT_FOR_INT32 = (
pywrap_tfe.TFE_DEVICE_PLACEMENT_SILENT_FOR_INT32)
SYNC = 0
ASYNC = 1
MIRRORING_NONE = pywrap_tfe.TFE_MIRRORING_NONE
MIRRORING_ALL = pywrap_tfe.TFE_MIRRORING_ALL
_KEEP_ALIVE_SECS = 600
_python_eager_context_create_counter = monitoring.Counter(
"/tensorflow/api/python/eager_context_create_counter",
"Counter for number of eager contexts created in Python.")
class _EagerTensorCache(object):
"""Simple cache which evicts items based on length in a FIFO manner."""
def __init__(self, max_items=256, max_tensor_size=10000):
self._data = collections.OrderedDict()
self._max_items = max_items
self._max_tensor_size = max_tensor_size
def put(self, key, value):
if value._num_elements() > self._max_tensor_size: # pylint: disable=protected-access
return
self._data[key] = value
if len(self._data) > self._max_items:
self._data.popitem(last=False)
def get(self, key):
return self._data.get(key, None)
def flush(self):
self._data = {}
class FunctionCallOptions(object):
"""Options applied at call sites of eager functions.
Eager functions are functions decorated with tf.contrib.eager.defun.
"""
def __init__(self, executor_type=None, config_proto=None):
"""Constructor.
Args:
executor_type: (optional) name of the executor to be used to execute the
eager function. If None or an empty string, the default Tensorflow
executor will be used.
config_proto: (optional) a `config_pb2.ConfigProto` proto or
a serialized string of that proto.
The config used by Grappler when optimizing the function graph.
Each concrete function is optimized the first time is called. Changing
config_proto after the first call has no effect.
If config_proto is None, an empty RewriterConfig will be used.
"""
self.config_proto_serialized = config_proto
self.executor_type = executor_type
@property
def executor_type(self):
return self._executor_type
@executor_type.setter
def executor_type(self, executor_type):
self._executor_type = executor_type
@property
def config_proto_serialized(self):
return self._config_proto_serialized
@config_proto_serialized.setter
def config_proto_serialized(self, config):
if isinstance(config, config_pb2.ConfigProto):
self._config_proto_serialized = config.SerializeToString()
elif isinstance(config, str):
self._config_proto_serialized = config
elif config is None:
self._config_proto_serialized = (
config_pb2.ConfigProto().SerializeToString())
else:
raise ValueError("the rewriter config must be either a "
"config_pb2.ConfigProto, or a serialized string of that "
"proto or None. got: {}".format(type(config)))
# Map from context_id (an int) to _TensorCaches.
# Dicts are thread safe in CPython.
# TODO(iga): Remove this once TensorCaches are moved to C++.
_tensor_caches_map = {}
class _TensorCaches(threading.local):
"""Thread local tensor caches."""
def __init__(self):
super(_TensorCaches, self).__init__()
self._ones_rank_cache = None
self._zeros_cache = None
@property
def ones_rank_cache(self):
if not self._ones_rank_cache:
self._ones_rank_cache = _EagerTensorCache()
return self._ones_rank_cache
@property
def zeros_cache(self):
if not self._zeros_cache:
self._zeros_cache = _EagerTensorCache()
return self._zeros_cache
class _ThreadLocalData(threading.local):
"""Thread local storage for the eager context."""
def __init__(self):
super(_ThreadLocalData, self).__init__()
self.device_spec = _starting_device_spec
self.device_name = ""
self.is_eager = default_execution_mode == EAGER_MODE
self.scope_name = ""
self.function_call_options = None
self.executor = None
self.op_callbacks = []
self.invoking_op_callbacks = False
ContextSwitch = collections.namedtuple(
"ContextSwitch", ["is_building_function", "enter_context_fn",
"device_stack"])
# `_ContextSwitchStack` is a `threading.local` to match the semantics of
# ``DefaultGraphStack`, which is also a `threading.local`.
class _ContextSwitchStack(threading.local):
"""A thread-local stack of context switches."""
def __init__(self, eager):
super(_ContextSwitchStack, self).__init__()
self.stack = []
if eager:
# Initialize the stack with a pointer to enter the eager context; this
# ensures that the fact that eager execution was enabled is propagated
# across threads, since (1) `enable_eager_execution` modifies a
# process-level flag (`default_execution_mode`) and (2) `__init__` is
# called each time a threading.local object is used in a separate thread.
self.push(is_building_function=False, enter_context_fn=eager_mode,
device_stack=None)
def push(self, is_building_function, enter_context_fn, device_stack):
"""Push metadata about a context switch onto the stack.
A context switch can take any one of the two forms: installing a graph as
the default graph, or entering the eager context. For each context switch,
we record whether or not the entered context is building a function.
Args:
is_building_function: (bool.) Whether the context is building a function.
en | ter_context_fn: (function.) A callable that executes the context switch.
For example, `graph.as_default` or `eager_mode`.
device_sta | ck: If applicable, the device function stack for this
graph. When breaking out of graphs in init_scope, the innermost nonempty
device stack is used. Eager contexts put `None` here and the value is
never used.
"""
self.stack.append(
ContextSwitch(is_b |
ESOedX/edx-platform | common/djangoapps/static_replace/migrations/0001_initial.py | Python | agpl-3.0 | 1,125 | 0.004444 | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='AssetBaseUrlConfig',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('change_date', models.DateTimeField(auto_now_add=True, verbose_name='Change date')),
('enabled', models.BooleanField(default=False, | verbose_name='Enabled')),
('base_url', models.TextField(help_text=b'The alternative hostname to serve static assets from. Should be in the form of hostname[:port].', blank=True)),
('changed_by', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, editable=False, to=settings.AUTH_USER_MODEL, | null=True, verbose_name='Changed by')),
],
),
]
|
qsheeeeen/Self-Driving-Car | 06_have_fun.py | Python | mit | 501 | 0 | from rl_toolbox import Runner
from rl_toolbox.agent import PPOAgent
from rl_toolbox.policy import VAEPolicy, CNNPolicy
def main():
fo | r fun in (VAEPolicy, CNNPolicy):
runner = Runner(
'CarRacing-v0',
PPOAgent,
fun,
record_data=False,
data_path=None,
save=False,
load=True,
weight_path='./weights/')
runner.run({'tra | in': False}, num_episode=2)
if __name__ == '__main__':
main()
|
mirestrepo/voxels-at-lems | dbrec3d/bof/pca/learn_codebook/refined_init_k_means/k_means_on_CM_means.py | Python | bsd-2-clause | 4,760 | 0.033613 | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Created on Mon Feb 14, 2011
@author:Isabel Restrepo
A script to run (fast) k-means on J sets of random subsamples
"""
import os;
import dbrec3d_batch
import multiprocessing
import Queue
import time
import random
import optparse
import sys
from numpy import log, ceil
from xml.etree.ElementTree import ElementTree
import glob
#time.sleep(30);
class dbvalue:
def __init__(self, index, type):
self.id = index # unsigned integer
self.type = type # string
class bof_job():
def __init__(self, cm_i_file, CM_set, max_it, fm_i_file):
self.cm_i_file = cm_i_file;
self.CM_set = CM_set;
self.max_it = max_it;
self.fm_i_file = fm_i_file;
def execute_bof_jobs(jobs, num_procs=4):
work_queue=multiprocessing.Queue();
result_queue=multiprocessing.Queue();
for job in jobs:
work_queue.put(job)
for i in range(num_procs):
worker= bof_worker(work_queue,result_queue)
worker.start();
print("worker with name ",worker.name," started!")
# collect the results off the queue
#important: having a result queue makes the execute_jobs wait for all jobs in the queue before exiting
# results = []
# while len(results) < len(jobs):
# result = result_queue.get()
# results.append(result)
#
# return results
class bof_worker(multiprocessing.Process):
def __init__(self,work_queue,result_queue):
# base class initialization
multiprocessing.Process.__init__(self)
# job management stuff
self.work_queue = work_queue
self.result_queue = result_queue
self.kill_received = False
def run(self):
while not self.kill_received:
# get a task
try:
job = self.work_queue.get_nowait()
except Queue.Empty:
break
start_time = time.time();
dbrec3d_batch.set_stdout('logs/log_' + str(os.getpid())+ ".txt");
dbrec3d_batch.init_process("bofKMeansOnVectorProcess");
dbrec3d_batch.set_input_string(0, job.cm_i_file);
dbrec3d_batch.set_input_from_db(1, job.CM_set);
dbrec3d_batch.set_input_unsigned(2, job.max_it);
dbrec3d_batch.set_input_string(3, job.fm_i_file);
dbrec3d_batch.run_process();
dbrec3d_batch.clear();
dbrec3d_batch.reset_stdout();
print ("Runing time for worker:", self.name)
print(time.time() - start_time);
#output exit code in this case
#important: having a result queue makes the execute_jobs wait for all jobs in the queue before exiting
#self.result_queue.put(0);
#******* | ************The Main Algorithm ************************#
if __name__=="__main__":
dbrec3d_batch.register_processes();
dbrec3d_batch.register_datatypes();
#Parse inputs
parser = optparse.OptionParser(description='bof Statistics Pass 0' | );
parser.add_option('--init_k_means_dir', action="store", dest="init_k_means_dir");
parser.add_option('--num_cores', action="store", dest="num_cores", type="int", default=4);
parser.add_option('--max_it', action="store", dest="max_it", type="int", default=100);
options, args = parser.parse_args()
init_k_means_dir = options.init_k_means_dir; #path where all CM_i means are saved and where the ouput FM_i will be written to
num_cores = options.num_cores;
max_it = options.max_it;
if not os.path.isdir(init_k_means_dir +"/"):
print "Invalid init_k_means Dir"
sys.exit(-1);
CM_path = init_k_means_dir + "/CM";
if not os.path.isdir(CM_path +"/"):
print "Invalid CM Dir"
sys.exit(-1);
CM_files = glob.glob1(CM_path, 'CM*');
FM_path = init_k_means_dir + "/FM";
if not os.path.isdir(FM_path +"/"):
os.mkdir(FM_path +"/");
start_time = time.time();
#Combine all CM_i means into one set CM to be passed for k-means
mean_file_sfx = CM_path + "/CM_" ;
dbrec3d_batch.init_process("bofCombineMeansProcess");
dbrec3d_batch.set_input_string(0, mean_file_sfx);
dbrec3d_batch.run_process();
(id, type) = dbrec3d_batch.commit_output(0);
CM_set= dbvalue(id, type);
#Begin multiprocessing
job_list=[];
#Enqueue jobs
for CM_file in CM_files:
cm_file = CM_path + "/" + CM_file;
fm_file = FM_path + "/FM" + CM_file.strip('CM');
current_job = bof_job(cm_file, CM_set, max_it, fm_file);
job_list.append(current_job);
execute_bof_jobs(job_list, num_cores);
|
carlgonz/u-fit | src/python/u_fit/modules/__init__.py | Python | mit | 25 | 0 | __aut | hor | __ = 'cgonzalez'
|
ncrmro/reango | server/users/schema/mutations.py | Python | mit | 3,252 | 0.000615 | from django.contrib.auth import authenticate, get_user_model
from graphene import AbstractType, relay, Field, String, ObjectType, Union, List
from users.jwt_schema import TokensSuccess
from users.jwt_util import get_jwt_token
from users.schema.definitions import Viewer
class Error(ObjectType):
"""Form Errors
https://medium.com/@tarkus/validation-and-user-errors-in-graphql-mutations-39ca79cd00bf#.ts99uxfnr
"""
key = String()
message = String(required=True)
class FormErrors(ObjectType):
"""Form Errors
https://medium.com/@tarkus/validation-and-user-errors-in-graphql-mutations-39ca79cd00bf#.ts99uxfnr
"""
errors = List(Error)
class AuthFormUnion(Union):
"""Returns either token error or token success"""
class Meta:
types = (Viewer, FormErrors)
class LoginMutation(relay.ClientIDMutation):
class Input:
email = String(required=True)
password = String(required=True)
auth_form_payload = Field(AuthFormUnion)
@classmethod
def mutate_and_get_payload(cls, input, context, info):
email = input.get('email')
password = input.get('password')
user_exists = get_user_model().objects.filter(email=email)
errors = []
if not user_exists:
error = Error(
key='email',
message='A user with this email doesn\'t exist.')
errors.append(error)
return LoginMutation(FormErrors(errors))
user_password_correct = user_exists[0].check_password(password)
if not user_password_correct:
error = Error(key='password', message='Password is incorrect')
errors.append(error)
return LoginMutation(FormErrors(errors))
user = authenticate(us | ername=email, password=password)
jwt_token = get_jwt_token(user)
if user and jwt_token:
tokens = T | okensSuccess(
jwt_token
)
viewer = Viewer(
user=user,
tokens=tokens
)
return LoginMutation(viewer)
class SignupUserMutation(relay.ClientIDMutation):
class Input:
email = String(required=True)
password = String(required=True)
auth_form_payload = Field(AuthFormUnion)
@classmethod
def mutate_and_get_payload(cls, input, context, info):
email = input.get('email')
password = input.get('password')
user = get_user_model().objects.filter(email=email)
errors = []
if not user:
user = get_user_model().objects.create_user(email=email, password=password)
jwt_token = get_jwt_token(user)
token = TokensSuccess(
token=jwt_token
)
viewer = Viewer(
user=user,
tokens=token
)
return SignupUserMutation(viewer)
if user:
error = Error(
key='email',
message='A user with this email already exists.')
errors.append(error)
return SignupUserMutation(FormErrors(errors))
class UserMutations(AbstractType):
login = LoginMutation.Field()
signup = SignupUserMutation.Field()
|
PythonCharmers/FunkLoad | src/funkload/tests/test_monitor_plugins.py | Python | gpl-2.0 | 1,940 | 0.008763 | import unittest
import time
from ConfigParser import ConfigParser
from funkload.MonitorPlugins import MonitorPlugins
class TestMonitorPlugins(unittest.TestCase):
default_plugins=['MonitorCPU', 'MonitorNetwork', 'MonitorMemFree', 'MonitorCUs']
def test_register_default(self):
""" Make sure all default plugins are loaded """
p=MonitorPlugins()
p.registerPlugins()
| plugins_loaded=p.MONITORS.keys()
for plugin in self.default_plugins:
self.assertTrue(plugin in plugins_loaded)
def test_getStat(self):
""" Make sure getStat does not raise any exception """
p=MonitorPlugins()
p.registerPlugins()
for plugin in self.default_plugins:
p.MONITORS[plugin].getStat()
def test_network(se | lf):
""" Make sure self.interface is properly read from config in MonitorNetwork plugin """
conf=ConfigParser()
conf.add_section('server')
conf.set('server', 'interface', 'eth9')
p=MonitorPlugins(conf)
p.registerPlugins()
self.assertTrue(p.MONITORS['MonitorNetwork'].interface == 'eth9')
def test_MonitorInfo(self):
""" Make sure Monitor.MonitorInfo still works with plugins """
from funkload.Monitor import MonitorInfo
p=MonitorPlugins()
p.registerPlugins()
m=MonitorInfo('somehost', p)
self.assertTrue(m.host=='somehost')
def test_MonitorThread(self):
""" Make sure Monitor.MonitorThread still works with plugins """
from funkload.Monitor import MonitorThread
p=MonitorPlugins()
p.registerPlugins()
records=[]
monitor = MonitorThread(records, p, 'localhost', 1)
monitor.start()
monitor.startRecord()
time.sleep(3)
monitor.stopRecord()
monitor.stop()
self.assertTrue(len(records)>0)
if __name__ == '__main__':
unittest.main()
|
neil-davis/penfold | src/plugins/PortCheckerPluginMock.py | Python | gpl-3.0 | 470 | 0 | from random im | port randint
from yapsy.IPlugin import IPlugin
class PortCheckerPlugin(IPlugin):
"""
Mocked Version: Return random 0 or 1 as exit_code
Takes an ip address and a port as inputs and trys to make
a TCP connection to that port. Output is success 0 or failure > 0
"""
def execute(self, info):
val = randint(0, 10)
if val != 0:
val = 1
info.output_values['exit_code'] = ran | dint(0, 1)
|
mosra/m.css | plugins/m/test/test_gl.py | Python | mit | 1,575 | 0.000636 | #
# This file is part of m.css.
#
# Copyright © 2017, 2018, 2019, 2020, 2021, 2022
# Vladimír Vondruš <mosra@centrum.cz>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING B | UT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WIT | H THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
from . import PelicanPluginTestCase
class Gl(PelicanPluginTestCase):
def __init__(self, *args, **kwargs):
super().__init__(__file__, '', *args, **kwargs)
def test(self):
self.run_pelican({
'PLUGINS': ['m.htmlsanity', 'm.gl']
})
self.assertEqual(*self.actual_expected_contents('page.html'))
|
madgik/exareme | Exareme-Docker/src/exareme/exareme-tools/madis/src/functions/row/termsetops.py | Python | mit | 2,487 | 0.001206 | # coding: utf-8
import itertools
def tset(*args):
"""
.. function:: termsetdiff(termset1, termset2) -> termset
Returns the termset that is the difference of sets of termset1 - termset2.
Examples:
>>> table1('''
... 't1 t2 t3' 't2 t3'
... 't3 t2 t1' 't3 t4'
... ''')
>>> sql("select tset(a,b) from table1")
tset(a,b)
-----------
t1 t2 t3
t1 t2 t3 t4
"""
return ' '.join(sorted(set(' '.join(args).split(' '))))
tset.registered = True
def tsetdiff(*args):
"""
.. function:: termsetdiff(termset1, termset2) -> termset
Returns the termset that is the difference of sets of termset1 - termset2.
Examples:
>>> table1('''
... 't1 t2 t3' 't2 t3'
... 't3 t2 t1' 't3 t4'
... ''')
>>> sql("select tsetdiff(a,b) from table1")
tsetdiff(a,b)
-------------
t1
t1 t2
"""
if len(args) < 2:
raise functions.OperatorError("tsetdiff", "tsetdiff operator: at least two termsets should be provided")
return ' '.join(sorted(set(args[0].split(' ')) - set(args[1].split(' '))))
tsetdiff.registered = True
def tsetcombinations(*args):
"""
.. function:: tsetcombinations(termset, r) -> termset
Returns all the termset combinations of length r.
It is a multiset operator that returns one column but many rows.
.. seealso::
* :ref:`tutmultiset` functions
>>> sql("select tsetcombinations('t1 t2 t3 t4',2)")
C1
-----
t1 t2
t1 t3
t1 t4
t2 t3
t2 t4
t3 t4
"""
if len(args) < 1:
raise functions.OperatorError("tsetcombinations", "tsetcombinatio | ns operator: no input")
tset = args[0]
if not isinstance(args[1], int):
raise functions.OperatorError("tsetcombinations",
"tsetcombinations operator: second argument s | hould be integer")
yield ("C1",)
for p in itertools.combinations(sorted(tset.split(' ')), args[1]):
first = False
yield [' '.join(p)]
if first:
yield ['']
tsetcombinations.registered = True
tsetcombinations.multiset = True
if not ('.' in __name__):
"""
This is needed to be able to test the function, put it at the end of every
new function you create
"""
import sys
from functions import *
testfunction()
if __name__ == "__main__":
reload(sys)
sys.setdefaultencoding('utf-8')
import doctest
doctest.testmod()
|
hms-dbmi/fourfront | src/encoded/types/file.py | Python | mit | 77,254 | 0.002291 | import boto3
import datetime
import json
import logging
import os
import pytz
import requests
import structlog
import transaction
import urllib.parse
from botocore.exceptions import ClientError
from copy import deepcopy
from pyramid.httpexceptions import (
HTTPForbidden,
HTTPTemporaryRedirect,
HTTPNotFound,
# HTTPBadRequest
)
from pyramid.response import Response
from pyramid.settings import asbool
from pyramid.threadlocal import get_current_request
from pyramid.traversal import resource_path
from pyramid.view import view_config
from snovault import (
AfterModified,
BeforeModified,
CONNECTION,
calculated_property,
collection,
load_schema,
abstract_collection,
)
from snovault.attachment import ItemWithAttachment
from snovault.crud_views import (
collection_add,
item_edit,
)
from snovault.elasticsearch import ELASTIC_SEARCH
from snovault.invalidation import add_to_indexing_queue
from snovault.schema_utils import schema_validator
from snovault.util import debug_log
from snovault.validators import (
validate_item_content_post,
validate_item_content_put,
validate_item_content_patch,
validate_item_content_in_place,
no_validate_item_content_post,
no_validate_item_content_put,
no_validate_item_content_patch
)
from urllib.parse import (
parse_qs,
urlparse,
)
from uuid import uuid4
from ..authentication import session_properties
from ..search import make_search_subreq
# from . import TrackingItem
from .base import (
Item,
ALLOW_SUBMITTER_ADD,
get_item_or_none,
lab_award_attribution_embed_list
)
from .dependencies import DependencyEmbedder
from ..util import check_user_is_logged_in
logging.getLogger('boto3').setLevel(logging.CRITICAL)
log = structlog.getLogger(__name__)
# XXX: Need expanding to cover display_title
# Embed paths pulled into a file item's index document so that the workflow
# runs this file participates in render without extra backend requests.
file_workflow_run_embeds = [
    'workflow_run_inputs.workflow.title',
    'workflow_run_inputs.input_files.workflow_argument_name',
    'workflow_run_inputs.input_files.value.filename',
    'workflow_run_inputs.input_files.value.display_title',
    'workflow_run_inputs.input_files.value.file_format',
    'workflow_run_inputs.input_files.value.uuid',
    'workflow_run_inputs.input_files.value.accession',
    'workflow_run_inputs.output_files.workflow_argument_name',
    'workflow_run_inputs.output_files.value.display_title',
    'workflow_run_inputs.output_files.value.file_format',
    'workflow_run_inputs.output_files.value.uuid',
    'workflow_run_inputs.output_files.value.accession',
    'workflow_run_inputs.output_files.value_qc.url',
    'workflow_run_inputs.output_files.value_qc.overall_quality_status'
]
# Processed files additionally embed the same paths for runs that produced
# them (workflow_run_outputs), mirrored from the inputs list above.
file_workflow_run_embeds_processed = file_workflow_run_embeds + [e.replace('workflow_run_inputs.', 'workflow_run_outputs.') for e in file_workflow_run_embeds]
def show_upload_credentials(request=None, context=None, status=None):
    """Decide whether upload credentials may be revealed for an item.

    Credentials are shown only while the file is in an uploadable status and
    the requesting user holds 'edit' permission on the context.
    """
    uploadable_statuses = ('uploading', 'to be uploaded by workflow', 'upload failed')
    if request is not None and status in uploadable_statuses:
        return request.has_permission('edit', context)
    return False
def external_creds(bucket, key, name=None, profile_name=None):
    """
    Describe an s3 location and, optionally, temporary upload credentials.

    If name is None, we want the link to s3 but no need to generate an
    access token; this is useful for linking metadata to files that already
    exist on s3. Otherwise an STS federation token restricted to a single
    PutObject on the target object is requested and returned under
    'upload_credentials'.
    """
    logging.getLogger('boto3').setLevel(logging.CRITICAL)
    credentials = {}
    if name is not None:
        # Policy allowing exactly one operation: upload to this bucket/key.
        policy = {
            'Version': '2012-10-17',
            'Statement': [
                {
                    'Effect': 'Allow',
                    'Action': 's3:PutObject',
                    'Resource': 'arn:aws:s3:::{bucket}/{key}'.format(bucket=bucket, key=key),
                }
            ]
        }
        sts_client = boto3.client('sts')
        token = sts_client.get_federation_token(Name=name, Policy=json.dumps(policy))
        # token['Credentials'] carries access_key / secret_key / expiration /
        # session_token; enrich it with upload metadata for the client.
        credentials = token.get('Credentials')
        credentials.update({
            'upload_url': 's3://{bucket}/{key}'.format(bucket=bucket, key=key),
            'federated_user_arn': token.get('FederatedUser').get('Arn'),
            'federated_user_id': token.get('FederatedUser').get('FederatedUserId'),
            'request_id': token.get('ResponseMetadata').get('RequestId'),
            'key': key
        })
    return {
        'service': 's3',
        'bucket': bucket,
        'key': key,
        'upload_credentials': credentials,
    }
def property_closure(request, propname, root_uuid):
    """Return the set of uuids reachable from root_uuid via propname.

    Breadth-first traversal; already-visited uuids are excluded from each new
    frontier, which guards against reference cycles.
    """
    connection = request.registry[CONNECTION]
    visited = set()
    frontier = {str(root_uuid)}
    while frontier:
        visited |= frontier
        discovered = set()
        for item_uuid in frontier:
            item = connection.get_by_uuid(item_uuid)
            discovered.update(item.__json__(request).get(propname, ()))
        frontier = discovered - visited
    return visited
@collection(
    name='file-sets',
    unique_key='accession',
    properties={
        'title': 'File Sets',
        'description': 'Listing of File Sets',
    })
class FileSet(Item):
    """Generic collection of files grouped under one file-set item."""
    # snovault registers this under /file-sets/ with 'accession' as the
    # unique lookup key (see the @collection decorator above).
    item_type = 'file_set'
    schema = load_schema('encoded:schemas/file_set.json')  # validation schema
    name_key = 'accession'  # property used to build the item's URL path
@collection(
    name='file-set-calibrations',
    unique_key='accession',
    properties={
        'title': 'Calibration File Sets',
        'description': 'Listing of File Sets',
    })
class FileSetCalibration(FileSet):
    """File set specialized for calibration files."""
    # Items also match the generic 'FileSet' type in searches/collections.
    base_types = ['FileSet'] + Item.base_types
    item_type = 'file_set_calibration'
    schema = load_schema('encoded:schemas/file_set_calibration.json')
    name_key = 'accession'
    # Fields of linked items denormalized into this item's index document.
    embedded_list = Item.embedded_list + [
        # User linkTo
        'files_in_set.submitted_by.first_name',
        'files_in_set.submitted_by.last_name',
        'files_in_set.submitted_by.job_title',
        # Lab linkTo
        'files_in_set.lab.title',
        'files_in_set.lab.name',
        # File linkTo
        'files_in_set.accession',
        'files_in_set.href',
        'files_in_set.file_size',
        'files_in_set.upload_key',
        'files_in_set.file_classification',
        # FileFormat linkTo
        'files_in_set.file_format.file_format',
    ]
@collection(
    name='file-set-microscope-qcs',
    unique_key='accession',
    properties={
        'title': 'Microscope QC File Sets',
        'description': 'Listing of File Sets',
    })
class FileSetMicroscopeQc(ItemWithAttachment, FileSet):
    """File set for microscope QC files; supports a report attachment
    via the ItemWithAttachment mixin."""
    # Items also match the generic 'FileSet' type in searches/collections.
    base_types = ['FileSet'] + Item.base_types
    item_type = 'file_set_microscope_qc'
    schema = load_schema('encoded:schemas/file_set_microscope_qc.json')
    name_key = 'accession'
    # Fields of linked items denormalized into this item's index document.
    embedded_list = Item.embedded_list + [
        # User linkTo
        'files_in_set.submitted_by.first_name',
        'files_in_set.submitted_by.last_name',
        'files_in_set.submitted_by.job_title',
        # Lab linkTo
        'files_in_set.lab.title',
        'files_in_set.lab.name',
        # File linkTo
        'files_in_set.accession',
        'files_in_set.href',
        'files_in_set.file_size',
        'files_in_set.upload_key',
        'files_in_set.file_classification',
        # FileFormat linkTo
        'files_in_set.file_format.file_format',
    ]
@abstract_collection(
name='files',
unique_key='accession',
acl=ALLOW_SUBMITTER_ADD,
properties={
'title': 'Files',
'description': 'Listing of Files',
})
class File(Item):
"""Collection for individual files."""
item_type = 'file'
base_types = ['File'] + Item.base_types
schema = load_schema('encoded:schemas/file.json')
# TODO: embed file_format
embedded_list = Item.embedded_list + lab_award_attribution_embed_list + [
# XXX: Experiment linkTo
'experiments.accession',
# ExperimentType linkTo
'experiments.experiment_type.title',
# ExperimentSet linkTo
'experiments.experiment_sets.accession',
'experiments.experiment_sets.expe |
markofu/scripts | nmap/nmap/zenmap/zenmapGUI/DiffCompare.py | Python | gpl-2.0 | 22,982 | 0.000044 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# ***********************IMPORTANT NMAP LICENSE TERMS************************
# * *
# * The Nmap Security Scanner is (C) 1996-2013 Insecure.Com LLC. Nmap is *
# * also a registered trademark of Insecure.Com LLC. This program is free *
# * software; you may redistribute and/or modify it under the terms of the *
# * GNU General Public License as published by the Free Software *
# * Foundation; Version 2 ("GPL"), BUT ONLY WITH ALL OF THE CLARIFICATIONS *
# * AND EXCEPTIONS DESCRIBED HEREIN. This guarantees your right to use, *
# * modify, and redistribute this software under certain conditions. If *
# * you wish to embed Nmap technology into proprietary software, we sell *
# * alternative licenses (contact sales@nmap.com). Dozens of software *
# * vendors already license Nmap technology such as host discovery, port *
# * scanning, OS detection, version detection, and the Nmap Scripting *
# * Engine. *
# * *
# * Note that the GPL places important restrictions on "derivative works", *
# * yet it does not provide a detailed definition of that term. To avoid *
# * misunderstandings, we interpret that term as broadly as copyright law *
# * allows. For example, we consider an application to constitute a *
# * derivative work for the purpose of this license if it does any of the *
# * following with any software or content covered by this license *
# * ("Covered Software"): *
# * *
# * o Integrates source code from Covered Software. *
# * *
# * o Reads or includes copyrighted data files, such as Nmap's nmap-os-db *
# * or nmap-service-probes. *
# * *
# * o Is designed specifically to execute Covered Software and parse the *
# * results (as opposed to typical shell or execution-menu apps, which will *
# * execute anything you tell them to). *
# * *
# * o Includes Covered Software in a proprietary executable installer. The *
# * installers produced by InstallShield are an example of this. Including *
# * Nmap with other software in compressed or archival form does not *
# * trigger this provision, provided appropriate open source decompression *
# * or de-archiving software is widely available for no charge. For the *
# * purposes of this license, an installer is considered to include Covered *
# * Software even if it actually retrieves a copy of Covered Software from *
# * another source during runtime (such as by downloading it from the *
# * Internet). *
# * *
# * o Links (statically or dynamically) to a library which does a | ny of the *
# * above. | *
# * *
# * o Executes a helper program, module, or script to do any of the above. *
# * *
# * This list is not exclusive, but is meant to clarify our interpretation *
# * of derived works with some common examples. Other people may interpret *
# * the plain GPL differently, so we consider this a special exception to *
# * the GPL that we apply to Covered Software. Works which meet any of *
# * these conditions must conform to all of the terms of this license, *
# * particularly including the GPL Section 3 requirements of providing *
# * source code and allowing free redistribution of the work as a whole. *
# * *
# * As another special exception to the GPL terms, Insecure.Com LLC grants *
# * permission to link the code of this program with any version of the *
# * OpenSSL library which is distributed under a license identical to that *
# * listed in the included docs/licenses/OpenSSL.txt file, and distribute *
# * linked combinations including the two. *
# * *
# * Any redistribution of Covered Software, including any derived works, *
# * must obey and carry forward all of the terms of this license, including *
# * obeying all GPL rules and restrictions. For example, source code of *
# * the whole work must be provided and free redistribution must be *
# * allowed. All GPL references to "this License", are to be treated as *
# * including the terms and conditions of this license text as well. *
# * *
# * Because this license imposes special exceptions to the GPL, Covered *
# * Work may not be combined (even as part of a larger work) with plain GPL *
# * software. The terms, conditions, and exceptions of this license must *
# * be included as well. This license is incompatible with some other open *
# * source licenses as well. In some cases we can relicense portions of *
# * Nmap or grant special permissions to use it in other open source *
# * software. Please contact fyodor@nmap.org with any such requests. *
# * Similarly, we don't incorporate incompatible open source software into *
# * Covered Software without special permission from the copyright holders. *
# * *
# * If you have any questions about the licensing restrictions on using *
# * Nmap in other works, are happy to help. As mentioned above, we also *
# * offer alternative license to integrate Nmap into proprietary *
# * applications and appliances. These contracts have been sold to dozens *
# * of software vendors, and generally include a perpetual license as well *
# * as providing for priority support and updates. They also fund the *
# * continued development of Nmap. Please email sales@nmap.com for further *
# * information. *
# * *
# * If you have received a written license agreement or contract for *
# * Covered Software stating terms other than these, you may choose to use *
# * and redistribute Covered Software under those terms instead of these. *
# * *
# * Source is provided to this software because we believe users have a *
# * right to know exactly what a program is going to do before they run it. *
# * This also allows you to audit the software for security holes (none *
# * have been found so far). *
# * *
# * Source code also allows you to port Nmap to new platforms, fix bugs, *
# * and add new features. You are highly encouraged to send your changes *
# * to the dev@nmap.org mailing list for possible incorporation into the *
# * main distribution. By sending these changes to Fyodor or one of the *
# * Insecure.Org development mailing lists, or checking them into the Nmap *
# * source code repository, it is understood (unless you specify otherwise) *
# * that you are offering the Nmap Project (Insecure.Com LLC) the *
# * unlimited, non-exclusive right to reuse, modify, and relicense the *
# * code. Nmap will |
andybalaam/poemtube | server/src/poemtube/jsonapi/json_whoami.py | Python | gpl-2.0 | 644 | 0.037267 |
import json
from poemtube.errors import InvalidRequest
from poemtube.jsonapi.json_errors import JsonInvalidRequest
def my_whoami( user ):
    """Describe the current user for the whoami endpoint.

    Returns {"userid": <user>} when a user identity is present and
    {"anonymous": ""} otherwise. The original body was syntactically
    invalid (an ``else`` after two ``return`` statements) and carried a
    leftover hard-coded example userid; this restores the evident intent.
    """
    if user:
        return { "userid": user }
    else:
        return { "anonymous" : "" }
def do( fn, *args, **kwargs ):
    """
    Run the supplied function, converting the return value
    to JSON, and converting any exceptions to JSON exceptions.

    Note: rewritten from the Python-2-only ``except InvalidRequest, e:``
    form to ``except ... as e``, which is valid on Python 2.6+ and 3.
    """
    try:
        return json.dumps( fn( *args, **kwargs ) )
    except InvalidRequest as e:
        raise JsonInvalidRequest( e )
def GET( user ):
    # HTTP GET handler: JSON-encode the whoami description of *user*.
    return do( my_whoami, user )
|
sgenoud/scikit-learn | sklearn/datasets/base.py | Python | bsd-3-clause | 17,305 | 0.000058 | """
Base IO code for all datasets
"""
# Copyright (c) 2007 David Cournapeau <cournape@gmail.com>
# 2010 Fabian Pedregosa <fabian.pedregosa@inria.fr>
# 2010 Olivier Grisel <olivier.grisel@ensta.org>
# License: Simplified BSD
import os
import csv
import shutil
from os import environ
from os.path import dirname
from os.path import join
from os.path import exists
from os.path import expanduser
from os.path import isdir
from os import listdir
from os import makedirs
import numpy as np
from ..utils import check_random_state
class Bunch(dict):
    """Container object for datasets: dictionary-like object that
    exposes its keys as attributes."""

    def __init__(self, **kwargs):
        super(Bunch, self).__init__(**kwargs)
        # Alias the attribute dict to the mapping itself, so attribute
        # access and item access read/write the same storage.
        self.__dict__ = self
def get_data_home(data_home=None):
    """Return the path of the scikit-learn data dir.

    Large-dataset loaders use this folder to avoid downloading data more
    than once. When *data_home* is None the location is taken from the
    'SCIKIT_LEARN_DATA' environment variable, falling back to a
    'scikit_learn_data' folder in the user's home directory. '~' is
    expanded, and the folder is created if it does not already exist.
    """
    if data_home is None:
        default = join('~', 'scikit_learn_data')
        data_home = environ.get('SCIKIT_LEARN_DATA', default)
    data_home = expanduser(data_home)
    if not exists(data_home):
        makedirs(data_home)
    return data_home
def clear_data_home(data_home=None):
    """Delete all the content of the data home cache."""
    # Resolve the (possibly default) location first, then remove the tree.
    shutil.rmtree(get_data_home(data_home))
def load_files(container_path, description=None, categories=None,
               load_content=True, shuffle=True, charset=None,
               charse_error='strict', random_state=0):
    """Load text files with categories as subfolder names.

    Individual samples are assumed to be files stored a two levels folder
    structure such as the following:

        container_folder/
            category_1_folder/
                file_1.txt file_2.txt ... file_42.txt
            category_2_folder/
                file_43.txt file_44.txt ...

    The folder names are used as supervised signal label names. The
    individual file names are not important.

    This function does not try to extract features into a numpy array or
    scipy sparse matrix. In addition, if load_content is false it
    does not try to load the files in memory.

    To use utf-8 text files in a scikit-learn classification or clustering
    algorithm you will first need to use the `sklearn.features.text`
    module to build a feature extraction transformer that suits your
    problem.

    Parameters
    ----------
    container_path : string or unicode
        Path to the main folder holding one subfolder per category

    description : string or unicode, optional (default=None)
        A paragraph describing the characteristic of the dataset: its
        source, reference, etc.

    categories : A collection of strings or None, optional (default=None)
        If None (default), load all the categories.
        If not None, list of category names to load (other categories
        ignored).

    load_content : boolean, optional (default=True)
        Whether to load or not the content of the different files. If true
        a 'data' attribute containing the text information is present in
        the data structure returned. If not, a filenames attribute gives
        the path to the files.

    charset : string or None (default is None)
        If None, do not try to decode the content of the files (e.g. for
        images or other non-text content). If not None, charset to use to
        decode text files if load_content is True.

    charse_error : {'strict', 'ignore', 'replace'}
        Instruction on what to do if a byte sequence is given to analyze
        that contains characters not of the given `charset`. By default it
        is 'strict', meaning that a UnicodeDecodeError will be raised.
        (The parameter name retains its historical typo — it is part of the
        public keyword interface and cannot be renamed compatibly.)

    shuffle : bool, optional (default=True)
        Whether or not to shuffle the data: might be important for models
        that make the assumption that the samples are independent and
        identically distributed (i.i.d.), such as stochastic gradient
        descent.

    random_state : int, RandomState instance or None, optional (default=0)
        If int, random_state is the seed used by the random number
        generator; if RandomState instance, it is used directly; if None,
        the RandomState instance used by `np.random` is used.

    Returns
    -------
    data : Bunch
        Dictionary-like object with attributes: either 'data' (the raw
        text) or 'filenames' (paths to the files), plus 'target'
        (integer class index per sample), 'target_names' (label meanings)
        and 'DESCR' (the dataset description).
    """
    target = []
    target_names = []
    filenames = []

    # Categories are the immediate subfolders, scanned in sorted order so
    # that label assignment is deterministic across runs.
    folders = [f for f in sorted(listdir(container_path))
               if isdir(join(container_path, f))]

    if categories is not None:
        folders = [f for f in folders if f in categories]

    for label, folder in enumerate(folders):
        target_names.append(folder)
        folder_path = join(container_path, folder)
        documents = [join(folder_path, d)
                     for d in sorted(listdir(folder_path))]
        target.extend(len(documents) * [label])
        filenames.extend(documents)

    # convert to array for fancy indexing
    filenames = np.array(filenames)
    target = np.array(target)

    if shuffle:
        random_state = check_random_state(random_state)
        indices = np.arange(filenames.shape[0])
        random_state.shuffle(indices)
        filenames = filenames[indices]
        target = target[indices]

    if load_content:
        # Read each document with a context manager so file handles are
        # closed deterministically (the previous list comprehension left
        # them open until garbage collection).
        data = []
        for filename in filenames:
            with open(filename) as f:
                data.append(f.read())
        if charset is not None:
            data = [d.decode(charset, charse_error) for d in data]
        return Bunch(data=data,
                     filenames=filenames,
                     target_names=target_names,
                     target=target,
                     DESCR=description)

    return Bunch(filenames=filenames,
                 target_names=target_names,
                 target=target,
                 DESCR=description)
def load_iris():
"""Load and return the iris dataset (classification).
The iris dataset is a classic and very easy multi-class classification
dataset.
================= ==============
Classes 3
Samples per class 50
Samples total 150
Dimensionality 4
Features real, positive
================= ==============
Returns
-------
data : Bunch
Dictionary-like object, the interesting attributes are:
'data', the data to learn, 'target', the classification labels,
'target_names', the meaning of the labels, 'feature_names', the
meaning of the features, and 'DESCR', the
full description of the dataset.
Examples
--------
Let's say you are interested in the samples 10, 25, and 50, and want to
know their class name.
>>> from sklearn.datasets import load_iris
>>> data = load_iris()
>>> data.target[[10, 25, 50]]
array([0, 0, 1])
>>> list(data.target_names)
['setosa', 'versicolor', 'virginica']
"""
module_path = dirname(__file__)
data_file = csv.reader(open(join(module_path, 'data', 'iris.csv')))
fdescr = open(join(module_path, 'descr', 'iris.rst'))
temp = data_file.ne |
dcorio/l10n-italy | l10n_it_ipa/model/__init__.py | Python | agpl-3.0 | 301 | 0 | # -*- coding: utf-8 -*-
# Copyright 20 | 14 KTec S.r.l.
# (<http://www.ktec.it>).
# Copyright 2014 Associazione Odoo Italia
# (<http://www.odoo-italia.org>).
# Copyright 2016 Lorenzo Battistini - Agile Business Group
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from . import par | tner
|
FrozenPigs/Taigabot | plugins/_disabled/wordoftheday.py | Python | gpl-3.0 | 672 | 0.002976 | import re
from util import hook, http, misc
from BeautifulSoup import Beauti | fulSoup
@hook.command(autohelp=False)
def word(inp, say=False, nick=False):
    "word -- Gets the word of the day."
    # say/nick appear to be injected by the bot's hook framework at call
    # time; the False defaults look like placeholders — TODO confirm
    # against util.hook's dispatch.
    page = http.get('http://merriam-webster.com/word-of-the-day')
    soup = BeautifulSoup(page)
    # Scrape the headword and its part of speech from the page markup.
    word = soup.find('strong', {'class': 'main_entry_word'}).renderContents()
    function = soup.find('p', {'class': 'word_function'}).renderContents()
    #definitions = re.findall(r'<span class="ssens"><strong>:</strong>'
    #                         r' *([^<]+)</span>', content)
    # \x02 toggles bold in IRC formatting.
    say("(%s) The word of the day is:"\
        " \x02%s\x02 (%s)" % (nick, word, function))
|
shmup/miniboa | setup.py | Python | apache-2.0 | 2,304 | 0 | import codecs
import os
import re
from setuptools import setup, find_packages
###################################################################
NAME = "miniboa"
PACKAGES = find_packages(where="src")  # src/-layout: packages live under src/
# Package metadata is read out of this file by find_meta() below, so it is
# defined in exactly one place.
META_PATH = os.path.join("src", "miniboa", "__init__.py")
KEYWORDS = ["mud", "tcp", "telnet"]
CLASSIFIERS = [
    "Development Status :: 5 - Production/Stable",
    "Intended Audience :: Developers",
    "Natural Language :: English",
    "License :: OSI Approved :: Apache Software License",
    "Operating System :: OS Independent",
    "Programming Language :: Python",
    "Programming Language :: Python :: 2",
    "Programming Language :: Python :: 2.7",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.4",
    "Programming Language :: Python :: 3.5",
    "Programming Language :: Python :: 3.6",
    "Programming Language :: Python :: 3.7",
    "Topic :: Software Development :: Libraries"
]
INSTALL_REQUIRES = []  # pure-stdlib package: no runtime dependencies
# Absolute path of the directory containing this setup script.
HERE = os.path.abspath(os.path.dirname(__file__))


def read(*parts):
    """
    Build an absolute path from *parts* and return the contents of the
    resulting file. Assume UTF-8 encoding.
    """
    path = os.path.join(HERE, *parts)
    with codecs.open(path, "rb", "utf-8") as f:
        return f.read()
META_FILE = read(META_PATH)
def find_meta(meta):
    """
    Extract __*meta*__ from META_FILE.
    """
    pattern = r"^__{meta}__ = ['\"]([^'\"]*)['\"]".format(meta=meta)
    match = re.search(pattern, META_FILE, re.M)
    if match is None:
        raise RuntimeError("Unable to find __{meta}__ string.".format(meta=meta))
    return match.group(1)
if __name__ == "__main__":
    # All metadata except the long description comes from
    # src/miniboa/__init__.py via find_meta(), keeping it single-sourced.
    setup(
        name=NAME,
        description=find_meta("description"),
        license=find_meta("license"),
        url=find_meta("uri"),
        version=find_meta("version"),
        author=find_meta("author"),
        author_email=find_meta("email"),
        maintainer=find_meta("author"),
        maintainer_email=find_meta("email"),
        keywords=KEYWORDS,
        long_description=read("README.rst"),
        packages=PACKAGES,
        package_dir={"": "src"},
        zip_safe=False,
        classifiers=CLASSIFIERS,
        install_requires=INSTALL_REQUIRES,
    )
|
FreedomCoop/valuenetwork | general/models.py | Python | agpl-3.0 | 55,292 | 0.006511 | #encoding=utf-8
from django.utils.safestring import mark_safe
from django.db import models
from django.utils.six import python_2_unicode_compatible
from mptt.models import MPTTModel
from mptt.fields import TreeForeignKey, TreeManyToManyField
from datetime import date, timedelta
from datetime import datetime
from django.utils.translation import ugettext_lazy as _
from django.utils.translation | import gettext_lazy as __
from decimal import Decimal
from itertools import chain
# Create your models here.
a_strG = "<a onclick='return showRelatedObjectLookupPopup(this);' href='/admin/general/"
a_strW = "<a onclick='return showRelatedObjectLookupPopup(this);' href='/admin/Welcome/"
#a_str2 = "?_popup=1&_changelist_ | filters=_popup=1&t=human' target='_blank' style='margin-left:-100px'>"
a_str2 = "?_popup=1&t=human' target='_blank' >"
a_str3 = "?_popup=1&t=human' target='_blank'>"
a_edit = '<b>Edit</b>'
ul_tag1 = '<ul style="margin-left:-10em;">'
ul_tag = '<ul>'
str_none = __('(none)')
str_remove = 'erase'
def erase_id_link(field, id):
    """Return the HTML for an inline 'erase' link on a related-object box.

    The anchor's name attribute encodes "<field>,<id>" so the client-side
    handler knows which relation entry to remove; ``str_remove`` supplies
    the visible link label. (A stray debug ``print(out)`` was removed: a
    rendering helper should not write to stdout on every call.)
    """
    return '<a class="erase_id_on_box" name="'+str(field)+','+str(id)+'" href="javascript:;">'+str_remove+'</a>'
# C O N C E P T S - (Concepts, Ideas...)
@python_2_unicode_compatible
class Concept(MPTTModel): # Abstract
    """Abstract MPTT tree node shared by all concept-like models."""
    # Unique display name of the concept.
    name = models.CharField(unique=True, verbose_name=_("Name"), max_length=200, help_text=_("The name of the Concept"), default="")
    description = models.TextField(blank=True, verbose_name=_("Description"))
    # Tree link to the parent concept; None for a root node.
    parent = TreeForeignKey('self', null=True, blank=True, related_name='children', on_delete=models.CASCADE)
    def __str__(self):
        return self.name
    class Meta:
        abstract = True
        verbose_name = _("Concept")
        verbose_name_plural = _("c- Concepts")
@python_2_unicode_compatible
class Type(Concept): # Create own ID's (TREE)
    """Concrete concept-tree node, optionally associated with a class."""
    #concept = models.OneToOneField('Concept', primary_key=True, parent_link=True, on_delete=models.CASCADE)
    # Optional dotted name of the Django model / python class this Type maps to.
    clas = models.CharField(blank=True, verbose_name=_("Class"), max_length=200,
        help_text=_("Django model or python class associated to the Type"))
    #types = TreeManyToManyField('self', through='rel_Type_Types', verbose_name=_(u"Related Types"), blank=True)
    class Meta:
        verbose_name = _("c- Type")
        #verbose_name_plural = _(u"c- Types")
    def __str__(self):
        # Render as "name (clas)" only when a class is associated.
        if self.clas is None or self.clas == '':
            return self.name
        else:
            return self.name+' ('+self.clas+')'
    def save(self, *args, **kwargs):
        # name_ca / name_es / name_en appear to be per-language fields added
        # by django-modeltranslation elsewhere; missing Catalan/Spanish
        # translations are backfilled from English before saving —
        # TODO(review): confirm against the translation registration.
        if not self.name_ca:
            print("save: name_ca:"+self.name_en)
            self.name_ca = self.name_en
        if not self.name_es:
            print("save: name_es:"+self.name_en)
            self.name_es = self.name_en
        super(Type, self).save(*args, **kwargs)
"""
class rel_Type_Types(models.Model):
typ = TreeForeignKey('Type', on_delete=models.CASCADE)
typ2 = TreeForeignKey('Type', verbose_name=_(u"related Type"), on_delete=models.CASCADE)
relation = TreeForeignKey('Relation', related_name='ty_typ+', blank=True, null=True, verbose_name=_(u"relation"), on_delete=models.SET_NULL)
class Meta:
verbose_name = _(u"T_type")
verbose_name_plural = _(u"Types related the Type")
def __str__(self):
if self.relation.gerund is None or self.relation.gerund == '':
return self.typ2.__str__()
else:
return self.relation.gerund+' > '+self.typ2.__str__()
"""
# B E I N G S - (Éssers, Entitats, Projectes...)
"""
@python_2_unicode_compatible
class Being(models.Model): # Abstract
name = models.CharField(verbose_name=_(u"Name"), max_length=200, help_text=_(u"The name of the Entity"))
#being_type = TreeForeignKey('Being_Type', blank=True, null=True, verbose_name="Type of entity", on_delete=models.SET_NULL)
birth_date = models.DateField(blank=True, null=True, verbose_name=_(u"Born date"), help_text=_(u"The day of starting existence"))
death_date = models.DateField(blank=True, null=True, verbose_name=_(u"Die date"), help_text=_(u"The day of ceasing existence"))
class Meta:
abstract = True
def __str__(self):
return self.name.encode("utf-8")
"""
class Being_Type(Type):
typ = models.OneToOneField('Type', primary_key=True, parent_link=True, on_delete=models.CASCADE)
class Meta:
verbose_name= _("Type of entity")
verbose_name_plural = _("e--> Types of entities")
"""
@python_2_unicode_compatible
class Human(Being): # Create own ID's
nickname = models.CharField(max_length=50, blank=True, verbose_name=_(u"Nickname"), help_text=_(u"The nickname most used of the human entity"))
email = models.EmailField(max_length=100, blank=True, verbose_name=_(u"Email"), help_text=_(u"The main email address of the human entity"))
telephone_cell = models.CharField(max_length=20, blank=True, verbose_name=_(u"Mobile phone"), help_text=_(u"The main telephone of the human entity"))
telephone_land = models.CharField(max_length=20, blank=True, verbose_name=_(u"Land phone"))
website = models.CharField(max_length=100, blank=True, verbose_name=_(u"Web"), help_text=_(u"The main web url of the human entity"))
description = models.TextField(blank=True, null=True, verbose_name=_(u"Entity description"))
jobs = TreeManyToManyField('Job', through='rel_Human_Jobs', verbose_name=_(u"Activities, Jobs, Skills"), blank=True)
addresses = models.ManyToManyField('Address', through='rel_Human_Addresses', verbose_name=_(u"Addresses"), blank=True)
regions = models.ManyToManyField('Region', through='rel_Human_Regions', verbose_name=_(u"Regions"), blank=True)
records = models.ManyToManyField('Record', through='rel_Human_Records', verbose_name=_(u"Records"), blank=True)
materials = models.ManyToManyField('Material', through='rel_Human_Materials', verbose_name=_(u"Material artworks"), blank=True)
nonmaterials = models.ManyToManyField('Nonmaterial', through='rel_Human_Nonmaterials', verbose_name=_(u"Non-material artworks"), blank=True)
persons = models.ManyToManyField('Person', through='rel_Human_Persons', related_name='hum_persons', verbose_name=_(u"Persons"), blank=True)
projects = models.ManyToManyField('Project', through='rel_Human_Projects', related_name='hum_projects', verbose_name=_(u"Projects"), blank=True)
companies = models.ManyToManyField('Company', through='rel_Human_Companies', related_name='hum_companies', verbose_name=_(u"Companies"), blank=True)
class Meta:
verbose_name = _(u"Human")
verbose_name_plural = _(u"e- Humans")
def __str__(self):
if self.nickname is None or self.nickname == '':
return self.name
else:
return self.nickname+' ('+self.name+')'
def _my_accounts(self):
return list(chain(self.accountsCes.all(), self.accountsCrypto.all(), self.accountsBank.all()))
#_my_accounts.list = []
accounts = property(_my_accounts)
def _selflink(self):
if self.id:
if hasattr(self, 'person'):
return mark_safe( a_strG + "person/" + str(self.person.id) + a_str2 + a_edit + "</a>") # % str(self.id))
elif hasattr(self, 'project'):
return mark_safe( a_strG + "project/" + str(self.project.id) + a_str2 + a_edit + "</a>")# % str(self.id) )
else:
return "Not present"
_selflink.allow_tags = True
_selflink.short_description = ''
self_link = property (_selflink)
def _ic_membership(self):
try:
#print(self.ic_membership_set.all())
if hasattr(self, 'ic_person_membership_set'):
ic_ms = self.ic_person_membership_set.all()
out = ul_tag
for ms in ic_ms:
out += '<li>'+a_strW + "ic_person_membership/" + str(ms.id) + a_str3 + '<b>'+ms.name +"</b></a></li>"
return out+'</ul>'
elif hasattr(self, 'ic_project_membership_set'):
ic_ms = self.ic_project_membership_set.all()
out = ul_tag
|
Distrotech/bzr | tools/rst2html.py | Python | gpl-2.0 | 1,883 | 0.003717 | #! /usr/bin/env python
# Originally by Dave Goodger, from the docutils, distribution.
#
# Modified for Bazaar to accommodate options containing dots
#
# This file is in the public domain.
"""
A minimal front end to the Docutils Publisher, producing HTML.
"""
# Enable locale-aware docutils output (dates, decimal separators); an
# unsupported or misconfigured locale must not stop document building.
try:
    import locale
    locale.setlocale(locale.LC_ALL, '')
except Exception:
    # Narrowed from a bare 'except:', which also swallowed SystemExit and
    # KeyboardInterrupt; setlocale failures raise locale.Error, covered here.
    pass
import docutils
from docutils.core import publish_cmdline, default_description
if True: # this is still required in the distutils trunk as-at June 2008.
    from docutils.parsers.rst.states import Body
    # we have some option names that contain dot; which is not allowed by
    # python-docutils 0.4-4 -- so monkeypatch in a better pattern
    #
    # This is a bit gross to patch because all this is built up at load time.
    Body.pats['optname'] = r'[a-zA-Z0-9][a-zA-Z0-9._-]*'
    # The derived patterns interpolate 'optname' at definition time, so every
    # dependent pattern must be rebuilt, in order, after the override above.
    Body.pats['longopt'] = r'(--|/)%(optname)s([ =]%(optarg)s)?' % Body.pats
    Body.pats['option'] = r'(%(shortopt)s|%(longopt)s)' % Body.pats
    Body.patterns['option_marker'] = r'%(option)s(, %(option)s)*( +| ?$)' % Body.pats
description = ('Generates (X)HTML documents from standalone reStructuredText '
'sources. ' + default_description)
# workaround for bug with <xxx id="tags" name="tags"> in IE
from docutils.writers import html4css1
class IESafeHtmlTranslator(html4css1.HTMLTranslator):
def starttag(self, node, tagname, suffix='\n', empty=0, **attributes):
x = html4css1.HTMLTranslator.starttag(self, node, tagname, suffix,
empty, **attributes)
y = | x.replace('id="tags"', 'id="tags_"')
y = y.replace('name="tags"', 'name="tags_"')
y = y.replace(' | href="#tags"', 'href="#tags_"')
return y
mywriter = html4css1.Writer()
mywriter.translator_class = IESafeHtmlTranslator
publish_cmdline(writer=mywriter, description=description)
|
bcbnz/pylabels | labels/specifications.py | Python | gpl-3.0 | 17,105 | 0.001812 | # This file is part of pylabels, a Python library to create PDFs for printing
# labels.
# Copyright (C) 2012, 2013, 2014, 2015 Blair Bonnett
#
# pylabels is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# pylabels is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# pylabels. If not, see <http://www.gnu.org/licenses/>.
from decimal import Decimal
import json
class InvalidDimension(ValueError):
"""Raised when a sheet specification has inconsistent dimensions. """
pass
class Specification(object):
"""Specification for a sheet of labels.
All dimensions are given in millimetres. If any of the margins are not
given, then any remaining space is divided equally amongst them. If all the
width or all the height margins are given, they must exactly use up all
non-label space on the sheet.
"""
def __init__(self, sheet_width, sheet_height, columns, rows, label_width, label_height, **kwargs):
"""
Required parameters
-------------------
sheet_width, sheet_height: positive dimension
The size of the sheet.
columns, rows: positive integer
The number of labels on the sheet.
label_width, label_size: positive dimension
The size of each label.
Margins and gaps
----------------
left_margin: positive dimension
The gap between the left edge of the sheet and the first column.
column_gap: positive dimension
The internal gap between columns.
right_margin: positive dimension
The gap between the right edge of the sheet and the last column.
top_margin: positive dimension
The gap between the top edge of the sheet and the first row.
row_gap: positive dimension
The internal gap between rows.
bottom_margin: positive dimension
The gap between the bottom edge of the sheet and the last row.
Padding
-------
left_padding, right_padding, top_padding, bottom_padding: positive dimensions, default 0
The padding between the edges of the label and the area available
to draw on.
Corners
---------------------
corner_radius: positive dimension, default 0
Gives the labels rounded corners with the given radius.
padding_radius: positive dimension, default 0
Give the drawing area rounded corners. If there is no padding, this
must be set to zero.
Background
----------
background_image: reportlab.graphics.shape.Image
An image to use as the background to the page. This will be
automatically sized to fit the page; make sure it has the correct
aspect ratio.
background_filename: string
Filename of an image to use as a background to the page. If both
this and background_image are given, then background_image will
take | precedence.
Raises
------
InvalidDimension
If any given dimension is invalid (i.e., the labels cannot fit on
the sheet).
"""
# Compulsory arguments.
self._sheet_width = Decimal(sheet_width)
self._sheet_height = Decimal(sheet_height)
| self._columns = int(columns)
self._rows = int(rows)
self._label_width = Decimal(label_width)
self._label_height = Decimal(label_height)
# Optional arguments; missing ones will be computed later.
self._left_margin = kwargs.pop('left_margin', None)
self._column_gap = kwargs.pop('column_gap', None)
self._right_margin = kwargs.pop('right_margin', None)
self._top_margin = kwargs.pop('top_margin', None)
self._row_gap = kwargs.pop('row_gap', None)
self._bottom_margin = kwargs.pop('bottom_margin', None)
# Optional arguments with default values.
self._left_padding = kwargs.pop('left_padding', 0)
self._right_padding = kwargs.pop('right_padding', 0)
self._top_padding = kwargs.pop('top_padding', 0)
self._bottom_padding = kwargs.pop('bottom_padding', 0)
self._corner_radius = Decimal(kwargs.pop('corner_radius', 0))
self._padding_radius = Decimal(kwargs.pop('padding_radius', 0))
self._background_image = kwargs.pop('background_image', None)
self._background_filename = kwargs.pop('background_filename', None)
# Leftover arguments.
if kwargs:
args = kwargs.keys()
if len(args) == 1:
raise TypeError("Unknown keyword argument {}.".format(args[0]))
else:
raise TypeError("Unknown keyword arguments: {}.".format(', '.join(args)))
# Track which attributes have been automatically set.
self._autoset = set()
# Check all the dimensions etc are valid.
self._calculate()
def _calculate(self):
"""Checks the dimensions of the sheet are valid and consistent.
NB: this is called internally when needed; there should be no need for
user code to call it.
"""
# Check the dimensions are larger than zero.
for dimension in ('_sheet_width', '_sheet_height', '_columns', '_rows', '_label_width', '_label_height'):
if getattr(self, dimension) <= 0:
name = dimension.replace('_', ' ').strip().capitalize()
raise InvalidDimension("{0:s} must be greater than zero.".format(name))
# Check margins / gaps are not smaller than zero if given.
# At the same time, force the values to decimals.
for margin in ('_left_margin', '_column_gap', '_right_margin', '_top_margin', '_row_gap', '_bottom_margin',
'_left_padding', '_right_padding', '_top_padding', '_bottom_padding'):
val = getattr(self, margin)
if val is not None:
if margin in self._autoset:
val = None
else:
val = Decimal(val)
if val < 0:
name = margin.replace('_', ' ').strip().capitalize()
raise InvalidDimension("{0:s} cannot be less than zero.".format(name))
setattr(self, margin, val)
else:
self._autoset.add(margin)
# Check the corner radius.
if self._corner_radius < 0:
raise InvalidDimension("Corner radius cannot be less than zero.")
if self._corner_radius > (self._label_width / 2):
raise InvalidDimension("Corner radius cannot be more than half the label width.")
if self._corner_radius > (self._label_height / 2):
raise InvalidDimension("Corner radius cannot be more than half the label height.")
# If there is no padding, we don't need the padding radius.
if (self._left_padding + self._right_padding + self._top_padding + self._bottom_padding) == 0:
if self._padding_radius != 0:
raise InvalidDimension("Padding radius must be zero if there is no padding.")
else:
if (self._left_padding + self._right_padding) >= self._label_width:
raise InvalidDimension("Sum of horizontal padding must be less than the label width.")
if (self._top_padding + self._bottom_padding) >= self._label_height:
raise InvalidDimension("Sum of vertical padding must be less than the label height.")
if self._padding_radius < 0:
raise InvalidDimension("Padding radius cannot be less than zero.")
# Calculate the amount of spare space.
hspace = self._she |
GHubgenius/clusterd | src/platform/jboss/fingerprints/JBoss5WC.py | Python | mit | 196 | 0.005102 | from src.platform.jboss.inter | faces import WebConsoleInterface
class FPrint(WebConsoleInterface):
def __init__(self):
super(FPrint, self).__init__()
self.version = " | 5.0"
|
immanetize/nikola | nikola/plugins/task/sources.py | Python | mit | 3,426 | 0.00146 | # -*- coding: utf-8 -*-
# Copyright © 2012-2015 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the
# Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the
# Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice
# shall be included in all copies or substantial portions of
# the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import os
from nikola.plugin_categories import Task
from nikola import utils
class Sources(Task):
"""Copy page so | urces into the output."""
name = "render_sources"
def gen_tasks(self):
"""Publish the page sources into the output.
Required keyword arguments:
translations
default_lang
post_pages
output_folder
"""
kw = {
"translations": self.site.config["TRANSLATIONS"],
"output_folder": self.site.config["OUTPUT_FOLDER"],
"default_lang": self.site.config["DEFAULT_LANG"],
"sh | ow_untranslated_posts": self.site.config['SHOW_UNTRANSLATED_POSTS'],
}
self.site.scan_posts()
yield self.group_task()
if self.site.config['COPY_SOURCES']:
for lang in kw["translations"]:
for post in self.site.timeline:
if not kw["show_untranslated_posts"] and lang not in post.translated_to:
continue
if post.meta('password'):
continue
output_name = os.path.join(
kw['output_folder'], post.destination_path(
lang, post.source_ext(True)))
# do not publish PHP sources
if post.source_ext(True) == post.compiler.extension():
continue
source = post.source_path
if lang != kw["default_lang"]:
source_lang = utils.get_translation_candidate(self.site.config, source, lang)
if os.path.exists(source_lang):
source = source_lang
if os.path.isfile(source):
yield {
'basename': 'render_sources',
'name': os.path.normpath(output_name),
'file_dep': [source],
'targets': [output_name],
'actions': [(utils.copy_file, (source, output_name))],
'clean': True,
'uptodate': [utils.config_changed(kw, 'nikola.plugins.task.sources')],
}
|
arpith/zulip | zerver/management/commands/realm_filters.py | Python | apache-2.0 | 2,982 | 0.003689 | from __ | future__ import absolute_import
from __future__ import print_function
from typing import Any
from argparse import ArgumentParser
from optparse import make_option
from django.core.management.base import BaseCommand
from zerv | er.models import RealmFilter, all_realm_filters, get_realm_by_string_id
from zerver.lib.actions import do_add_realm_filter, do_remove_realm_filter
import sys
class Command(BaseCommand):
help = """Create a link filter rule for the specified domain.
NOTE: Regexes must be simple enough that they can be easily translated to JavaScript
RegExp syntax. In addition to JS-compatible syntax, the following features are available:
* Named groups will be converted to numbered groups automatically
* Inline-regex flags will be stripped, and where possible translated to RegExp-wide flags
Example: ./manage.py realm_filters --realm=zulip --op=add '#(?P<id>[0-9]{2,8})' 'https://trac.humbughq.com/ticket/%(id)s'
Example: ./manage.py realm_filters --realm=zulip --op=remove '#(?P<id>[0-9]{2,8})'
Example: ./manage.py realm_filters --realm=zulip --op=show
"""
def add_arguments(self, parser):
# type: (ArgumentParser) -> None
parser.add_argument('-r', '--realm',
dest='string_id',
type=str,
required=True,
help='The subdomain or string_id of the realm to adjust filters for.')
parser.add_argument('--op',
dest='op',
type=str,
default="show",
help='What operation to do (add, show, remove).')
parser.add_argument('pattern', metavar='<pattern>', type=str, nargs='?', default=None,
help="regular expression to match")
parser.add_argument('url_format_string', metavar='<url pattern>', type=str, nargs='?',
help="format string to substitute")
def handle(self, *args, **options):
# type: (*Any, **str) -> None
realm = get_realm_by_string_id(options["string_id"])
if options["op"] == "show":
print("%s: %s" % (realm.domain, all_realm_filters().get(realm.domain, [])))
sys.exit(0)
pattern = options['pattern']
if not pattern:
self.print_help("./manage.py", "realm_filters")
sys.exit(1)
if options["op"] == "add":
url_format_string = options['url_format_string']
if not url_format_string:
self.print_help("./manage.py", "realm_filters")
sys.exit(1)
do_add_realm_filter(realm, pattern, url_format_string)
sys.exit(0)
elif options["op"] == "remove":
do_remove_realm_filter(realm, pattern=pattern)
sys.exit(0)
else:
self.print_help("./manage.py", "realm_filters")
sys.exit(1)
|
ryancoleman/autodock-vina | boost_1_54_0/tools/build/v2/tools/msvc.py | Python | apache-2.0 | 55,098 | 0.008766 | # Copyright (c) 2003 David Abrahams.
# Copyright (c) 2005 Vladimir Prus.
# Copyright (c) 2005 Alexey Pakhunov.
# Copyright (c) 2006 Bojan Resnik.
# Copyright (c) 2006 Ilya Sokolov.
# Copyright (c) 2007 Rene Rivera
# Copyright (c) 2008 Jurko Gospodnetic
# Copyright (c) 2011 Juraj Ivancic
#
# Use, modification and distribution is subject to the Boost Software
# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
# http://www.boost.org/LICENSE_1_0.txt)
################################################################################
#
# MSVC Boost Build toolset module.
# --------------------------------
#
# All toolset versions need to have their location either auto-detected or
# explicitly specified except for the special 'default' version that expects the
# environment to find the needed tools or report an error.
#
################################################################################
from os import environ
import os.path
import re
import _winreg
import bjam
from b2.tools import common, rc, pch, builtin, mc, midl
from b2.build import feature, type, toolset, generators, property_set
from b2.build.property import Property
from b2.util import path
from b2.manager import get_manager
from b2.build.generators import Generator
from b2.build.toolset import flags
from b2.util.utility import to_seq, on_windows
from b2.tools.common import Configurations
__debug = None
def debug():
global __debug
if __debug is None:
__debug = "--debug-configuration" in bjam.variable("ARGV")
return __debug
# It is not yet clear what to do with Cygwin on python port.
def on_cygwin():
return False
type.register('MANIFEST', ['manifest'])
feature.feature('embed-manifest',['on','off'], ['incidental', 'propagated']) ;
type.register('PDB',['pdb'])
################################################################################
#
# Public rules.
#
################################################################################
# Initialize a specific toolset version configuration. As the result, path to
# compiler and, possible, program names are set up, and will be used when that
# version of compiler is requested. For example, you might have:
#
# using msvc : 6.5 : cl.exe ;
# using msvc : 7.0 : Y:/foo/bar/cl.exe ;
#
# The version parameter may be ommited:
#
# using msvc : : Z:/foo/bar/cl.exe ;
#
# The following keywords have special meanings when specified as versions:
# - all - all detected but not yet used versions will be marked as used
# with their default options.
# - default - this is an equivalent to an empty version.
#
# Depending on a supplied version, detected configurations and presence 'cl.exe'
# in the path different results may be achieved. The following table describes
# the possible scenarios:
#
# Nothing "x.y"
# Passed Nothing "x.y" detected, detected,
# version detected detected cl.exe in path cl.exe in path
#
# default Error Use "x.y" Create "default" Use "x.y"
# all None Use all None Use all
# x.y - Use "x.y" - Use "x.y"
# a.b Error Erro | r Create "a.b" Create "a.b"
#
# "x.y" - refers to a detected version;
# "a.b" - refers to an undetected version.
#
# FIXME: Currently the command parameter and the <compiler> property parameter
# seem to overlap in duties. Remove this duplication. This seems to be related
# to why someone started preparing to replace init with configure rules.
def init(version = None, command = None, options = None):
# When initialized from
# using msvc : x.0 ;
# we get version as a single | element list i.e. ['x.0'],
# but when specified from the command line we get a string i.e. 'x.0'.
# We want to work with a string, so unpack the list if needed.
is_single_element_list = (isinstance(version,list) and len(version) == 1)
assert(version==None or isinstance(version,str) or is_single_element_list)
if is_single_element_list:
version = version[0]
options = to_seq(options)
command = to_seq(command)
if command:
options.append("<command>"+command)
configure(version,options)
def configure(version=None, options=None):
if version == "all":
if options:
raise RuntimeError("MSVC toolset configuration: options should be empty when '{}' is specified.".format(version))
# Configure (i.e. mark as used) all registered versions.
all_versions = __versions.all()
if not all_versions:
if debug():
print "notice: [msvc-cfg] Asked to configure all registered" \
"msvc toolset versions when there are none currently" \
"registered." ;
else:
for v in all_versions:
# Note that there is no need to skip already configured
# versions here as this will request configure-really rule
# to configure the version using default options which will
# in turn cause it to simply do nothing in case the version
# has already been configured.
configure_really(v)
elif version == "default":
configure_really(None,options)
else:
configure_really(version, options)
def extend_conditions(conditions,exts):
return [ cond + '/' + ext for cond in conditions for ext in exts ]
def configure_version_specific(toolset_arg, version, conditions):
# Starting with versions 7.0, the msvc compiler have the /Zc:forScope and
# /Zc:wchar_t options that improve C++ standard conformance, but those
# options are off by default. If we are sure that the msvc version is at
# 7.*, add those options explicitly. We can be sure either if user specified
# version 7.* explicitly or if we auto-detected the version ourselves.
if not re.match('^6\\.', version):
toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS',conditions, ['/Zc:forScope','/Zc:wchar_t'])
toolset.flags('{}.compile.c++'.format(toolset_arg), 'C++FLAGS',conditions, ['/wd4675'])
# Explicitly disable the 'function is deprecated' warning. Some msvc
# versions have a bug, causing them to emit the deprecation warning even
# with /W0.
toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS',extend_conditions(conditions,['<warnings>off']), ['/wd4996'])
if re.match('^[78]\\.', version):
# 64-bit compatibility warning deprecated since 9.0, see
# http://msdn.microsoft.com/en-us/library/yt4xw8fh.aspx
toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS',extend_conditions(conditions,['<warnings>all']), ['/Wp64'])
#
# Processor-specific optimization.
#
if re.match('^[67]', version ):
# 8.0 deprecates some of the options.
toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions,['<optimization>speed','<optimization>space']), ['/Ogiy', '/Gs'])
toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions,['<optimization>speed']), ['/Ot'])
toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions,['<optimization>space']), ['/Os'])
cpu_arch_i386_cond = extend_conditions(conditions, __cpu_arch_i386)
toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(cpu_arch_i386_cond,['<instruction-set>']),['/GB'])
toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(cpu_arch_i386_cond,['<instruction-set>i486']),['/G4'])
toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(cpu_arch_i386_cond,['<instruction-set>' + t for t in __cpu_type_g5]), ['/G5'])
toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(cpu_arch_i386_cond,['<instruction-set>' + t for t in __cpu_type_g6]), ['/G6'])
toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', exte |
iamahuman/angr | tests/test_concrete_packed_elf32.py | Python | bsd-2-clause | 3,926 | 0.006113 | import angr
import claripy
import nose
import os
import subprocess
try:
import avatar2
from angr_targets import AvatarGDBConcreteTarget
except ImportError:
raise nose.SkipTest()
binary_x86 = os.path.join(os.path.dirname(os.path.realpath(__file__)),
os.path.join('..', '..', 'binaries', 'tests', 'i386', 'packed_elf32'))
GDB_SERVER_IP = '127.0.0.1'
GDB_SERVER_PORT = 9999
UNPACKING_BINARY = 0xc51466
BINARY_OEP = 0x8048a15
BINARY_DECISION_ADDRESS = 0x8048B30
DROP_STAGE2_V1 = 0x8048BB8
DROP_STAGE2_V2 = 0x08048BED
VENV_DETECTED = 0x8048BFF
FAKE_CC = 0x8048C1C
BINARY_EXECUTION_END = 0x8048C49
avatar_gdb = None
def setup_x86():
#print("gdbserver %s:%s '%s'" % (GDB_SERVER_IP, GDB_SERVER_PORT, binary_x86))
subprocess.Popen("gdbserver %s:%s '%s'" % (GDB_SERVER_IP, GDB_SERVER_PORT, binary_x86), stdout=subprocess.PIPE,
stderr=subprocess.PIPE, shell=True)
def teardown():
global avatar_gdb
if avatar_gdb:
avatar_gdb.exit()
| @nose.with_setup(setup_x86, teardown)
def test_concrete_engine_linux_x86_no_simprocedures():
#print("test_concrete_engine_linux_x86_no_simprocedures")
global avatar_gdb
# pylint: disable=no-member
avatar_gdb = AvatarGDBConcreteTarget(avatar2.archs.x86.X86, GDB_SERVER_IP, GDB_SERVER_PORT)
p = angr.Project(binary_x86, concrete_target=avatar_gdb, use_sim_procedures=False,
page_size=0x1000)
| entry_state = p.factory.entry_state()
solv_concrete_engine_linux_x86(p, entry_state)
@nose.with_setup(setup_x86, teardown)
def test_concrete_engine_linux_x86_unicorn_no_simprocedures():
#print("test_concrete_engine_linux_x86_unicorn_no_simprocedures")
global avatar_gdb
# pylint: disable=no-member
avatar_gdb = AvatarGDBConcreteTarget(avatar2.archs.x86.X86, GDB_SERVER_IP, GDB_SERVER_PORT)
p = angr.Project(binary_x86, concrete_target=avatar_gdb, use_sim_procedures=False,
page_size=0x1000)
entry_state = p.factory.entry_state(add_options=angr.options.unicorn)
solv_concrete_engine_linux_x86(p, entry_state)
def execute_concretly(project, state, address, concretize):
simgr = project.factory.simgr(state)
simgr.use_technique(angr.exploration_techniques.Symbion(find=[address], concretize=concretize))
exploration = simgr.run()
return exploration.stashes['found'][0]
def solv_concrete_engine_linux_x86(p, entry_state):
#print("[1]Executing binary concretely until address: " + hex(UNPACKING_BINARY))
# until unpacking of stub
new_concrete_state = entry_state
# now until stub instructions
for _ in range(0, 4):
new_concrete_state = execute_concretly(p, new_concrete_state, UNPACKING_BINARY, [])
new_concrete_state = execute_concretly(p, new_concrete_state, BINARY_DECISION_ADDRESS, [])
arg0 = claripy.BVS('arg0', 8*32)
symbolic_buffer_address = new_concrete_state.regs.ebp-0xa0
new_concrete_state.memory.store(symbolic_buffer_address, arg0)
# symbolic exploration
simgr = p.factory.simgr(new_concrete_state)
#print("[2]Symbolically executing binary to find dropping of second stage [ address: " + hex(DROP_STAGE2_V1) + " ]")
exploration = simgr.explore(find=DROP_STAGE2_V1, avoid=[DROP_STAGE2_V2, VENV_DETECTED, FAKE_CC])
new_symbolic_state = exploration.stashes['found'][0]
binary_configuration = new_symbolic_state.solver.eval(arg0, cast_to=int)
#print("[3]Executing BINARY concretely with solution found until the end " + hex(BINARY_EXECUTION_END))
execute_concretly(p, new_symbolic_state, BINARY_EXECUTION_END, [(symbolic_buffer_address, arg0)])
#print("[4]BINARY execution ends, the configuration to reach your BB is: " + hex(binary_configuration))
correct_solution = 0xa000000f9ffffff000000000000000000000000000000000000000000000000
nose.tools.assert_true(binary_configuration == correct_solution)
|
kleebaum/bayeosraspberrypi | bayeosraspberrypi/bayeosraspberrypiclient.py | Python | gpl-2.0 | 3,774 | 0.005299 | """Python script to measure temperature, humidity and CO2 concentration with a Raspberry Pi.
A SHT21 sensor and a MCP3424 analog digital converter are connected to gpio pins, i.e. to the I2C bus.
The BayEOSGatewayClient class is extended to transfer data to the BayEOSGateway.
The sender runs in a separate process. Origin frames are sent to distinguish CO2 chambers."""
import sys, numpy # apt-get install python-numpy
from scipy import stats # apt-get install python-scipy
from time import sleep, time
from thread import start_new_thread
from bayeosgatewayclient import BayEOSGatewayClient, bayeos_confparser
from gpio import GPIO
from i2c import I2C
from sht21 import SHT21
from mcp3424 import MCP3424
class RaspberryPiClient(BayEOSGatewayClient):
"""Raspberry Pi client class."""
def init_writer(self):
"""Overwrites the init_writer() method of the BayEOSGatewayClient class."""
# gpio pins
ADDR_PINS = [11, 12, 13, 15, 16, 18] # GPIO 17, 18, 27, 22, 23, 24
DATA_PIN = 24 # GPIO 8
EN_PIN = 26 # GPIO 7
self.gpio = GPIO(ADDR_PINS, EN_PIN, DATA_PIN)
self.init_sensors()
self.addr = 1 # current address
def read_data(self):
"""Overwrites the read_data() method of the BayEOSGatewayClient class."""
# address 0 is reserved for flushing with air
self.gpio.set_addr(0) # set flushing address
sleep(.6) # flush for 60 seconds
self.gpio.reset() # stop flushing
self.gpio.set_addr(self.addr) # start measuring wait 60 seconds, 240 measure
measurement_data = self.measure(3)
self.gpio.reset()
return measurement_data
def save_data(self, values=[], origin='CO2_Chambers'):
"""Overwrites the save_data() method of the BayEOSGatewayClient class."""
self.writer.save(values, origin='RaspberryPi-Chamber-' + str(self.addr))
self.writer.flush()
print 'saved data: ' + str(values)
self.addr += 1
if self.addr > 15:
self.addr = 1
def init_sensors(self):
"""Initializes the I2C Bus including the SHT21 and MCP3424 sensors."""
try:
self.i2c = I2C()
self.sht21 = SHT21(1)
self.mcp3424 = MCP3424(self.i2c.get_smbus())
except IOError as err:
sys.stderr.write('I2C Connection Error: ' + str(err) + '. This must be run as root. Did you use the right device number?')
def measure(self, seconds=10):
"""Measures temperature, humidity and CO2 concentration.
@param seconds: how long should be measured
@return statistically calculated parameters
"""
measured_seconds = []
temp = []
hum = []
co2 = []
start_time = time()
for i in range(0, seconds):
start_new_thread(temp.append, (self.sht21.read_temperature(),))
start_new_thread(hum.append, (self.sht21.read_humidity(),))
start_new_thread(co2.append, (self.mcp3424.read_voltage(1),))
measured_seconds.append(time())
sleep(start_time + i - time() + 1) # to keep in time
mean_temp = numpy.mean(temp)
var_temp = numpy.var(temp)
mean_hum = numpy.mean(hum)
var_hum = numpy.var(hum)
lin_model = stats.linregress(measured_seconds, co2)
slope = lin_model[0]
intercept = lin_model[1]
r_squ | ared = lin_model[2]*lin_model[2]
slope_err = lin_model[4]
return [mean_temp, var_temp, mean_hum, var_hum, slope, intercept, r_squared, slope_err]
OPTIONS = bayeos_confparser('../config/baye | osraspberrypi.ini')
client = RaspberryPiClient(OPTIONS['name'], OPTIONS)
client.run(thread=False) # sender runs in a separate process |
peterwilletts24/Monsoon-Python-Scripts | vort_and_div/vorticity_and_diverg.py | Python | mit | 613 | 0.022838 | """
Load winds on pressure levels and calculate vorticity and divergence
"""
import os, sys
import datetime
import iris
import iris.unit as unit
diag = '30201'
cube_name_u='eastward_wind'
cube_name_v='northward_wind'
pp_file_path='/projects/cascade/pwille/moose_retriev | als/'
#experiment_ids = ['djznw', 'djzny', 'djznq', 'djzns', 'dkjxq', 'dklyu', 'dkmbq', 'dklwu' | , 'dklzq'] # All minus large 3
experiment_ids = ['djznw']
for experiment_id in experiment_ids:
expmin1 = experiment_id[:-1]
#fu = '/projects/cascade/pwille/moose_retrievals/%s/%s/%s.pp' % (expmin1, experiment_id, diag)
try:
print 'hello'
|
CingHu/neutron-ustack | neutron/tests/unit/ofagent/test_arp_lib.py | Python | apache-2.0 | 13,264 | 0.000075 | # Copyright (C) 2014 VA Linux Systems Japan K.K.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# @author: Fumihiko Kakuma, VA Linux Systems Japan K.K.
import collections
import contextlib
import mock
from neutron.openstack.common import importutils
import neutron.plugins.ofagent.agent.metadata as meta
from neutron.tests.unit.ofagent import ofa_test_base
_OFALIB_NAME = 'neutron.plugins.ofagent.agent.arp_lib'
class OFAAgentTestCase(ofa_test_base.OFAAgentTestBase):
def setUp(self):
super(OFAAgentTestCase, self).setUp()
Net = collections.namedtuple('Net', 'net, mac, ip')
self.nets = [Net(net=10, mac='11:11:11:44:55:66', ip='10.1.2.20'),
Net(net=10, mac='11:11:11:44:55:67', ip='10.1.2.21'),
Net(net=20, mac='22:22:22:44:55:66', ip='10.2.2.20')]
self.packet_mod = mock.Mock()
self.proto_ethernet_mod = mock.Mock()
self.proto_vlan_mod = mock.Mock()
self.proto_vlan_mod.vid = 999
self.proto_arp_mod = mock.Mock()
self.fake_get_protocol = mock.Mock(return_value=self.proto_vlan_mod)
self.packet_mod.get_protocol = self.fake_get_protocol
self.fake_add_protocol = mock.Mock()
self.packet_mod.add_protocol = self.fake_add_protocol
self.arp = importutils.import_module('ryu.lib.packet.arp')
self.ethernet = importutils.import_module('ryu.lib.packet.ethernet')
self.vlan = importutils.import_module('ryu.lib.packet.vlan')
mock.patch('ryu.lib.packet.packet.Packet',
return_value=self.packet_mod).start()
self.ryuapp = 'ryuapp'
self.inport = '1'
self.ev = mock.Mock()
self.datapath = self._mk_test_dp('tun_br')
self.ofproto = importutils.import_module('ryu.ofproto.ofproto_v1_3')
self.ofpp = mock.Mock()
self.datapath.ofproto = self.ofproto
self.datapath.ofproto_parser = self.ofpp
self.OFPActionOutput = mock.Mock()
self.OFPActionOutput.return_value = 'OFPActionOutput'
self.ofpp.OFPActionOutput = self.OFPActionOutput
self.msg = mock.Mock()
self.msg.datapath = self.datapath
self.msg.buffer_id = self.ofproto.OFP_NO_BUFFER
self.msg_data = 'test_message_data'
self.msg.data = self.msg_data
self.ev.msg = self.msg
self.msg.match = {'in_port': self.inport,
'metadata': meta.LOCAL | self.nets[0].net}
class TestArpLib(OFAAgentTestCase):
def setUp(self):
super(TestArpLib, self).setUp()
self.mod_arplib = importutils.import_module(_OFALIB_NAME)
self.arplib = self.mod_arplib.ArpLib(self.ryuapp)
self.packet_mod.get_protocol = self._fake_get_protocol
self._fake_get_protocol_ethernet = True
self._fake_get_protocol_vlan = True
self._fake_get_protocol_arp = True
self.br = mock.Mock(datapath=self.datapath)
self.arplib.set_bridge(self.br)
def test__send_unknown_packet_no_buffer(self):
in_port = 3
out_port = self.ofproto.OFPP_TABLE
self.ms | g.buffer_id = self.ofproto.OFP_NO_BUFFER
self.arplib._send_unknown_packet(self.msg, in_port, out_port)
actions = [self.ofp | p.OFPActionOutput(self.ofproto.OFPP_TABLE, 0)]
self.ofpp.OFPPacketOut.assert_called_once_with(
datapath=self.datapath,
buffer_id=self.msg.buffer_id,
in_port=in_port,
actions=actions,
data=self.msg_data)
def test__send_unknown_packet_existence_buffer(self):
in_port = 3
out_port = self.ofproto.OFPP_TABLE
self.msg.buffer_id = 256
self.arplib._send_unknown_packet(self.msg, in_port, out_port)
actions = [self.ofpp.OFPActionOutput(self.ofproto.OFPP_TABLE, 0)]
self.ofpp.OFPPacketOut.assert_called_once_with(
datapath=self.datapath,
buffer_id=self.msg.buffer_id,
in_port=in_port,
actions=actions,
data=None)
    def test__respond_arp(self):
        """_respond_arp answers an ARP request for a known IP.

        Verifies that the reply packet is assembled in ethernet/vlan/arp
        order with src/dst fields swapped, and that it is handed to
        _send_arp_reply on the right datapath/port.
        """
        self.arplib._arp_tbl = {
            self.nets[0].net: {self.nets[0].ip: self.nets[0].mac}}
        port = 3
        arptbl = self.arplib._arp_tbl[self.nets[0].net]
        # The ryu protocol modules double as packet fixtures here.
        pkt_ethernet = self.ethernet
        pkt_vlan = self.vlan
        pkt_arp = self.arp
        pkt_arp.opcode = self.arp.ARP_REQUEST
        pkt_arp.dst_ip = self.nets[0].ip
        with mock.patch.object(
            self.arplib, '_send_arp_reply'
        ) as send_arp_rep_fn:
            self.assertTrue(
                self.arplib._respond_arp(self.datapath, port, arptbl,
                                         pkt_ethernet, pkt_vlan, pkt_arp))
        # Expected reply: ethernet dst/src swapped, our MAC as source.
        ethernet_ethernet = self.ethernet.ethernet(
            ethertype=pkt_ethernet.ethertype,
            dst=pkt_ethernet.src,
            src=self.nets[0].mac)
        vlan_vlan = self.vlan.vlan(cfi=pkt_vlan.cfi,
                                   ethertype=pkt_vlan.ethertype,
                                   pcp=pkt_vlan.pcp,
                                   vid=pkt_vlan.vid)
        # ARP reply mirrors the request: requested IP/MAC become source.
        arp_arp = self.arp.arp(opcode=self.arp.ARP_REPLY,
                               src_mac=self.nets[0].mac,
                               src_ip=pkt_arp.dst_ip,
                               dst_mac=pkt_arp.src_mac,
                               dst_ip=pkt_arp.src_ip)
        # Protocols must be stacked onto the packet in exactly this order.
        self.fake_add_protocol.assert_has_calls([mock.call(ethernet_ethernet),
                                                 mock.call(vlan_vlan),
                                                 mock.call(arp_arp)])
        send_arp_rep_fn.assert_called_once_with(
            self.datapath, port, self.packet_mod)
def _test__respond_arp(self, pkt_arp):
self.arplib._arp_tbl = {
self.nets[0].net: {self.nets[0].ip: self.nets[0].mac}}
port = 3
arptbl = self.arplib._arp_tbl[self.nets[0].net]
pkt_ethernet = mock.Mock()
pkt_vlan = mock.Mock()
self.assertFalse(
self.arplib._respond_arp(self.datapath, port, arptbl,
pkt_ethernet, pkt_vlan, pkt_arp))
def test__respond_arp_non_arp_req(self):
pkt_arp = mock.Mock()
pkt_arp.opcode = self.arp.ARP_REPLY
self._test__respond_arp(pkt_arp)
def test__respond_arp_ip_not_found_in_arptable(self):
pkt_arp = mock.Mock()
pkt_arp.opcode = self.arp.ARP_REQUEST
pkt_arp.dst_ip = self.nets[1].ip
self._test__respond_arp(pkt_arp)
def test_add_arp_table_entry(self):
self.arplib.add_arp_table_entry(self.nets[0].net,
self.nets[0].ip, self.nets[0].mac)
self.assertEqual(
self.arplib._arp_tbl,
{self.nets[0].net: {self.nets[0].ip: self.nets[0].mac}})
def test_add_arp_table_entry_multiple_net(self):
self.arplib.add_arp_table_entry(self.nets[0].net,
self.nets[0].ip, self.nets[0].mac)
self.arplib.add_arp_table_entry(self.nets[2].net,
self.nets[2].ip, self.nets[2].mac)
self.assertEqual(
self.arplib._arp_tbl,
{self.nets[0].net: {self.nets[0].ip: self.nets[0].mac},
self.nets[2].net: {self.nets[2].ip: self.nets[2].mac}})
def test_add_arp_table_entry_multiple_ip(self):
self.arplib.add_arp_table_entry(self.nets[0].net,
self.nets[0].ip, self.nets[0].mac)
self.arplib.add_arp_table_entry(self.nets[0].net,
self.nets[1].ip, self.nets[1].mac)
|
artdent/jgments | lib/pygments-1.2.2-patched/pygments/lexers/web.py | Python | bsd-2-clause | 31,156 | 0.001605 | # -*- coding: utf-8 -*-
"""
pygments.lexers.web
~~~~~~~~~~~~~~~~~~~
Lexers for web-related languages and markup.
:copyright: Copyright 2006-2010 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
# Python < 2.4 compatibility: 'set' only became a builtin in 2.4, so fall
# back to the (long-deprecated) 'sets' module on older interpreters.
try:
    set
except NameError:
    from sets import Set as set
from pygments.lexer import RegexLexer, bygroups, using, include, this
from pygments.token import \
Text, Comment, Operator, Keyword, Name, String, Number, Other, Punctuation
from pygments.util import get_bool_opt, get_list_opt, looks_like_xml, \
html_doctype_matches
__all__ = ['HtmlLexer', 'XmlLexer', 'JavascriptLexer', 'CssLexer',
'PhpLexer', 'ActionScriptLexer', 'XsltLexer', 'ActionScript3Lexer',
'MxmlLexer']
class JavascriptLexer(RegexLexer):
    """
    For JavaScript source code.
    """

    name = 'JavaScript'
    aliases = ['js', 'javascript']
    filenames = ['*.js']
    mimetypes = ['application/x-javascript', 'text/x-javascript', 'text/javascript']
    # DOTALL lets '.' in the multi-line comment rule span newlines.
    flags = re.DOTALL
    tokens = {
        # Comment/whitespace rules shared by the states below.
        'commentsandwhitespace': [
            (r'\s+', Text),
            (r'<!--', Comment),
            (r'//.*?\n', Comment.Single),
            (r'/\*.*?\*/', Comment.Multiline)
        ],
        # Entered after tokens that may legally be followed by a regex
        # literal; disambiguates '/' as regex start vs. division operator.
        'slashstartsregex': [
            include('commentsandwhitespace'),
            (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
             r'([gim]+\b|\B)', String.Regex, '#pop'),
            # A '/' that did not match the regex rule: hand over to
            # 'badregex' which skips to the end of the line.
            (r'(?=/)', Text, ('#pop', 'badregex')),
            (r'', Text, '#pop')
        ],
        'badregex': [
            ('\n', Text, '#pop')
        ],
        'root': [
            (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
            include('commentsandwhitespace'),
            # Operators after which a regex literal may follow.
            (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
             r'(<<|>>>?|==?|!=?|[-<>+*%&\|\^/])=?', Operator, 'slashstartsregex'),
            (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
            (r'[})\].]', Punctuation),
            (r'(for|in|while|do|break|return|continue|switch|case|default|if|else|'
             r'throw|try|catch|finally|new|delete|typeof|instanceof|void|'
             r'this)\b', Keyword, 'slashstartsregex'),
            (r'(var|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
            # Future-reserved words (mostly unused in real JS).
            (r'(abstract|boolean|byte|char|class|const|debugger|double|enum|export|'
             r'extends|final|float|goto|implements|import|int|interface|long|native|'
             r'package|private|protected|public|short|static|super|synchronized|throws|'
             r'transient|volatile)\b', Keyword.Reserved),
            (r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
            (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
             r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
             r'decodeURIComponent|encodeURI|encodeURIComponent|'
             r'Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|'
             r'window)\b', Name.Builtin),
            (r'[$a-zA-Z_][a-zA-Z0-9_]*', Name.Other),
            (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
            (r'0x[0-9a-fA-F]+', Number.Hex),
            (r'[0-9]+', Number.Integer),
            (r'"(\\\\|\\"|[^"])*"', String.Double),
            (r"'(\\\\|\\'|[^'])*'", String.Single),
        ]
    }
class ActionScriptLexer(RegexLexer):
"""
For ActionScript source code.
*New in Pygments 0.9.*
"""
name = 'ActionScript'
aliases = ['as', 'actionscript']
filenames = ['*.as']
mimetypes = ['application/x-actionscript', 'text/x-actionscript',
'text/actionscript']
flags = re.DOTALL
tokens = {
'root': [
(r'\s+', Text),
(r'//.*?\n', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline),
(r'/(\\\\|\\/|[^/\n])*/[gim]*', String.Regex),
(r'[~\^\*!%&<>\|+=:;,/?\\-]+', Operator),
(r'[{}\[\]();.]+', Punctuation),
(r'(case|default|for|each|in|while|do|break|return|continue|if|else|'
r'throw|try|catch|var|with|new|typeof|arguments|instanceof|this|'
r'switch)\b', Keyword),
(r'(class|public|final|internal|native|override|private|protected|'
r'static|import|extends|implements|interface|intrinsic|return|super|'
r'dynamic|function|const|get|namespace|package|set)\b',
Keyword.Declaration),
(r'(true|false|null|NaN|Infinity|-Infinity|undefined|Void)\b',
Keyword.Constant),
(r'(Accessibility|AccessibilityProperties|ActionScriptVersion|'
r'ActivityEvent|AntiAliasType|ApplicationDomain|AsBroadcaster|Array|'
r'AsyncErrorEvent|AVM1Movie|BevelFilter|Bitmap|BitmapData|'
r'BitmapDataChannel|BitmapFilter|BitmapFilterQuality|BitmapFilterType|'
r'BlendMode|BlurFilter|Boolean|ByteArray|Camera|Capabilities|CapsStyle|'
r'Class|Color|ColorMatrixFilter|ColorTransform|ContextMenu|'
r'ContextMenuBuiltInItems|ContextMenuEvent|ContextMenuItem|'
r'ConvultionFilter|CSMSettings|DataEvent|Date|DefinitionError|'
r'DeleteObjectSample|Dictionary|DisplacmentMapFilter|DisplayObject|'
r'DisplacmentMapFilterMode|DisplayObjectContainer|DropShadowFilter|'
r'Endian|EOFError|Error|ErrorEvent|EvalError|Event|EventDispatcher|'
r'EventPhase|ExternalInterface|FileFilter|FileReference|'
r'FileReferenceList|FocusDirection|FocusEvent|Font|FontStyle|FontType|'
r'FrameLabel|FullScreenEvent|Function|GlowFilter|GradientBevelFilter|'
r'GradientGlowFi | lter|Grad | ientType|Graphics|GridFitType|HTTPStatusEvent|'
r'IBitmapDrawable|ID3Info|IDataInput|IDataOutput|IDynamicPropertyOutput'
r'IDynamicPropertyWriter|IEventDispatcher|IExternalizable|'
r'IllegalOperationError|IME|IMEConversionMode|IMEEvent|int|'
r'InteractiveObject|InterpolationMethod|InvalidSWFError|InvokeEvent|'
r'IOError|IOErrorEvent|JointStyle|Key|Keyboard|KeyboardEvent|KeyLocation|'
r'LineScaleMode|Loader|LoaderContext|LoaderInfo|LoadVars|LocalConnection|'
r'Locale|Math|Matrix|MemoryError|Microphone|MorphShape|Mouse|MouseEvent|'
r'MovieClip|MovieClipLoader|Namespace|NetConnection|NetStatusEvent|'
r'NetStream|NewObjectSample|Number|Object|ObjectEncoding|PixelSnapping|'
r'Point|PrintJob|PrintJobOptions|PrintJobOrientation|ProgressEvent|Proxy|'
r'QName|RangeError|Rectangle|ReferenceError|RegExp|Responder|Sample|Scene|'
r'ScriptTimeoutError|Security|SecurityDomain|SecurityError|'
r'SecurityErrorEvent|SecurityPanel|Selection|Shape|SharedObject|'
r'SharedObjectFlushStatus|SimpleButton|Socket|Sound|SoundChannel|'
r'SoundLoaderContext|SoundMixer|SoundTransform|SpreadMethod|Sprite|'
r'StackFrame|StackOverflowError|Stage|StageAlign|StageDisplayState|'
r'StageQuality|StageScaleMode|StaticText|StatusEvent|String|StyleSheet|'
r'SWFVersion|SyncEvent|SyntaxError|System|TextColorType|TextField|'
r'TextFieldAutoSize|TextFieldType|TextFormat|TextFormatAlign|'
r'TextLineMetrics|TextRenderer|TextSnapshot|Timer|TimerEvent|Transform|'
r'TypeError|uint|URIError|URLLoader|URLLoaderDataFormat|URLRequest|'
r'URLRequestHeader|URLRequestMethod|URLStream|URLVariabeles|VerifyError|'
r'Video|XML|XMLDocument|XMLList|XMLNode|XMLNodeType|XMLSocket|XMLUI)\b',
Name.Builtin),
(r'(decodeURI|decodeURIComponent|encodeURI|escape|eval|isFinite|isNaN|'
r'isXMLName|clearInterval|fscommand|getTimer|getURL|getVersion|'
r'isFinite|parseFloat|parseInt|setInterval|trace|updateAfterEvent|'
r'unescape)\b',Name.Function),
(r'[$a-zA-Z_][a-zA-Z0-9_]*', Name.Other),
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'0x[0-9a-f]+', Number.Hex),
(r'[0-9]+', Number.Integer),
(r'"(\ |
leppa/home-assistant | homeassistant/components/alexa/config.py | Python | apache-2.0 | 2,151 | 0 | """Config helpers for Alexa."""
from homeassistant.core import callback
from .state_report import async_enable_proactive_mode
class AbstractConfig:
    """Hold the configuration for Alexa.

    Concrete integrations subclass this and override the properties /
    token methods; the base implementations are safe defaults.
    """

    # Task handle returned by async_create_task while proactive mode is
    # active; resolves to an unsubscribe callable.  None when disabled.
    _unsub_proactive_report = None

    def __init__(self, hass):
        """Initialize abstract config."""
        self.hass = hass

    @property
    def supports_auth(self):
        """Return if config supports auth."""
        return False

    @property
    def should_report_state(self):
        """Return if states should be proactively reported."""
        return False

    @property
    def endpoint(self):
        """Endpoint for report state."""
        return None

    @property
    def entity_config(self):
        """Return entity config."""
        return {}

    @property
    def is_reporting_states(self):
        """Return if proactive mode is enabled."""
        return self._unsub_proactive_report is not None

    async def async_enable_proactive_mode(self):
        """Enable proactive mode."""
        if self._unsub_proactive_report is None:
            self._unsub_proactive_report = self.hass.async_create_task(
                async_enable_proactive_mode(self.hass, self)
            )
        try:
            await self._unsub_proactive_report
        except Exception:  # pylint: disable=broad-except
            # Reset the handle so a later retry can start over, then
            # surface the failure to the caller.
            self._unsub_proactive_report = None
            raise

    async def async_disable_proactive_mode(self):
        """Disable proactive mode."""
        # Guard: without this, disabling while proactive mode was never
        # enabled awaited None and raised TypeError.
        if self._unsub_proactive_report is None:
            return
        unsub_func = await self._unsub_proactive_report
        if unsub_func:
            unsub_func()
        self._unsub_proactive_report = None

    @callback
    def should_expose(self, entity_id):
        """If an entity should be exposed."""
        # pylint: disable=no-self-use
        return False

    @callback
    def async_invalidate_access_token(self):
        """Invalidate access token."""
        raise NotImplementedError

    async def async_get_access_token(self):
        """Get an access token."""
        raise NotImplementedError

    async def async_accept_grant(self, code):
        """Accept a grant."""
        raise NotImplementedError
|
sosey/ginga | ginga/qtw/plugins/Catalogs.py | Python | bsd-3-clause | 19,290 | 0.003784 | #
# Catalogs.py -- Catalogs plugin for fits viewer
#
# Eric Jeschke (eric@naoj.org)
#
# Copyright (c) Eric R. Jeschke. All rights reserved.
# This is open-source software licensed under a BSD license.
# Please see the file LICENSE.txt for details.
#
from __future__ import print_function
from ginga.qtw.QtHelp import QtGui, QtCore
from ginga.qtw import QtHelp
from ginga.qtw import ColorBar
from ginga.misc import Bunch
from ginga.misc.plugins import CatalogsBase
class Catalogs(CatalogsBase.CatalogsBase):
    def __init__(self, fv, fitsimage):
        # All plugin logic lives in CatalogsBase; this Qt subclass only
        # supplies the GUI (see build_gui).
        super(Catalogs, self).__init__(fv, fitsimage)
def build_gui(self, container, future=None):
vbox1 = QtHelp.VBox()
msgFont = self.fv.getFont("sansFont", 14)
tw = QtGui.QLabel()
tw.setFont(msgFont)
tw.setWordWrap(True)
self.tw = tw
fr = QtHelp.Frame("Instructions")
fr.addWidget(tw, stretch=1, alignment=QtCore.Qt.AlignTop)
vbox1.addWidget(fr, stretch=0, alignment=QtCore.Qt.AlignTop)
nb = QtHelp.TabWidget()
nb.setTabPosition(QtGui.QTabWidget.South)
nb.setUsesScrollButtons(True)
self.w.nb = nb
#vbox1.addWidget(nb, stretch=1, alignment=QtCore.Qt.AlignTop)
vbox1.addWidget(nb, stretch=1)
vbox0 = QtHelp.VBox()
hbox = QtHelp.HBox()
hbox.setSpacing(4)
vbox0.addWidget(hbox, stretch=1, alignment=QtCore.Qt.AlignTop)
vbox = QtHelp.VBox()
fr = QtHelp.Frame(" Image Server ")
fr.addWidget(vbox, stretch=1, alignment=QtCore.Qt.AlignTop)
hbox.addWidget(fr, stretch=0, alignment=QtCore.Qt.AlignLeft)
captions = (('Server', 'xlabel'),
('@Server', 'combobox'),
('Use DSS channel', 'checkbutton'),
('Get Image', 'button'))
w, b = QtHelp.build_info(captions)
self.w.update(b)
self.w.get_image.clicked.connect(self.getimage_cb)
self.w.use_dss_channel.setChecked(self.use_dss_channel)
self.w.use_dss_channel.stateChanged.connect(self.use_dss_channel_cb)
vbox.addWidget(w, stretch=0, alignment=QtCore.Qt.AlignTop)
self.w.img_params = QtHelp.StackedWidget()
vbox.addWidget(self.w.img_params, stretch=1,
alignment=QtCore.Qt.AlignTop)
combobox = self.w.server
index = 0
self.image_server_options = self.fv.imgsrv.getServerNames(kind='image')
for name in self.image_server_options:
| combobox.addItem(name)
index += 1
index = 0
combobox.setCurrentIndex(index)
combobox.activated.connect(self.setup_params_image)
if len(self.image_server_options) > 0:
self.setup_params_image(index, redo=False)
vbox = QtHelp.VBox()
fr = QtHelp.Frame(" Catalog Server ")
fr.addWidget(vbox, stretch=1, alignment=QtCore.Qt.AlignTop)
hbox.addWidget(fr, stret | ch=0, alignment=QtCore.Qt.AlignLeft)
captions = (('Server', 'xlabel'),
('@Server', 'combobox'),
('Limit stars to area', 'checkbutton'),
('Search', 'button'))
w, self.w2 = QtHelp.build_info(captions)
self.w2.search.clicked.connect(self.getcatalog_cb)
self.w2.limit_stars_to_area.setChecked(self.limit_stars_to_area)
self.w2.limit_stars_to_area.stateChanged.connect(self.limit_area_cb)
vbox.addWidget(w, stretch=0, alignment=QtCore.Qt.AlignTop)
self.w2.cat_params = QtHelp.StackedWidget()
vbox.addWidget(self.w2.cat_params, stretch=1,
alignment=QtCore.Qt.AlignTop)
combobox = self.w2.server
index = 0
self.catalog_server_options = self.fv.imgsrv.getServerNames(kind='catalog')
for name in self.catalog_server_options:
combobox.addItem(name)
index += 1
index = 0
combobox.setCurrentIndex(index)
combobox.activated.connect(self.setup_params_catalog)
if len(self.catalog_server_options) > 0:
self.setup_params_catalog(index, redo=False)
btns = QtHelp.HBox()
btns.setSpacing(5)
btn = QtGui.QRadioButton("Rectangle")
if self.drawtype == 'rectangle':
btn.setChecked(True)
btn.toggled.connect(lambda tf: self.set_drawtype_cb(tf, 'rectangle'))
btns.addWidget(btn, stretch=0, alignment=QtCore.Qt.AlignLeft)
btn = QtGui.QRadioButton("Circle")
if self.drawtype == 'circle':
btn.setChecked(True)
btn.toggled.connect(lambda tf: self.set_drawtype_cb(tf, 'circle'))
btns.addWidget(btn, stretch=0, alignment=QtCore.Qt.AlignLeft)
btn = QtGui.QPushButton("Entire image")
btn.clicked.connect(self.setfromimage)
btns.addWidget(btn, stretch=0, alignment=QtCore.Qt.AlignLeft)
vbox0.addWidget(btns, stretch=0, alignment=QtCore.Qt.AlignTop)
self.w.params = vbox0
sw = QtGui.QScrollArea()
sw.setWidgetResizable(True)
sw.setWidget(vbox0)
nb.addTab(sw, "Params")
vbox = QtHelp.VBox()
self.table = CatalogListing(self.logger, vbox)
hbox = QtHelp.HBox()
adj = QtGui.QScrollBar(QtCore.Qt.Horizontal)
adj.setRange(0, 1000)
adj.setSingleStep(1)
adj.setPageStep(10)
#adj.setMaximum(1000)
adj.setValue(0)
#adj.resize(200, -1)
adj.setTracking(True)
adj.setToolTip("Choose subset of stars plotted")
self.w.plotgrp = adj
adj.valueChanged.connect(self.plot_pct_cb)
hbox.addWidget(adj, stretch=1)
sb = QtGui.QSpinBox()
sb.setRange(10, self.plot_max)
sb.setValue(self.plot_limit)
sb.setSingleStep(10)
adj.setPageStep(100)
sb.setWrapping(False)
self.w.plotnum = sb
sb.setToolTip("Adjust size of subset of stars plotted")
sb.valueChanged.connect(self.plot_limit_cb)
hbox.addWidget(sb, stretch=0)
vbox.addWidget(hbox, stretch=0)
self.w.listing = vbox
nb.addTab(vbox, "Listing")
btns = QtHelp.HBox()
btns.setSpacing(3)
#btns.set_child_size(15, -1)
self.w.buttons = btns
btn = QtGui.QPushButton("Close")
btn.clicked.connect(self.close)
btns.addWidget(btn, stretch=0, alignment=QtCore.Qt.AlignLeft)
if future:
btn = QtGui.QPushButton('Ok')
btn.clicked.connect(lambda w: self.ok())
btns.addWidget(btn, stretch=0, alignment=QtCore.Qt.AlignLeft)
btn = QtGui.QPushButton('Cancel')
btn.clicked.connect(lambda w: self.cancel())
btns.addWidget(btn, stretch=0, alignment=QtCore.Qt.AlignLeft)
vbox1.addWidget(btns, stretch=0)
cw = container.get_widget()
cw.addWidget(vbox1, stretch=1)
def limit_area_cb(self, tf):
self.limit_stars_to_area = (tf != 0)
return True
def use_dss_channel_cb(self, tf):
self.use_dss_channel = (tf != 0)
return True
def plot_pct_cb(self):
val = self.w.plotgrp.value()
self.plot_start = int(val)
self.replot_stars()
return True
def _update_plotscroll(self):
num_stars = len(self.starlist)
if num_stars > 0:
adj = self.w.plotgrp
page_size = self.plot_limit
self.plot_start = min(self.plot_start, num_stars-1)
adj.setRange(0, num_stars)
adj.setSingleStep(1)
adj.setPageStep(page_size)
self.replot_stars()
def plot_limit_cb(self):
val = self.w.plotnum.value()
self.plot_limit = int(val)
self._update_plotscroll()
return True
    def set_message(self, msg):
        # Display `msg` in the instructions text widget.
        self.tw.setText(msg)
    def _raise_tab(self, w):
        # Bring the notebook tab containing widget `w` to the front.
        self.w.nb.setCurrentWidget(w)
    def _get_cbidx(self, w):
        # Return the currently selected index of combobox `w`.
        return w.currentIndex()
def _setup_params(self, obj, container):
params = obj.getParams()
captions = []
paramList = so |
MaxHalford/Prince | tests/test_pca.py | Python | mit | 3,331 | 0.002101 | import unittest
import matplotlib as mpl
import numpy as np
import pandas as pd
from sklearn import datasets
from sklearn import decomposition
from sklearn.utils import estimator_checks
import prince
class TestPCA(unittest.TestCase):
    """Tests for prince.PCA against sklearn and the estimator contract.

    Fixes corrupted tokens in the original ("u | nittest", "pri | nce").
    """

    def setUp(self):
        """Load the iris data into a labeled DataFrame fixture."""
        X, _ = datasets.load_iris(return_X_y=True)
        # NOTE(review): 'Sepal length' appears twice — the fourth column is
        # presumably meant to be 'Petal width'; kept as-is to preserve the
        # fixture the assertions were written against.
        columns = ['Sepal length', 'Sepal width', 'Petal length', 'Sepal length']
        self.X = pd.DataFrame(X, columns=columns)

    def test_fit_pandas_dataframe(self):
        """fit() accepts a DataFrame and returns the estimator."""
        pca = prince.PCA(n_components=2, engine='fbpca')
        self.assertTrue(isinstance(pca.fit(self.X), prince.PCA))

    def test_transform_pandas_dataframe(self):
        """transform() on a DataFrame yields a DataFrame."""
        pca = prince.PCA(n_components=2)
        self.assertTrue(isinstance(pca.fit(self.X).transform(self.X), pd.DataFrame))

    def test_fit_numpy_array(self):
        """fit() also accepts a plain numpy array."""
        pca = prince.PCA(n_components=2, engine='fbpca')
        self.assertTrue(isinstance(pca.fit(self.X.values), prince.PCA))

    def test_transform_numpy_array(self):
        """transform() on a numpy array still yields a DataFrame."""
        pca = prince.PCA(n_components=2)
        self.assertTrue(isinstance(pca.fit(self.X.values).transform(self.X.values), pd.DataFrame))

    def test_copy(self):
        """copy=True must leave the input untouched; copy=False may not."""
        XX = np.copy(self.X)

        pca = prince.PCA(n_components=2, copy=True)
        pca.fit(XX)
        np.testing.assert_array_equal(self.X, XX)

        pca = prince.PCA(n_components=2, copy=False)
        pca.fit(XX)
        self.assertRaises(AssertionError, np.testing.assert_array_equal, self.X, XX)

    def test_fit_transform(self):
        """fit_transform() must equal fit().transform(), rescaled or not."""
        # Without rescaling
        prince_pca = prince.PCA(n_components=3, rescale_with_mean=False, rescale_with_std=False)
        pd.testing.assert_frame_equal(
            prince_pca.fit_transform(self.X),
            prince_pca.fit(self.X).transform(self.X)
        )

        # With rescaling
        prince_pca = prince.PCA(n_components=3, rescale_with_mean=True, rescale_with_std=True)
        pd.testing.assert_frame_equal(
            prince_pca.fit_transform(self.X),
            prince_pca.fit(self.X).transform(self.X)
        )

    def test_compare_sklearn(self):
        """prince.PCA must agree numerically with sklearn's PCA."""
        n_components = 4
        pca_prince = prince.PCA(n_components=n_components, rescale_with_std=False)
        pca_sklearn = decomposition.PCA(n_components=n_components)

        pca_prince.fit(self.X)
        pca_sklearn.fit(self.X)

        # Compare eigenvalues (sklearn exposes singular values instead).
        np.testing.assert_array_almost_equal(
            pca_prince.eigenvalues_,
            np.square(pca_sklearn.singular_values_),
        )

        # Compare row projections
        np.testing.assert_array_almost_equal(
            pca_prince.transform(self.X),
            pca_sklearn.transform(self.X)
        )

        # Compare explained inertia
        np.testing.assert_array_almost_equal(
            pca_prince.explained_inertia_,
            pca_sklearn.explained_variance_ratio_
        )

    def test_explained_inertia_(self):
        """Explained inertia over all components must sum to 1."""
        pca = prince.PCA(n_components=4)
        pca.fit(self.X)
        self.assertTrue(np.isclose(sum(pca.explained_inertia_), 1))

    def test_plot_row_coordinates(self):
        """Plotting returns a matplotlib Axes."""
        pca = prince.PCA(n_components=4)
        pca.fit(self.X)
        ax = pca.plot_row_coordinates(self.X)
        self.assertTrue(isinstance(ax, mpl.axes.Axes))

    def test_check_estimator(self):
        """prince.PCA must satisfy the sklearn estimator contract."""
        estimator_checks.check_estimator(prince.PCA)
|
gion86/awlsim | awlsim/gui/hwmodconfig.py | Python | gpl-2.0 | 12,877 | 0.029976 | # -*- coding: utf-8 -*-
#
# AWL simulator - GUI hardware module configuration widget
#
# Copyright 2015 Michael Buesch <m@bues.ch>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
from __future__ import division, absolute_import, print_function, unicode_literals
from awlsim.common.compat import *
from awlsim.gui.configdialog import *
from awlsim.gui.util import *
class HwmodParamModel(QAbstractTableModel):
    """Qt table model exposing the parameters of one hardware module.

    Column 0 holds the parameter name, column 1 its value.  One extra
    virtual row is appended after the real parameters so the user can
    type a new parameter name there to add it.
    """

    # Signal: Emitted, if a new error appeared or an old error disappeared.
    newErrorText = Signal(str)

    def __init__(self):
        QAbstractTableModel.__init__(self)
        self.modDesc = None # HwmodDescriptor
        self.modInterface = None # HardwareInterface

    @property
    def __params(self):
        """Return a list of tuples: (paramName, paramValue)"""
        # Sorted by name so row order is stable across model resets.
        if not self.modDesc:
            return []
        return sorted(list(self.modDesc.getParameters().items()),
                      key = lambda p: p[0])

    def __getParamDesc(self, paramName):
        # Look up the interface's parameter descriptor, or None for
        # free-form (user-defined) parameters.
        if self.modInterface:
            return self.modInterface.getParamDesc(paramName)
        return None

    def __verifyParams(self):
        # Validate the current parameter set by instantiating the module;
        # emit the error text (or an empty string) via newErrorText.
        if self.modInterface and self.modDesc:
            try:
                # Create a module instance.
                # This will raise errors on invalid parameters.
                self.modInterface(None, self.modDesc.getParameters())
            except AwlSimError as e:
                self.newErrorText.emit(str(e))
                return
        self.newErrorText.emit("")

    def setHwmod(self, modDesc):
        """Switch the model to the given HwmodDescriptor (or None)."""
        self.beginResetModel()
        try:
            if not modDesc:
                raise ValueError
            mod = HwModLoader.loadModule(modDesc.getModuleName())
            interface = mod.getInterface()
        except (AwlSimError, ValueError) as e:
            # Module failed to load; fall back to descriptor-only mode
            # without per-parameter descriptors.
            interface = None
        self.modInterface = interface
        self.modDesc = modDesc
        self.endResetModel()

    def deleteEntry(self, row):
        """Remove the parameter shown at `row`, if it exists."""
        params = self.__params
        if row >= 0 and row < len(params):
            self.beginResetModel() # inefficient
            pName, pValue = params[row]
            self.modDesc.removeParameter(pName)
            self.endResetModel()
            self.__verifyParams()

    def rowCount(self, parent=QModelIndex()):
        # +1 for the virtual "add new parameter" row.
        return len(self.__params) + 1

    def columnCount(self, parent=QModelIndex()):
        return 2

    def data(self, index, role=Qt.DisplayRole):
        """Qt data accessor: text, background color and tooltips."""
        if not index:
            return None
        row, column = index.row(), index.column()
        if role in (Qt.DisplayRole, Qt.EditRole):
            params = self.__params
            if row >= len(params):
                # Virtual "new" row has no content yet.
                return None
            if column == 0:
                return params[row][0]
            elif column == 1:
                paramDesc = self.__getParamDesc(params[row][0])
                if paramDesc and not paramDesc.userEditable:
                    # Not user editable
                    if params[row][1] is None:
                        return "<set by system>"
                if params[row][1] is None:
                    return ""
                return params[row][1]
        elif role == Qt.BackgroundRole:
            params = self.__params
            if row < len(params):
                paramDesc = self.__getParamDesc(params[row][0])
                if paramDesc and not paramDesc.userEditable:
                    # Not user editable
                    return QBrush(QColor("darkgrey"))
                if column == 0:
                    if paramDesc:
                        # This is a standard parameter.
                        return QBrush(QColor("lightgrey"))
            return QBrush(QColor("white"))
        elif role in (Qt.ToolTipRole, Qt.WhatsThisRole):
            params = self.__params
            if row < len(params):
                paramDesc = self.__getParamDesc(params[row][0])
                if column == 0:
                    if paramDesc and paramDesc.description:
                        return paramDesc.description
                    return "The parameter's name"
                elif column == 1:
                    return "Value for '%s'" %\
                           params[row][0]
            else:
                if column == 0:
                    return "New parameter name"
        return None

    def headerData(self, section, orientation, role=Qt.DisplayRole):
        # Horizontal: column titles.  Vertical: 1-based row numbers, with
        # "new" for the virtual add-row.
        if role != Qt.DisplayRole:
            return None
        if orientation == Qt.Horizontal:
            return ("Parameter", "Value")[section]
        else:
            params = self.__params
            if section >= len(params):
                return "new"
            return "%d" % (section + 1)

    def setData(self, index, value, role=Qt.EditRole):
        """Qt edit hook: add, rename or re-value a parameter."""
        if not index:
            return False
        row, column = index.row(), index.column()
        if role == Qt.EditRole:
            if not self.modDesc:
                return False
            params = self.__params
            if row >= len(params):
                if column == 0:
                    # Add parameter
                    value = value.strip()
                    for pName, pValue in params:
                        if pName == value:
                            # Parameter does already exist
                            return False
                    self.beginResetModel() # inefficient
                    self.modDesc.addParameter(value, "")
                    self.endResetModel()
                    self.__verifyParams()
                    return True
            else:
                paramName = params[row][0]
                if column == 0:
                    # Rename parameter
                    for pName, pValue in params:
                        if pName == value:
                            # Parameter does already exist
                            return False
                    self.beginResetModel() # inefficient
                    savedValue = params[row][1]
                    self.modDesc.removeParameter(paramName)
                    self.modDesc.addParameter(value, savedValue)
                    self.endResetModel()
                    self.__verifyParams()
                    return True
                elif column == 1:
                    # Set parameter value
                    self.modDesc.setParameterValue(paramName, value)
                    self.__verifyParams()
                    return True
        return False

    def flags(self, index):
        # System-set parameters are read-only; the name cell of the
        # virtual add-row is editable, its value cell is not.
        if not index:
            return Qt.ItemIsEnabled
        row, column = index.row(), index.column()
        params = self.__params
        if row < len(params):
            paramDesc = self.__getParamDesc(params[row][0])
            if paramDesc and not paramDesc.userEditable:
                return Qt.ItemIsEnabled | Qt.ItemIsSelectable
        else:
            if column != 0:
                return Qt.ItemIsEnabled
        return Qt.ItemIsEnabled | Qt.ItemIsSelectable | Qt.ItemIsEditable
class HwmodParamView(QTableView):
    """Table view for editing the parameters of a hardware module."""

    def __init__(self, model=None, parent=None):
        """model: optional HwmodParamModel; a fresh one is created if omitted."""
        QTableView.__init__(self, parent)
        self.setModel(model or HwmodParamModel())
        self.setColumnWidth(0, 170)
        self.setColumnWidth(1, 80)

    def deleteEntry(self, index=None):
        """Delete the parameter at `index`, or at the current selection."""
        target = index or self.currentIndex()
        if not target:
            return
        self.model().deleteEntry(target.row())

    def keyPressEvent(self, ev):
        """Let the base view handle the key, then map Delete to deleteEntry."""
        super(HwmodParamView, self).keyPressEvent(ev)
        if ev.key() == Qt.Key_Delete:
            self.deleteEntry()

    def setHwmod(self, modDesc):
        """Forward the module descriptor to the underlying model."""
        model = self.model()
        model.setHwmod(modDesc)
class HwmodConfigWidget(QWidget):
def __init__(self, parent=None):
QWidget.__init__(self, parent)
self.setLayout(QGridLayout())
self.layout().setContentsMargins(QMargins())
self.__loadedModDescs = []
self.__modSelectChangeBlocked = Blocker()
group = QGroupBox(self)
group.setLayout(QGridLayout())
label = QLabel("Available modules:", self)
group.layout().addWidget(label, 0, 0)
self.availList = QListWidget(self)
self.availList.setMaximumWidth(180)
group.layout().addWidget(self.availList, 1, 0)
self.manualModName = QLineEdit(self)
self.manualModName.setToolTip("Name of another module to add.\n"
"Note: Typos in the module name will result in "
"errors on CPU startup.")
self.manualModName.setMaximumWidth(180)
group.layout().addWidget(self.manualModName, 2, 0)
vbox = QVBoxLayout()
self.addButton = QPushButton(self)
self.addButton.setIcon( | getIcon("next"))
self.addButton.setToolTip("A | dd the selected module to the "
"project\nand mark it for download to the CPU.")
vbox.addWidget(self.addButton)
self.delButton = QPushButton(self)
self.delButton.setIcon(getIcon("previous"))
self.delButton.setToolTip("Remove the selected module from "
"the project\nand mark it for removal from the CPU.")
vbox.addWidget(self.delButton)
group.layout().addLayout(vbox, 0, 1, 3, 1)
label = QLabel("Loaded modules:", self)
group.layout().addWidget(label, 0, 2)
self.loadedList = QListWidget(self)
self.loadedList.setMaximumWidth(180)
group.layout().addWidget(self.loadedList, 1, 2, 2, 1)
s |
dharmabumstead/ansible | lib/ansible/modules/system/firewalld.py | Python | gpl-3.0 | 35,378 | 0.001555 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Adam Miller (maxamillion@fedoraproject.org)
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: firewalld
short_description: Manage arbitrary ports/services with firewalld
description:
- This module allows for addition or deletion of services and ports either tcp or udp in either running or permanent firewalld rules.
version_added: "1.4"
options:
service:
description:
- "Name of a service to add/remove to/from firewalld - service must be listed in output of firewall-cmd --get-services."
port:
description:
- "Name of a port or port range to add/remove to/from firewalld. Must be in the form PORT/PROTOCOL or PORT-PORT/PROTOCOL for port ranges."
rich_rule:
description:
- "Rich rule to add/remove to/from firewalld."
source:
description:
- 'The source/network you would like to add/remove to/from firewalld'
version_added: "2.0"
interface:
description:
- 'The interface you would like to add/remove to/from a zone in firewalld'
version_added: "2.1"
zone:
description:
- >
The firewalld zone to add/remove to/from (NOTE: default zone can be configured per system but "public" is default from upstream. Available choices
can be extended based on per-system configs, listed here are "out of the box" defaults).
default: system-default(public)
choices: [ "work", "drop", "internal", "external", "trusted", "home", "dmz", "public", "block" ]
permanent:
description:
- >
Should this configuration be in the running firewalld configuration or persist across reboots. As of Ansible version 2.3, permanent operations can
operate on firewalld configs when it's not running (requires firewalld >= 3.0.9). (NOTE: If this is false, immediate is assumed true.)
immediate:
description:
- "Should this configuration be applied immediately, if set as permanent"
type: bool
default: 'no'
version_added: "1.9"
state:
description:
- >
Enable or disable a setting.
For ports: Should this port accept(enabled) or reject(disabled) connections.
The states "present" and "absent" can only be used in zone level operations (i.e. when no other parameters but zone and state are set).
required: true
choices: [ "enabled", "disabled", "present", "absent" ]
timeout:
description:
- "The amount of time the rule should be in effect for when non-permanent."
default: 0
masquerade:
description:
- 'The masquerade setting you would like to enable/disable to/from zones within firewalld'
version_added: "2.1"
notes:
- Not tested on any Debian based system.
- Requires the python2 bindings of firewalld, which may not be installed by default if the distribution switched to python 3
- Zone transactions (creating, deleting) can be performed by using only the zone and state parameters "present" or "absent".
Note that zone transactions must explicitly be permanent. This is a limitation in firewalld.
    This also means that you will have to reload firewalld after adding a zone that you wish to perform immediate actions on.
The module will not take care of this for you implicitly because that would undo any previously performed immediate actions which were not
    permanent. Therefore, if you require immediate access to a newly created zone it is recommended you reload firewalld immediately after the zone
creation returns with a changed state and before you perform any other immediate, non-permanent actions on that zone.
requirements: [ 'firewalld >= 0.2.11' ]
author: "Adam Miller (@maxamillion)"
'''
EXAMPLES = '''
- firewalld:
service: https
permanent: true
state: enabled
- firewalld:
port: 8081/tcp
permanent: true
state: disabled
- firewalld:
port: 161-162/udp
permanent: true
state: enabled
- firewalld:
zone: dmz
service: http
permanent: true
state: enabled
- firewalld:
rich_rule: 'rule service name="ftp" audit limit value="1/m" accept'
permanent: true
state: enabled
- firewalld:
source: 192.0.2.0/24
zone: internal
state: enabled
- firewalld:
zone: trusted
interface: eth2
permanent: true
state: enabled
- firewalld:
masquerade: yes
state: enabled
permanent: true
zone: dmz
- firewalld:
zone: custom
state: present
permanent: true
'''
from ansible.module_utils.basic import AnsibleModule
# globals
module = None
# Imports
try:
import firewall.config
FW_VERSION = firewall.config.VERSION
from firewall.client import Rich_Rule
from firewall.client import FirewallClient
from firewall.client import FirewallClientZoneSettings
from firewall.errors import FirewallError
fw = None
fw_offline = False
import_failure = False
try:
fw = FirewallClient()
fw.getDefaultZone()
except (AttributeError, FirewallError):
# Firewalld is not currently running, permanent-only operations
fw_offline = True
# Import other required parts of the firewalld API
#
# NOTE:
# online and offline operations do not share a common firewalld API
from firewall.core.fw_test import Firewall_test
fw = Firewall_test()
fw.start()
except ImportError:
import_failure = True
class FirewallTransaction(object):
"""
FirewallTransaction
This is the base class for all firewalld transactions we might want to have
"""
global module
    def __init__(self, fw, action_args=(), zone=None, desired_state=None,
                 permanent=False, immediate=False, fw_offline=False,
                 enabled_values=None, disabled_values=None):
        # type: (firewall.client, tuple, str, str, bool, bool, bool, list, list)
        """
        Initialize the transaction.

        :fw: firewall client instance
        :action_args: tuple, args to pass for the action to take place
        :zone: str, firewall zone
        :desired_state: str, the desired state (enabled, disabled, etc.)
        :permanent: bool, action should be permanent
        :immediate: bool, action should take place immediately
        :fw_offline: bool, action takes place as if the firewall were offline
        :enabled_values: str[], acceptable values for enabling something (default: enabled)
        :disabled_values: str[], acceptable values for disabling something (default: disabled)
        """
        self.fw = fw
        self.action_args = action_args
        self.zone = zone
        self.desired_state = desired_state
        self.permanent = permanent
        self.immediate = immediate
        self.fw_offline = fw_offline
        self.enabled_values = enabled_values or ["enabled"]
        self.disabled_values = disabled_values or ["disabled"]

        # List of messages that we'll call module.fail_json or module.exit_json
        # with.
        self.msgs = []

        # Allow for custom messages to be added for certain subclass transaction
        # types
        self.enabled_msg = None
        self.disabled_msg = None
#####################
# exception handling
#
def action_handler(self, action_func, action_func_args):
"""
Function to wrap calls to make actions on firewalld in try/except
logic and emit (hopefully) useful error messages
"""
try:
return action_func(*action_func_args)
except Exception as e:
# If there are any commonly known errors that we should provide more
# context for to help the users diagnose what's wrong. Handle that here
if "INVALID_SERVICE" in "%s" % e:
self.msgs.append("Services are defined by port/tcp relationship and named as th |
wzhy90/git-repo | subcmds/sync.py | Python | apache-2.0 | 27,721 | 0.010065 | #
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import json
import netrc
from optparse import SUPPRESS_HELP
import os
import re
import shutil
import socket
import subprocess
import sys
import time
from pyversion import is_python3
if is_python3():
import urllib.parse
import xmlrpc.client
else:
import imp
import urlparse
import xmlrpclib
urllib = imp.new_module('urllib')
urllib.parse = urlparse
xmlrpc = imp.new_module('xmlrpc')
xmlrpc.client = xmlrpclib
try:
import threading as _threading
except ImportError:
import dummy_threading as _threading
try:
import resource
def _rlimit_nofile():
return resource.getrlimit(resource.RLIMIT_NOFILE)
except ImportError:
def _rlimit_nofile():
return (256, 256)
try:
import multiprocessing
except ImportError:
multiprocessing = None
from git_command import GIT, git_require
from git_refs import R_HEADS, HEAD
from project import Project
from project import RemoteSpec
from command import Command, MirrorSafeCommand
from error import RepoChangedException, GitError, ManifestParseError
from project import SyncBuffer
from progress import Progress
from wrapper import Wrapper
_ONE_DAY_S = 24 * 60 * 60
class _FetchError(Exception):
"""Internal error thrown in _FetchHelper() when we don't want stack trace."""
pass
class Sync(Command, MirrorSafeCommand):
jobs = 1
common = True
helpSummary = "Update working tree to the latest revision"
helpUsage = """
%prog [<project>...]
"""
helpDescription = """
The '%prog' command synchronizes local project directories
with the remote repositories specified in the manifest. If a local
project does not yet exist, it will clone a new local directory from
the remote repository and set up tracking branches as specified in
the manifest. If the local project already exists, '%prog'
will update the remote branches and rebase any new local changes
on top of the new remote changes.
'%prog' will synchronize all projects listed at the command
line. Projects can be specified either by name, or by a relative
or absolute path to the project's local directory. If no projects
are specified, '%prog' will synchronize all projects listed in
the manifest.
The -d/--detach option can be used to switch specified projects
back to the manifest revision. This option is especially helpful
if the project is currently on a topic branch, but the manifest
revision is temporarily needed.
The -s/--smart-sync option can be used to sync to a known good
build as specified by the manifest-server element in the current
manifest. The -t/--smart-tag option is similar and allows you to
specify a custom tag/label.
The -u/--manifest-server-username and -p/--manifest-server-password
options can be used to specify a username and password to authenticate
with the manifest server when using the -s or -t option.
If -u and -p are not specified when using the -s or -t option, '%prog'
will attempt to read authentication credentials for the manifest server
from the user's .netrc file.
'%prog' will not use authentication credentials from -u/-p or .netrc
if the manifest server specified in the manifest file already includes
credentials.
The -f/--force-broken option can be used to proceed with syncing
other projects if a project sync fails.
The --no-clone-bundle option disables any attempt to use
$URL/clone.bundle to bootstrap a new Git repository from a
resumeable bundle file on a content delivery network. This
may be necessary if there are problems with the local Python
HTTP client or proxy configuration, but the Git binary works.
The --fetch-submodules option enables fetching Git submodules
of a project from server.
SSH Connections
---------------
If at least one project remote URL uses an SSH connection (ssh://,
git+ssh://, or user@host:path syntax) repo will automatically
enable the SSH ControlMaster option when connecting to that host.
This feature permits other projects in the same '%prog' session to
reuse the same SSH tunnel, saving connection setup overheads.
To disable this behavior on UNIX platforms, set the GIT_SSH
environment variable to 'ssh'. For example:
export GIT_SSH=ssh
%prog
Compatibility
~~~~~~~~~~~~~
This feature is automatically disabled on Windows, due to the lack
of UNIX domain socket support.
This feature is not compatible with url.insteadof rewrites in the
user's ~/.gitconfig. '%prog' is currently not able to perform the
rewrite early enough to establish the ControlMaster tunnel.
If the remote SSH daemon is Gerrit Code Review, version 2.0.10 or
later is required to fix a server side protocol bug.
"""
    def _Options(self, p, show_smart=True):
        """Register 'repo sync' command line options on the option parser p.

        :p: optparse parser to add options to
        :show_smart: when True, also offer the -s/--smart-sync and
                     -t/--smart-tag options
        """
        # Default parallel job count to the manifest's sync-j attribute,
        # falling back to 1 if the manifest cannot be parsed yet.
        try:
            self.jobs = self.manifest.default.sync_j
        except ManifestParseError:
            self.jobs = 1

        p.add_option('-f', '--force-broken',
                     dest='force_broken', action='store_true',
                     help="continue sync even if a project fails to sync")
        p.add_option('-l', '--local-only',
                     dest='local_only', action='store_true',
                     help="only update working tree, don't fetch")
        p.add_option('-n', '--network-only',
                     dest='network_only', action='store_true',
                     help="fetch only, don't update working tree")
        p.add_option('-d', '--detach',
                     dest='detach_head', action='store_true',
                     help='detach projects back to manifest revision')
        p.add_option('-c', '--current-branch',
                     dest='current_branch_only', action='store_true',
                     help='fetch only current branch from server')
        p.add_option('-q', '--quiet',
                     dest='quiet', action='store_true',
                     help='be more quiet')
        p.add_option('-j', '--jobs',
                     dest='jobs', action='store', type='int',
                     help="projects to fetch simultaneously (default %d)" % self.jobs)
        p.add_option('-m', '--manifest-name',
                     dest='manifest_name',
                     help='temporary manifest to use for this sync', metavar='NAME.xml')
        p.add_option('--no-clone-bundle',
                     dest='no_clone_bundle', action='store_true',
                     help='disable use of /clone.bundle on HTTP/HTTPS')
        p.add_option('-u', '--manifest-server-username', action='store',
                     dest='manifest_server_username',
                     help='username to authenticate with the manifest server')
        p.add_option('-p', '--manifest-server-password', action='store',
                     dest='manifest_server_password',
                     help='password to authenticate with the manifest server')
        p.add_option('--fetch-submodules',
                     dest='fetch_submodules', action='store_true',
                     help='fetch submodules from server')
        p.add_option('--no-tags',
                     dest='no_tags', action='store_true',
                     help="don't fetch tags")
        if show_smart:
            p.add_option('-s', '--smart-sync',
                         dest='smart_sync', action='store_true',
                         help='smart sync using manifest from a known good build')
            p.add_option('-t', '--smart-tag',
                         dest='smart_tag', action='store',
                         help='smart sync using manifest from a known tag')

        # hidden/maintenance options
        g = p.add_option_group('repo Version options')
        g.add_option('--no-repo-verify',
                     dest='no_repo_verify', action='store_true',
                     help='do not verify repo source code')
        g.add_option('--repo-upgraded',
                     dest='repo_upgraded', action='store_true',
                     help=SUPPRESS_HELP)
def _Fet |
reubano/tabutils | meza/io.py | Python | mit | 52,280 | 0.000134 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: sw=4:ts=4:expandtab
"""
meza.io
~~~~~~~
Provides methods for reading/writing/processing tabular formatted files
Examples:
basic usage::
>>> from meza.io import read_csv
>>>
>>> path = p.join(DATA_DIR, 'test.csv')
>>> csv_records = read_csv(path)
>>> csv_header = next(csv_records).keys()
>>> next(csv_records)['Some Value'] == '100'
True
"""
import itertools as it
import sys
import hashlib
import sqlite3
import json
import os
from os import path as p
from datetime import time
from mmap import mmap
from collections import deque
from subprocess import check_output, check_call, Popen, PIPE, CalledProcessError
from http import client
from csv import Error as csvError
from functools import partial
from codecs import iterdecode, iterencode, StreamReader
from itertools import zip_longest
import yaml
import xlrd
import pygogo as gogo
from bs4 import BeautifulSoup, FeatureNotFound
from ijson import items
from chardet.universaldetector import UniversalDetector
from xlrd import (
XL_CELL_DATE, XL_CELL_EMPTY, XL_CELL_NUMBER, XL_CELL_BOOLEAN,
XL_CELL_ERROR)
from xlrd.xldate import xldate_as_datetime as xl2dt
from io import StringIO, TextIOBase, BytesIO, open
from . import (
fntoo | ls as ft, process as pr, unicsv as csv, dbf, ENCODING, BOM, DATA_DIR)
# pylint: disable=C0103
logger = gogo.Gogo(__name__, monolog=True, verbose=True).logger
# pylint: disable=C0103
def encode(iterable):
    """Lazily encode an iterable of strings to bytes using the module ENCODING."""
    return (s.encode(ENCODING) for s in iterable)


def chain(iterable):
    """Flatten one level of nesting; None or an empty iterable yields nothing."""
    return it.chain.from_iterable(iterable or [])


# Tokens (bytes or str) that terminate a line.
NEWLINES = {b'\n', b'\r', b'\r\n', '\n', '\r', '\r\n'}


def groupby_line(iterable):
    """Group a character/byte stream into alternating runs of non-newline and
    newline tokens (key True for content runs, False for newline runs).
    """
    return it.groupby(iterable, lambda s: s not in NEWLINES)
class IterStringIO(TextIOBase):
    """A lazy StringIO that reads a generator of strings.

    https://stackoverflow.com/a/32020108/408556
    https://stackoverflow.com/a/20260030/408556
    """
    # pylint: disable=super-init-not-called
    def __init__(self, iterable=None, bufsize=4096, decode=False, **kwargs):
        """ IterStringIO constructor

        Args:
            iterable (Seq[str]): Iterable of strings or bytes
            bufsize (Int): Buffer size for seeking
            decode (bool): Decode the text into a string (default: False)

        Examples:
            >>> StringIO(iter('Hello World')).read(5) # doctest: +ELLIPSIS
            Traceback (most recent call last):
            TypeError:...
            >>> IterStringIO(iter('Hello World')).read(5)
            b'Hello'
            >>> i = IterStringIO(iter('one\\ntwo\\n'))
            >>> list(next(i.lines)) == [b'o', b'n', b'e']
            True
            >>> decoded = IterStringIO(iter('Hello World'), decode=True)
            >>> decoded.read(5) == 'Hello'
            True
        """
        iterable = iterable if iterable else []
        chained = chain(iterable)
        self.iter = encode(chained)
        self.decode = decode
        self.bufsize = bufsize
        # Ring buffer of the most recently consumed bytes so seek() can rewind
        # up to `bufsize` positions.
        self.last = deque(bytearray(), self.bufsize)
        self.pos = 0

    def __next__(self):
        return self._read(next(self.lines))

    def __iter__(self):
        return self

    @property
    def lines(self):
        """Read all the lines of content"""
        # TODO: what about a csv with embedded newlines?
        groups = groupby_line(self.iter)
        return (g for k, g in groups if k)

    def _read(self, iterable, num=None, newline=True):
        """Helper method used to read content"""
        content = it.islice(iterable, num) if num else iterable
        byte = ft.byte(content)
        self.last.extend(byte)
        # NOTE(review): when `num` exceeds the bytes actually available, pos
        # advances by `num`, not by the number of bytes consumed — confirm.
        self.pos += num or len(byte)

        if newline:
            # NOTE(review): appends a one-char str into a deque that otherwise
            # holds ints from bytearrays — verify seek() tolerates the mix.
            self.last.append('\n')

        return byte.decode(ENCODING) if self.decode else bytes(byte)

    def write(self, iterable):
        """Write the content"""
        chained = chain(iterable)
        self.iter = it.chain(self.iter, encode(chained))

    def read(self, num=None):
        """Read the content"""
        return self._read(self.iter, num, False)

    def readline(self, num=None):
        """Read a line of content"""
        return self._read(next(self.lines), num)

    def readlines(self):
        """Read all the lines of content"""
        return map(self._read, self.lines)

    def seek(self, num):
        """Go to a specific position within a file"""
        next_pos = self.pos + 1
        # Earliest position still held in the rewind buffer.
        beg_buf = max([0, self.pos - self.bufsize])

        if num <= beg_buf:
            # Target precedes the rewind buffer: replay the whole buffer in
            # front of the stream and start the buffer afresh.
            self.iter = it.chain(self.last, self.iter)
            self.last = deque([], self.bufsize)
        elif self.pos > num > beg_buf:
            # Rewind within the buffer: push the buffered tail back on.
            extend = [self.last.pop() for _ in range(self.pos - num)]
            self.iter = it.chain(reversed(extend), self.iter)
        elif num == self.pos:
            # Already at the requested position.
            pass
        elif num == next_pos:
            # Advance by exactly one item, remembering it for rewinds.
            self.last.append(next(self.iter))
        elif num > next_pos:
            # Skip forward, buffering the skipped items.
            pos = num - self.pos
            [self.last.append(x) for x in it.islice(self.iter, 0, pos)]

        self.pos = beg_buf if num < beg_buf else num

    def tell(self):
        """Get the current position within a file"""
        return self.pos
class Reencoder(StreamReader):
"""Recodes a file like object from one encoding to another.
"""
    def __init__(self, f, fromenc=ENCODING, toenc=ENCODING, **kwargs):
        """ Reencoder constructor

        Args:
            f (obj): File-like object
            fromenc (str): The input encoding.
            toenc (str): The output encoding.

        Kwargs:
            remove_BOM (bool): Remove Byte Order Marker (default: True)
            decode (bool): Decode the text into a string (default: False)

        Examples:
            >>> encoding = 'utf-16-be'
            >>> eff = p.join(DATA_DIR, 'utf16_big.csv')
            >>>
            >>> with open(eff, 'rb') as f:
            ...     reenc = Reencoder(f, encoding)
            ...     first = reenc.readline(keepends=False)
            ...     first.decode('utf-8') == '\ufeffa,b,c'
            ...     reenc.readlines()[1].decode('utf-8') == '4,5,ʤ'
            True
            True
            >>> with open(eff, 'rb') as f:
            ...     reenc = Reencoder(f, encoding, decode=True)
            ...     reenc.readline(keepends=False) == '\ufeffa,b,c'
            True
            >>> with open(eff, 'rU', encoding=encoding) as f:
            ...     reenc = Reencoder(f, remove_BOM=True)
            ...     reenc.readline(keepends=False) == b'a,b,c'
            ...     reenc.readline() == b'1,2,3\\n'
            ...     reenc.readline().decode('utf-8') == '4,5,ʤ'
            True
            True
            True
        """
        self.fileno = f.fileno

        # Peek at the first line to learn whether f yields bytes or text.
        first_line = next(f)
        bytes_mode = isinstance(first_line, bytes)
        decode = kwargs.get('decode')
        # Re-encode to `toenc` unless the caller asked for decoded text.
        rencode = not decode

        if kwargs.get('remove_BOM'):
            strip = BOM.encode(fromenc) if bytes_mode else BOM
            first_line = first_line.lstrip(strip)

        # Put the (possibly BOM-stripped) first line back in front of f.
        chained = it.chain([first_line], f)

        if bytes_mode:
            decoded = iterdecode(chained, fromenc)
            self.binary = rencode
            proper_newline = first_line.endswith(os.linesep.encode(fromenc))
        else:
            decoded = chained
            self.binary = bytes_mode or rencode
            proper_newline = first_line.endswith(os.linesep)

        stream = iterencode(decoded, toenc) if rencode else decoded

        if proper_newline:
            self.stream = stream
        else:
            # TODO: make sure the read methods are consistent with
            # proper_newline, e.g., `keepends`.
            #
            # TODO: since the newline isn't recognized, `stream` contains
            # just one (very long) line. we pass in this line to iterate
            # over the chars
            groups = groupby_line(next(stream))

            if self.binary:
                self.stream = (b''.join(g) for k, g in groups if k)
            else:
                self.stream = (''.join(g) for k, g in groups if k)
def __next__(self):
ret |
tavaresdong/courses | ucb_cs61A/lab/lab10/reader.py | Python | mit | 3,571 | 0.00308 | import string
from buffer import Buffer
from expr import *
# Characters that may start a symbol: ASCII letters and underscore.
SYMBOL_STARTS = set(string.ascii_lowercase + string.ascii_uppercase + '_')
# Characters allowed after the first character of a symbol.
SYMBOL_INNERS = SYMBOL_STARTS | set(string.digits)
# Characters that may appear in a numeric literal.
NUMERAL = set(string.digits + '-.')
# Characters treated as token-separating whitespace.
WHITESPACE = set(' \t\n\r')
# Single-character tokens.
DELIMITERS = set('(),:')
def tokenize(s):
    """Splits the string s into tokens and returns a list of them.

    >>> tokenize('lambda f: f(0, 4.2)')
    ['lambda', 'f', ':', 'f', '(', 0, ',', 4.2, ')']
    """
    src = Buffer(s)
    # next_token returns None at end of input, which stops the sentinel iter.
    return list(iter(lambda: next_token(src), None))
def take(src, allowed_characters):
    """Consume and return the longest prefix of src drawn from allowed_characters."""
    chars = []
    while src.current() in allowed_characters:
        chars.append(src.pop())
    return ''.join(chars)
def next_token(src):
    """Consume and return the next token from src, or None at end of input.

    Tokens are ints/floats (numerals), strings (symbols), or single
    delimiter characters. Raises SyntaxError for malformed numerals or
    unrecognized characters.
    """
    take(src, WHITESPACE)  # skip whitespace
    c = src.current()
    if c is None:
        # End of input.
        return None
    elif c in NUMERAL:
        literal = take(src, NUMERAL)
        # Prefer int; fall back to float; otherwise it is malformed.
        try:
            return int(literal)
        except ValueError:
            try:
                return float(literal)
            except ValueError:
                raise SyntaxError("'{}' is not a numeral".format(literal))
    elif c in SYMBOL_STARTS:
        # A symbol/name token.
        return take(src, SYMBOL_INNERS)
    elif c in DELIMITERS:
        src.pop()
        return c
    else:
        raise SyntaxError("'{}' is not a token".format(c))
def is_literal(s):
    """Return True if token s is a numeric literal (an int or a float)."""
    # Idiomatic form: a single isinstance call with a tuple of types.
    return isinstance(s, (int, float))
def is_name(s):
    """Return True if token s is a usable symbol name (a string that is
    neither a delimiter nor the 'lambda' keyword)."""
    if not isinstance(s, str):
        return False
    return s != 'lambda' and s not in DELIMITERS
def read(s):
    """Parse an expression from a string. If the string does not contain an
    expression, None is returned. If the string cannot be parsed, a SyntaxError
    is raised.

    >>> read('lambda f: f(0)')
    LambdaExpr(['f'], CallExpr(Name('f'), [Literal(0)]))
    >>> read('(lambda x: x)(5)')
    CallExpr(LambdaExpr(['x'], Name('x')), [Literal(5)])
    >>> read('(lambda: 5)()')
    CallExpr(LambdaExpr([], Literal(5)), [])
    >>> read('lambda x y: 10')
    Traceback (most recent call last):
    ...
    SyntaxError: expected ':' but got 'y'
    >>> read(' ') # returns None
    """
    src = Buffer(tokenize(s))
    # An empty token stream means there is no expression to parse.
    if src.current() is None:
        return None
    return read_expr(src)
def read_expr(src):
    """Read one expression from the token buffer src and return its AST node.

    Handles literals, names, lambda expressions, and parenthesized
    expressions; each may be followed by call operands, which
    read_call_expr attaches. Raises SyntaxError on malformed input.
    """
    token = src.pop()
    if token is None:
        raise SyntaxError('Incomplete expression')
    elif is_literal(token):
        return read_call_expr(src, Literal(token))
    elif is_name(token):
        return read_call_expr(src, Name(token))
    elif token == 'lambda':
        # lambda <params> : <body>
        params = read_comma_separated(src, read_param)
        src.expect(':')
        body = read_expr(src)
        return LambdaExpr(params, body)
    elif token == '(':
        # Parenthesized sub-expression, possibly called afterwards.
        inner_expr = read_expr(src)
        src.expect(')')
        return read_call_expr(src, inner_expr)
    else:
        raise SyntaxError("'{}' is not the start of an expression".format(token))
def read_comma_separated(src, reader):
    """Read a possibly-empty comma-separated sequence of items with reader,
    stopping at ':' or ')'."""
    if src.current() in (':', ')'):
        return []
    items = [reader(src)]
    while src.current() == ',':
        src.pop()
        items.append(reader(src))
    return items
def read_call_expr(src, operator):
    """Greedily wrap operator in a CallExpr for each trailing '(...)' group,
    supporting chained calls like f(1)(2)."""
    while src.current() == '(':
        src.pop()
        operands = read_comma_separated(src, read_expr)
        src.expect(')')
        operator = CallExpr(operator, operands)
    return operator
def read_param(src):
    """Read a single parameter name token; raise SyntaxError otherwise."""
    token = src.pop()
    if not is_name(token):
        raise SyntaxError("Expected parameter name but got '{}'".format(token))
    return token
|
alexholcombe/dot-jump | dataRaw/Fixed Cue/test_dot-jump25Oct2016_10-53.py | Python | gpl-3.0 | 25,090 | 0.019131 | from __future__ import print_function
__author__ = """Alex "O." Holcombe, Charles Ludowici, """ ## double-quotes will be silently removed, single quotes will be left, eg, O'Connor
import time, sys, platform, os
from math import atan, atan2, pi, cos, sin, sqrt, ceil, radians, degrees
import numpy as np
import psychopy, psychopy.info
import copy
from psychopy import visual, sound, monitors, logging, gui, event, core, data
try:
from helpersAOH import accelerateComputer, openMyStimWindow
except Exception as e:
print(e); print('Problem loading helpersAOH. Check that the file helpersAOH.py in the same directory as this file')
print('Current directory is ',os.getcwd())
eyeTracking = False
if eyeTracking:
try:
import eyelinkEyetrackerForPsychopySUPA3
except Exception as e:
print(e)
print('Problem loading eyelinkEyetrackerForPsychopySUPA3. Check that the file eyelinkEyetrackerForPsychopySUPA3.py in the same directory as this file')
print('While a different version of pylink might make your eyetracking code work, your code appears to generally be out of date. Rewrite your eyetracker code based on the SR website examples')
#Psychopy v1.83.01 broke this, pylink version prevents EyelinkEyetrackerForPsychopySUPA3 stuff from importing. But what really needs to be done is to change eyetracking code to more modern calls, as indicated on SR site
eyeTracking = False
expname= "dot-jump"
demo = False; exportImages = False
autopilot = False
subject='test'
###############################
### Setup the screen parameters ##############################################################################################
##
allowGUI = False
units='deg' #'cm'
fullscrn=False
waitBlank=False
if True: #just so I can indent all the below
refreshRate= 85 *1.0; #160 #set to the framerate of the monitor
fullscrn=True; #show in small window (0) or full screen (1)
scrn=True #which screen to display the stimuli. 0 is home screen, 1 is second screen
# create a dialog from dictionary
infoFirst = { 'Autopilot':autopilot, 'Check refresh etc':True, 'Use second screen':scrn, 'Fullscreen (timing errors if not)': fullscrn, 'Screen refresh rate': refreshRate }
OK = gui.DlgFromDict(dictionary=infoFirst,
title='MOT',
order=['Autopilot','Check refresh etc', 'Use second screen', 'Screen refresh rate', 'Fullscreen (timing errors if not)'],
tip={'Check refresh etc': 'To confirm refresh rate and that can keep up, at least when drawing a grating',
'Use second Screen': ''},
)
if not OK.OK:
print('User cancelled from dialog box'); logging.info('User cancelled from dialog box'); core.quit()
autopilot = infoFirst['Autopilot']
checkRefreshEtc = infoFirst['Check refresh etc']
scrn = infoFirst['Use second screen']
print('scrn = ',scrn, ' from dialog box')
fullscrn = infoFirst['Fullscreen (timing errors if not)']
refreshRate = infoFirst['Screen refresh rate']
#monitor parameters
widthPix = 1280 #1440 #monitor width in pixels
heightPix =1024 #900 #monitor height in pixels
monitorwidth = 40.5 #28.5 #monitor width in centimeters
viewdist = 55.; #cm
pixelperdegree = widthPix/ (atan(monitorwidth/viewdist) /np.pi*180)
bgColor = [-1,-1,-1] # black background
monitorname = 'testMonitor' # 'mitsubishi' #in psychopy Monitors Center
mon = monitors.Monitor(monitorname,width=monitorwidth, distance=viewdist)#fetch the most recent calib for this monitor
mon.setSizePix( (widthPix,heightPix) )
myWin = openMyStimWindow(mon,widthPix,heightPix,bgColor,allowGUI,units,fullscrn,scrn,waitBlank)
myWin.setRecordFrameIntervals(False)
trialsPerCondition = 2 #default value
refreshMsg2 = ''
if not checkRefreshEtc:
refreshMsg1 = 'REFRESH RATE WAS NOT CHECKED'
refreshRateWrong = False
else: #checkRefreshEtc
runInfo = psychopy.info.RunTimeInfo(
win=myWin, ## a psychopy.visual.Window() instance; None = default temp window used; False = no win, no win.flips()
refreshTest='grating', ## None, True, or 'grating' (eye-candy to avoid a blank screen)
verbose=True, ## True means report on everything
userProcsDetailed=True ## if verbose and userProcsDetailed, return (command, process-ID) of the user's processes
)
print('Finished runInfo- which assesses the refresh and processes of this computer')
refreshMsg1 = 'Median frames per second ='+ str( np.round(1000./runInfo["windowRefreshTimeMedian_ms"],1) )
refreshRateTolerancePct = 3
pctOff = abs( (1000./runInfo["windowRefreshTimeMedian_ms"]-refreshRate) / refreshRate)
refreshRateWrong = pctOff > (refreshRateTolerancePct/100.)
if refreshRateWrong:
refreshMsg1 += ' BUT'
refreshMsg1 += ' program assumes ' + str(refreshRate)
refreshMsg2 = 'which is off by more than' + str(round(refreshRateTolerancePct,0)) + '%!!'
else:
refreshMsg1 += ', which is close enough to desired val of ' + str( round(refreshRate,1) )
myWinRes = myWin.size
myWin.allowGUI =True
myWin.close() #have to close window to show dialog box
##
### END Setup of the screen parameters ##############################################################################################
####################################
askUserAndConfirmExpParams = True
if autopilot:
subject = 'autoTest'
###############################
### Ask user exp params ##############################################################################################
## askUserAndConfirmExpParams
if askUserAndConfirmExpParams:
dlgLabelsOrdered = list() #new dialog box
myDlg = gui.Dlg(title=expname, pos=(200,400))
if not autopilot:
myDlg.addField('Subject code :', subject)
dlgLabelsOrdered.append('subject')
else:
myDlg.addField('Subject code :', subject)
dlgLabelsOrdered.append('subject')
myDlg.addField('autoPilotTime:', 0, tip='Auto response time relative to cue')
myDlg.addField('randomTime:',False, tip = 'Add (rounded) gaussian N(0,2) error to time offset?')
myDlg.addField('autoPilotSpace:',0, tip='Auto response position relative to cue')
myDlg.addField('randomSpace:',False, tip = 'Add (rounded) gaussian N(0,2) error to space offset?')
dlgLabelsOrdered.append('autoPilotTime')
dlgLabelsOrdered.append('randomTime')
dlgLabelsOrdered.append('autoPilotSpace')
dlgLabelsOrdered.append('randomSpace')
myDlg.addField('Trials per condition (default=' + str(trialsPerCondition) + '):', trialsPerCondition, tip=str(trialsPerCondition))
dlgLabelsOrdered.append('trialsPerCondition')
pctCompletedBreak = 50
myDlg.addText(refreshMsg1, color='Black')
if refreshRateWrong:
myDlg.addText(refreshMsg2, color='Red')
msgWrongResolution = ''
if checkRefreshEtc and (not demo) and (myWinRes != [widthPix,heightPix]).any():
msgWrongResolution = 'Instead of desired resolution of '+ str(widthPix)+'x'+str(heightPix)+ ' pixels, screen apparently '+ str(myWinRes[0])+ 'x'+ str(myWinRes[1])
myDlg.addText(msgWrongResolution, color='Red')
print(msgWrongResolution); logging.info(msgWrongResolution)
myDlg.addText('Note: to abort press ESC at response time', color='DimGrey') #works in PsychoPy1.84
#myDlg.addText('Note: to abort press ESC at a trials response screen', color=[-1.,1.,-1.]) #color names not working for some pre-1.84 versions
myDlg.show()
if myDlg.OK: #unpack information from dialogue box
thisInfo = myDlg.data #this will be a list of data returned from each field added in order
if autopilot:
name=thisInfo[dlgLabelsOrdered.index('subject')]
if len(name) > 0: #if entere |
Distrotech/samba | wintest/test-s4-howto.py | Python | gpl-3.0 | 29,309 | 0.005357 | #!/usr/bin/env python
'''automated testing of the steps of the Samba4 HOWTO'''
import sys, os
import wintest, pexpect, time, subprocess
def set_krb5_conf(t):
    '''point Kerberos at the private krb5.conf generated by provisioning'''
    t.putenv("KRB5_CONFIG", '${PREFIX}/private/krb5.conf')
def build_s4(t):
    '''build samba4'''
    t.info('Building s4')
    t.chdir('${SOURCETREE}')
    t.putenv('CC', 'ccache gcc')  # speed up rebuilds with ccache
    t.run_cmd('make reconfigure || ./configure --enable-auto-reconfigure --enable-developer --prefix=${PREFIX} -C')
    t.run_cmd('make -j')
    # wipe any previous install so stale files never linger
    t.run_cmd('rm -rf ${PREFIX}')
    t.run_cmd('make -j install')
def provision_s4(t, func_level="2008"):
    '''provision s4 as a DC

    :t: wintest context object
    :func_level: AD functional level to provision with (default "2008")
    '''
    t.info('Provisioning s4')
    t.chdir('${PREFIX}')
    # start from a clean slate: remove previous databases and config
    t.del_files(["var", "private"])
    t.run_cmd("rm -f etc/smb.conf")
    provision=['sbin/provision',
               '--realm=${LCREALM}',
               '--domain=${DOMAIN}',
               '--adminpass=${PASSWORD1}',
               '--server-role=domain controller',
               '--function-level=%s' % func_level,
               '-d${DEBUGLEVEL}',
               '--option=interfaces=${INTERFACE}',
               '--host-ip=${INTERFACE_IP}',
               '--option=bind interfaces only=yes',
               '--option=rndc command=${RNDC} -c${PREFIX}/etc/rndc.conf']
    if t.getvar('INTERFACE_IPV6'):
        provision.append('--host-ip6=${INTERFACE_IPV6}')
    t.run_cmd(provision)
    # create the accounts the later RODC replication tests rely on
    t.run_cmd('bin/samba-tool newuser testallowed ${PASSWORD1}')
    t.run_cmd('bin/samba-tool newuser testdenied ${PASSWORD1}')
    t.run_cmd('bin/samba-tool group addmembers "Allowed RODC Password Replication Group" testallowed')
def start_s4(t):
    '''startup samba4'''
    t.info('Starting Samba4')
    t.chdir("${PREFIX}")
    # kill any leftover daemons from a previous run; ignore failures
    t.run_cmd('killall -9 -q samba smbd nmbd winbindd', checkfail=False)
    t.run_cmd(['sbin/samba',
               '--option', 'panic action=gnome-terminal -e "gdb --pid %PID%"'])
    # wait until the SMB port is listening before returning
    t.port_wait("${INTERFACE_IP}", 139)
def test_smbclient(t):
    '''test smbclient against localhost'''
    t.info('Testing smbclient')
    t.chdir('${PREFIX}')
    t.cmd_contains("bin/smbclient --version", ["Version 4.0"])
    t.retry_cmd('bin/smbclient -L ${INTERFACE_IP} -U%', ["netlogon", "sysvol", "IPC Service"])
    # interactive session: create and remove a directory over SMB
    child = t.pexpect_spawn('bin/smbclient //${INTERFACE_IP}/netlogon -Uadministrator%${PASSWORD1}')
    child.expect("smb:")
    child.sendline("dir")
    child.expect("blocks available")
    child.sendline("mkdir testdir")
    child.expect("smb:")
    child.sendline("cd testdir")
    child.expect('testdir')
    child.sendline("cd ..")
    child.sendline("rmdir testdir")
def create_shares(t):
    '''create some test shares'''
    t.info("Adding test shares")
    t.chdir('${PREFIX}')
    # appended (mode='a') to the smb.conf generated by provisioning
    t.write_file("etc/smb.conf", '''
[test]
path = ${PREFIX}/test
read only = no
[profiles]
path = ${PREFIX}/var/profiles
read only = no
''',
                 mode='a')
    t.run_cmd("mkdir -p test")
    t.run_cmd("mkdir -p var/profiles")
def test_dns(t):
    '''test that DNS is OK'''
    t.info("Testing DNS")
    # LDAP and Kerberos SRV records plus the host A record must resolve
    t.cmd_contains("host -t SRV _ldap._tcp.${LCREALM}.",
                   ['_ldap._tcp.${LCREALM} has SRV record 0 100 389 ${HOSTNAME}.${LCREALM}'])
    t.cmd_contains("host -t SRV _kerberos._udp.${LCREALM}.",
                   ['_kerberos._udp.${LCREALM} has SRV record 0 100 88 ${HOSTNAME}.${LCREALM}'])
    t.cmd_contains("host -t A ${HOSTNAME}.${LCREALM}",
                   ['${HOSTNAME}.${LCREALM} has address'])
def test_kerberos(t):
    '''test that kerberos is OK'''
    t.info("Testing kerberos")
    t.run_cmd("kdestroy")
    t.kinit("administrator@${REALM}", "${PASSWORD1}")
    # this copes with the differences between MIT and Heimdal klist
    # ("Principal" vs "principal", hence the bare "rincipal")
    t.cmd_contains("klist", ["rincipal", "administrator@${REALM}"])
def test_dyndns(t):
    '''test that dynamic DNS is working'''
    t.chdir('${PREFIX}')
    t.run_cmd("sbin/samba_dnsupdate --fail-immediately")
    # flush the bind cache so freshly registered records become visible
    t.rndc_cmd("flush")
def run_winjoin(t, vm):
    '''join a windows box to our domain'''
    t.setwinvars(vm)
    # NOTE(review): t is passed as the first argument to t.run_winjoin —
    # verify this matches the wintest helper's signature
    t.run_winjoin(t, "${LCREALM}")
def test_winjoin(t, vm):
    '''check that the windows join succeeded

    Verifies shares, DNS registration, and kerberos/non-kerberos SMB
    access, then maps a drive over telnet from the Windows box.
    (Also repairs extraction-corrupted tokens "Shar | ename" and
    "cmd_co | ntains" from the original text.)
    '''
    t.info("Checking the windows join is OK")
    t.chdir('${PREFIX}')
    t.port_wait("${WIN_IP}", 139)
    t.retry_cmd('bin/smbclient -L ${WIN_HOSTNAME}.${LCREALM} -Uadministrator@${LCREALM}%${PASSWORD1}', ["C$", "IPC$", "Sharename"], retries=100)
    t.cmd_contains("host -t A ${WIN_HOSTNAME}.${LCREALM}.", ['has address'])
    t.cmd_contains('bin/smbclient -L ${WIN_HOSTNAME}.${LCREALM} -Utestallowed@${LCREALM}%${PASSWORD1}', ["C$", "IPC$", "Sharename"])
    # explicitly exercise both the non-kerberos and kerberos code paths
    t.cmd_contains('bin/smbclient -L ${WIN_HOSTNAME}.${LCREALM} -k no -Utestallowed@${LCREALM}%${PASSWORD1}', ["C$", "IPC$", "Sharename"])
    t.cmd_contains('bin/smbclient -L ${WIN_HOSTNAME}.${LCREALM} -k yes -Utestallowed@${LCREALM}%${PASSWORD1}', ["C$", "IPC$", "Sharename"])
    child = t.open_telnet("${WIN_HOSTNAME}", "${DOMAIN}\\administrator", "${PASSWORD1}")
    child.sendline("net use t: \\\\${HOSTNAME}.${LCREALM}\\test")
    child.expect("The command completed successfully")
def run_dcpromo(t, vm):
    '''run a dcpromo on windows'''
    t.setwinvars(vm)
    t.info("Joining a windows VM ${WIN_VM} to the domain as a DC using dcpromo")
    child = t.open_telnet("${WIN_HOSTNAME}", "administrator", "${WIN_PASS}", set_ip=True, set_noexpire=True)
    # write the unattended-install answer file on the windows box via 'copy con'
    child.sendline("copy /Y con answers.txt")
    child.sendline('''
[DCINSTALL]
RebootOnSuccess=Yes
RebootOnCompletion=Yes
ReplicaOrNewDomain=Replica
ReplicaDomainDNSName=${LCREALM}
SiteName=Default-First-Site-Name
InstallDNS=No
ConfirmGc=Yes
CreateDNSDelegation=No
UserDomain=${LCREALM}
UserName=${LCREALM}\\administrator
Password=${PASSWORD1}
DatabasePath="C:\Windows\NTDS"
LogPath="C:\Windows\NTDS"
SYSVOLPath="C:\Windows\SYSVOL"
SafeModeAdminPassword=${PASSWORD1}
''')
    child.expect("copied.")
    child.expect("C:")
    child.expect("C:")
    child.sendline("dcpromo /answer:answers.txt")
    # dcpromo either asks for a reboot on success, or reports a failure
    i = child.expect(["You must restart this computer", "failed", "Active Directory Domain Services was not installed", "C:"], timeout=120)
    if i == 1 or i == 2:
        # dump the dcpromo UI log to the console to aid debugging, then fail
        child.sendline("echo off")
        child.sendline("echo START DCPROMO log")
        child.sendline("more c:\windows\debug\dcpromoui.log")
        child.sendline("echo END DCPROMO log")
        child.expect("END DCPROMO")
        raise Exception("dcpromo failed")
    t.wait_reboot()
def test_dcpromo(t, vm):
'''test that dcpromo worked'''
t.info("Checking the dcpromo join is OK")
t.chdir('${PREFIX}')
t.port_wait("${WIN_IP}", 139)
t.retry_cmd("host -t A ${WIN_HOSTNAME}.${LCREALM}. ${INTERFACE_IP}",
['${WIN_HOSTNAME}.${LCREALM} has address'],
retries=30, delay=10, casefold=True)
t.retry_cmd('bin/smbclient -L ${WIN_HOSTNAME}.${LCREALM} -Uadministrator@${LCREALM}%${PASSWORD1}', ["C$", "IPC$", "Sharename"])
t.cmd_contains("host -t A ${WIN_HOSTNAME}.${LCREALM}.", ['has address'])
t.cmd_contains('bin/smbclient -L ${WIN_HOSTNAME}.${LCREALM} -Utestallowed@${LCREALM}%${PASSWORD1}', ["C$", "IPC$", "Sharename"])
t.cmd_contains("bin/samba-tool drs kcc ${HOSTNAME}.${LCREALM} -Uadministrator@${LCREALM}%${PASSWORD1}", ['Consistency check', 'successful'])
t.retry_cmd("bin/samba-tool drs kcc ${WIN_HOSTNAME}.${LCREALM} -Uadministrator@${LCREALM}%${PASSWORD1}", ['Consistency check', 'successful'])
t.kinit("administrator@${REALM}", "${PASSWORD1}")
# the first replication will transfer the dnsHostname attribute
t.cmd_contains("bin/samba-tool drs replicate ${HOSTNAME}.${LCREALM} ${WIN_HOSTNAME} CN=Configuration,${BASEDN} -k yes", ["was successful"])
for nc in [ '${BASEDN}', 'CN=Configuration,${BASEDN}', 'CN=Schema,CN=Configuration,${BASEDN}' ]:
t.cmd_contains("bin/samba-tool drs replicate ${HOSTNAME}.${LCREALM} ${WIN_HOSTNAME}.${LCREALM} %s -k yes" % nc, ["was successful"])
t.cmd_contains("bin/samba-tool drs replicate ${WIN_HOSTNAME}.${LCREALM} ${HOSTNAME}.${LCREALM} %s -k yes" % nc, ["was successful"])
t.cmd_contains("bin/samba-tool drs showrepl ${HOSTNAME}.${LCREALM} -k yes",
[ "INBOUND NEIGHBORS",
"${BASEDN}",
|
enormandeau/Scripts | fastq_split.py | Python | gpl-3.0 | 2,096 | 0.00334 | #!/usr/bin/env python2
"""Split a fastq file in n files of approximately the same number of sequences
Usage:
python fastq_remove.py input_file num_seq_per_file
input_file = input Fastq file
num_seq_per_file = number of sequences to put in each output files
"""
# Importing modules
import sys
# Defining classes
class Fastq(object):
    """A single FASTQ record: two name lines, a sequence and its qualities."""

    def __init__(self, name, seq, name2, qual):
        self.name = name
        self.seq = seq
        self.name2 = name2
        self.qual = qual

    def write_to_file(self, handle):
        """Append this record to *handle* in standard 4-line FASTQ format."""
        for line in ("@" + self.name, self.seq, "+" + self.name2, self.qual):
            handle.write(line + "\n")
# Defining functions
def fastq_parser(input_file):
    """Yield one Fastq object per 4-line record read from *input_file*."""
    with open(input_file) as handle:
        while True:
            header = handle.readline().strip()[1:]
            if not header:
                return  # EOF (or blank header line) ends the stream
            sequence = handle.readline().strip()
            header2 = handle.readline().strip()[1:]
            quality = handle.readline().strip()
            yield Fastq(header, sequence, header2, quality)
# Main
if __name__ == '__main__':
try:
input_file = sys.argv[1]
num_seq_per_file = int(sys.argv[2])
except:
print __doc__
exit(1)
# Test input parameters
assert num_seq_per_file >= 1, "num_seq_per_file must be a non-zero positive integer"
# Write sequences to ouptut files
sequences = fastq_parser(input_file)
output_file_number = 1
seq_num = 0
output_file_name = input_file + "_split_" + str(output_file_number)
# TODO open first file
output_file = open(outp | ut_file_name, "w")
for s in sequences:
seq_num += 1
if seq_num > num_seq_per_file:
output_file_number += 1
seq_num = 1
output_file.close()
| output_file_name = input_file + "_split_" + str(output_file_number)
output_file = open(output_file_name, "w")
s.write_to_file(output_file)
|
hagabbar/pycbc_copy | pycbc/workflow/configuration.py | Python | gpl-3.0 | 38,216 | 0.004344 | # Copyright (C) 2013,2017 Ian Harry, Duncan Brown
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# =============================================================================
#
# Preamble
#
# =============================================================================
#
"""
This module provides a wrapper to the ConfigParser utilities for pycbc
workflow construction. This module is described in the page here:
https://ldas-jobs.ligo.caltech.edu/~cbc/docs/pycbc/ahope/initialization_inifile.html
"""
import os
import re
import stat
import shutil
import time
import logging
import urlparse
import cookielib
import requests
import distutils.spawn
import ConfigParser
import itertools
import pycbc_glue.pipeline
from cookielib import (_warn_unhandled_exception, LoadError, Cookie)
from bs4 import BeautifulSoup
def _really_load(self, f, filename, ignore_discard, ignore_expires):
    """
    This function is required to monkey patch MozillaCookieJar's _really_load
    function which does not understand the curl format cookie file created
    by ecp-cookie-init. It patches the code so that #HttpOnly_ get loaded.

    https://bugs.python.org/issue2190
    https://bugs.python.org/file37625/httponly.patch
    """
    now = time.time()
    # the first line must match the Netscape cookie-file magic header
    magic = f.readline()
    if not re.search(self.magic_re, magic):
        f.close()
        raise LoadError(
            "%r does not look like a Netscape format cookies file" %
            filename)
    try:
        while 1:
            line = f.readline()
            if line == "": break
            # last field may be absent, so keep any trailing tab
            if line.endswith("\n"): line = line[:-1]
            sline = line.strip()
            # support HttpOnly cookies (as stored by curl or old Firefox).
            if sline.startswith("#HttpOnly_"):
                line = sline[10:]
            # skip comments and blank lines XXX what is $ for?
            elif (sline.startswith(("#", "$")) or sline == ""):
                continue
            # parse the 7 tab-separated Netscape cookie fields
            domain, domain_specified, path, secure, expires, name, value = \
                line.split("\t")
            secure = (secure == "TRUE")
            domain_specified = (domain_specified == "TRUE")
            if name == "":
                # cookies.txt regards 'Set-Cookie: foo' as a cookie
                # with no name, whereas cookielib regards it as a
                # cookie with no value.
                name = value
                value = None
            initial_dot = domain.startswith(".")
            assert domain_specified == initial_dot
            discard = False
            if expires == "":
                # no expiry means a session cookie, flagged for discard
                expires = None
                discard = True
            # assume path_specified is false
            c = Cookie(0, name, value,
                       None, False,
                       domain, domain_specified, initial_dot,
                       path, False,
                       secure,
                       expires,
                       discard,
                       None,
                       None,
                       {})
            if not ignore_discard and c.discard:
                continue
            if not ignore_expires and c.is_expired(now):
                continue
            self.set_cookie(c)
    except IOError:
        raise
    except Exception:
        # wrap any parse failure in cookielib's LoadError with context
        _warn_unhandled_exception()
        raise LoadError("invalid Netscape format cookies file %r: %r" %
                        (filename, line))
# Now monkey patch the code
# (MozillaCookieJar will from here on load curl-style #HttpOnly_ lines)
cookielib.MozillaCookieJar._really_load = _really_load # noqa
# error message shown when a git.ligo.org download is bounced to the
# sign-in page instead of returning the requested file
# FIX: repaired garbled text ("cooki | e") inside the message
ecp_cookie_error = """The attempt to download the file at

{}

was redirected to the git.ligo.org sign-in page. This means that you likely
forgot to initialize your ECP cookie or that your LIGO.ORG credentials are
otherwise invalid. Create a valid ECP cookie for git.ligo.org by running

ecp-cookie-init LIGO.ORG https://git.ligo.org/users/auth/shibboleth/callback albert.einstein

before attempting to download files from git.ligo.org.
"""
def resolve_url(url, directory=None, permissions=None):
"""
Resolves a URL to a local file, and returns the path to
that file.
"""
u = urlparse.urlparse(url)
# create the na | me of the destination file
if directory is None:
directory = os.getcwd()
filename = os.path.join(directory,os.path.basename(u.path))
if u.scheme == '' or u.scheme == 'file':
# for regular files, make a direct copy
if os.path.isfile(u.path):
if os.path.isfile(filename):
# check to see if src and dest are the same file
src_inode = os.stat(u.path)[stat.ST_INO]
dst_inode = os.stat(filename)[stat.ST_INO]
if src_inode != dst_inode:
shutil.copy(u.path, filename)
else:
shutil.copy(u.path, filename)
else:
errmsg = "Cannot open file %s from URL %s" % (u.path, url)
raise ValueError(errmsg)
elif u.scheme == 'http' or u.scheme == 'https':
s = requests.Session()
s.mount(str(u.scheme)+'://',
requests.adapters.HTTPAdapter(max_retries=5))
# look for an ecp cookie file and load the cookies
cookie_dict = {}
ecp_file = '/tmp/ecpcookie.u%d' % os.getuid()
if os.path.isfile(ecp_file):
cj = cookielib.MozillaCookieJar()
cj.load(ecp_file, ignore_discard=True, ignore_expires=True)
else:
cj = []
for c in cj:
if c.domain == u.netloc:
# load cookies for this server
cookie_dict[c.name] = c.value
elif u.netloc == "code.pycbc.phy.syr.edu" and \
c.domain == "git.ligo.org":
# handle the redirect for code.pycbc to git.ligo.org
cookie_dict[c.name] = c.value
r = s.get(url, cookies=cookie_dict, allow_redirects=True)
if r.status_code != 200:
errmsg = "Unable to download %s\nError code = %d" % (url,
r.status_code)
raise ValueError(errmsg)
# if we are downloading from git.ligo.org, check that we
# did not get redirected to the sign-in page
if u.netloc == 'git.ligo.org' or u.netloc == 'code.pycbc.phy.syr.edu':
soup = BeautifulSoup(r.content, 'html.parser')
desc = soup.findAll(attrs={"property":"og:url"})
if len(desc) and \
desc[0]['content'] == 'https://git.ligo.org/users/sign_in':
raise ValueError(ecp_cookie_error.format(url))
output_fp = open(filename, 'w')
output_fp.write(r.content)
output_fp.close()
else:
# TODO: We could support other schemes such as gsiftp by
# calling out to globus-url-copy
errmsg = "Unknown URL scheme: %s\n" % (u.scheme)
errmsg += "Currently supported are: file, http, and https."
raise ValueError(errmsg)
if not os.path.isfile(filename):
errmsg = "Error trying to create file %s from %s" % (filename,url)
raise ValueError(errmsg)
if permissions:
if os.access(filename, os.W_OK):
os.chmod(filename, permissions)
else:
# check that the file has at least the permissions requested
s = os.stat(filename)[stat.ST_MODE]
if (s & permissions) != permissions:
errmsg = "Could not change permissions on %s (re |
googleinterns/data-dependency-graph-analysis | deprecated/process_config.py | Python | apache-2.0 | 4,187 | 0.005254 | """Graph Generation Config Reader
This script reads the config from full_config.yaml file and processes it to be usable.
It saves config to variables, and converts map strings to python dictionaries.
"""
import yaml
from graph_generation.connection_generator import ConnectionGenerator
from graph_generation.config_params.collection_params import CollectionParams
from graph_generation.config_params.data_integrity_params import DataIntegrityParams
from graph_generation.config_params.dataset_params import DatasetParams
from graph_generation.config_params.dataset_to_system_params import DatasetToSystemParams
from graph_generation.config_params.processing_params import ProcessingParams
from graph_generation.config_params.system_params import SystemParams
def process_map(config_map, proba=False, enum=False):
    """Converts string map in format [key1:value1 key2:value2] into a dictionary.

    key - represents a count of connections (integer) or enums (string).
    value - represents a count of occurrences (integer) or probability (float).

    Args:
        config_map: string that contains the map
        proba: boolean, true if values are probabilities, not counts.
        enum: boolean, true if keys are enums, not integers.

    Returns:
        A dictionary of keys and values in converted format.
    """
    # FIX: the original value/return lines were garbled by stray ' | '
    # markers; also build the result in one pass instead of three lists
    key_cast = str if enum else int
    value_cast = float if proba else int
    pairs = (item.split(":") for item in config_map.strip("[]").split())
    return {key_cast(k): value_cast(v) for k, v in pairs}
if __name__ == '__main__':
    # Read config file
    with open('configs/full_config.yaml', 'r') as f:
        config = yaml.load(f, Loader=yaml.FullLoader)
    # translate each raw YAML section into its typed parameter object
    dataset_params = DatasetParams(
        dataset_count=config["dataset"]["dataset_count"],
        dataset_env_count_map=process_map(config["dataset"]["dataset_env_count_map"], enum=True),
        dataset_slo_range=config["dataset"]["dataset_slo_range_seconds"]
    )
    system_params = SystemParams(
        system_count=config["system"]["system_count"],
        system_env_count_map=process_map(config["system"]["system_env_count_map"], enum=True),
        system_criticality_proba_map=process_map(config["system"]["system_criticality_proba_map"], enum=True,
                                                 proba=True)
    )
    dataset_to_system_params = DatasetToSystemParams(
        dataset_read_count_map=process_map(config["dataset"]["dataset_read_count_map"]),
        dataset_write_count_map=process_map(config["dataset"]["dataset_write_count_map"]),
        system_input_count_map=process_map(config["system"]["system_inputs_count_map"]),
        system_output_count_map=process_map(config["system"]["system_outputs_count_map"])
    )
    collection_params = CollectionParams(
        dataset_count_map=process_map(config["dataset_collection"]["dataset_count_map"]),
        system_count_map=process_map(config["system_collection"]["system_count_map"])
    )
    processing_params = ProcessingParams(
        dataset_criticality_proba_map=process_map(config["data_processing"]["dataset_criticality_proba_map"],
                                                  proba=True, enum=True),
        dataset_impact_proba_map=process_map(config["data_processing"]["dataset_impact_proba_map"], proba=True,
                                             enum=True)
    )
    data_integrity_params = DataIntegrityParams(
        data_restoration_range_seconds=config["data_integrity"]["restoration_range_seconds"],
        data_regeneration_range_seconds=config["data_integrity"]["regeneration_range_seconds"],
        data_reconstruction_range_seconds=config["data_integrity"]["reconstruction_range_seconds"],
        data_volatility_proba_map=process_map(config["data_integrity"]["volatality_proba_map"], proba=True)
    )
    # NOTE(review): processing_params and data_integrity_params are built but
    # never passed to ConnectionGenerator — confirm whether that is intentional
    graph_connections = ConnectionGenerator(
        dataset_params=dataset_params,
        system_params=system_params,
        dataset_to_system_params=dataset_to_system_params,
        collection_params=collection_params
    )
    graph_connections.generate()
|
peterstace/project-euler | OLD_PY_CODE/project_euler_old_old/Resources/graph_theory.py | Python | unlicense | 3,807 | 0.00394 | """This module provides graph theory functionality."""
from heapq import heapify, heappop, heappush
def dijkstra(nodes, edges, startNode, directed):
    """Finds the length between each node in the graph and the startNode.

    Arguments:
    nodes - the set of nodes in the graph.
    edges - the set of edges in the graph. Each edge should be a 3-tuple
        containing the source and destination nodes, as well as the
        (non-negative) weight of that edge (in that order).
    startNode - the starting node for the search.
    directed - boolean, should the graph be treated as directed,
        or instead indirected?

    Returns: a dict whose keys are nodes and whose values are the smallest cost
    to get from startNode to that particular node.

    E.g.
    # example: from wikipedia
    nodes = ["1", "2", "3", "4", "5", "6"]
    edges = [("1", "2", 7), ("1", "3", 9), ("1", "6", 14), ("2", "3", 10),
             ("2", "4", 15), ("3", "4", 11), ("3", "6", 2), ("4", "5", 6),
             ("5", "6", 9)]
    d = dijkstra(set(nodes), set(edges), "1", True)
    print d
    """
    # construct a dict holding each node's neighbours and the cost to them
    neighbours = dict([(node, []) for node in nodes])
    for source, dest, weight in edges:
        neighbours[source].append((dest, weight))
        if not directed:
            neighbours[dest].append((source, weight))
    # every node starts infinitely far away, except startNode itself
    distance = dict([(node, float("inf")) for node in nodes])
    distance[startNode] = 0
    visited = dict([(node, False) for node in nodes])
    # BUG FIX: the original called heapq.heappop/heapq.heappush, but only the
    # functions were imported (`from heapq import ...`), raising NameError.
    # This version also re-pushes a node whenever its distance improves
    # ("lazy deletion"), which is required for correct results when a queued
    # node's distance is lowered before it is popped.
    queue = [(0, startNode)]
    while queue:
        dist, currentNode = heappop(queue)
        if visited[currentNode]:
            continue  # stale queue entry; this node is already finalized
        visited[currentNode] = True
        for node, edgeWeight in neighbours[currentNode]:
            candidate = distance[currentNode] + edgeWeight
            if not visited[node] and candidate < distance[node]:
                distance[node] = candidate
                heappush(queue, (candidate, node))
    return distance
def prim(adj_list, start_v):
    """Compute a minimal spanning tree from a graph's adjacency list.

    *adj_list* maps each vertex to a list of (to_vertex, weight) 2-tuples;
    *start_v* is the vertex the tree is grown from. Returns the tree's edges
    as (from_vertex, to_vertex, weight) 3-tuples.

    E.g. from wikipedia:
    a = {'A': [('B', 7), ('D', 5)],
         'B': [('A', 7), ('C', 8), ('D', 9), ('E', 7)],
         'C': [('B', 8), ('E', 5)],
         'D': [('A', 5), ('B', 9), ('E', 15), ('F', 6)],
         'E': [('B', 7), ('C', 5), ('D', 15), ('F', 8), ('G', 9)],
         'F': [('D', 6), ('E', 8), ('G', 11)],
         'G': [('E', 9), ('F', 11)]}
    for from_v, to_v, weight in prim(a, 'A'):
        print from_v, to_v, weight
    """
    vertex_count = len(adj_list)
    visited = [start_v]
    tree_edges = []
    # frontier holds candidate edges keyed by weight for cheapest-first pops
    frontier = [(w, start_v, v) for v, w in adj_list[start_v]]
    heapify(frontier)
    while len(visited) != vertex_count:
        # discard edges whose far endpoint was reached since they were queued
        weight, src, dst = heappop(frontier)
        while src in visited and dst in visited:
            weight, src, dst = heappop(frontier)
        visited.append(dst)
        tree_edges.append((src, dst, weight))
        for nxt, w in adj_list[dst]:
            if nxt not in visited:
                heappush(frontier, (w, dst, nxt))
    return tree_edges
|
varlib1/servermall | marathon/test_marathon.py | Python | bsd-3-clause | 2,285 | 0.001751 | # (C) Datadog, Inc. 2010-2016
# All rights reserved
# Licensed under Simplified BSD License (see LICENSE)
# stdlib
import os
# project
from tests.checks.common import AgentCheckTest, Fixtures
# instance configuration with per-deployment metrics collection enabled
DEPLOYMENT_METRICS_CONFIG = {
    'init_config': {
        'default_timeout': 5
    },
    'instances': [
        {
            'url': 'http://localhost:8080',
            'enable_deployment_metrics': True
        }
    ]
}

# minimal instance configuration (deployment metrics left disabled)
DEFAULT_CONFIG = {
    'init_config': {
        'default_timeout': 5
    },
    'instances': [
        {
            'url': 'http://localhost:8080'
        }
    ]
}

# queue metrics the check is expected to emit
Q_METRICS = [
    'marathon.queue.count',
    'marathon.queue.delay',
    'marathon.queue.offers.processed',
    'marathon.queue.offers.unused',
    'marathon.queue.offers.reject.last',
    'marathon.queue.offers.reject.launch',
]
class MarathonCheckTest(AgentCheckTest):
    """Unit tests for the marathon agent check."""
    CHECK_NAME = 'marathon'

    def test_default_configuration(self):
        """Fixture-backed API responses should yield app/deployment/queue metrics."""
        ci_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), "ci")

        def side_effect(url, timeout, auth, acs_url, verify):
            # route each marathon API endpoint to its recorded fixture
            # FIX: repaired a garbled 'return | Fixtures' token split
            if "v2/apps" in url:
                return Fixtures.read_json_file("apps.json", sdk_dir=ci_dir)
            elif "v2/deployments" in url:
                return Fixtures.read_json_file("deployments.json", sdk_dir=ci_dir)
            elif "v2/queue" in url:
                return Fixtures.read_json_file("queue.json", sdk_dir=ci_dir)
            else:
                raise Exception("unknown url:" + url)

        self.run_check(DEFAULT_CONFIG, mocks={"get_json": side_effect})
        # FIX: repaired the garbled 'mar | athon.apps' metric name literal
        self.assertMetric('marathon.apps', value=1)
        self.assertMetric('marathon.deployments', value=1)
        for metric in Q_METRICS:
            self.assertMetric(metric, at_least=1)

    def test_empty_responses(self):
        """Empty API responses should still produce a zero apps metric."""
        def side_effect(url, timeout, auth, acs_url, verify):
            if "v2/apps" in url:
                return {"apps": []}
            elif "v2/deployments" in url:
                return {"deployments": []}
            elif "v2/queue" in url:
                return {"queue": []}
            else:
                raise Exception("unknown url:" + url)

        self.run_check(DEFAULT_CONFIG, mocks={"get_json": side_effect})
        self.assertMetric('marathon.apps', value=0)
|
oppianmatt/django-loki | src/loki/models.py | Python | gpl-3.0 | 9,165 | 0.003273 | # Copyright 2008-20 | 10, Red Hat, Inc
# Dan Radez <dradez@redhat.com>
#
# This software may be freely redistributed under the terms of the GNU
# general public license.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
import os
from django.db import m | odels
from django.db.models.signals import post_save
from django.db.models.signals import post_delete
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
from loki.settings import *
from loki.bind_administration import bind_administration
from loki.signal_receivers import post_save_bot
from loki.signal_receivers import post_delete_bot
from loki.signal_receivers import post_save_config
from loki.model_helpers import _template
from loki.model_helpers import _generate_class
from loki.model_helpers import build_bot_run
# these are for filters later
# need to try catch them because syncdb
# will fail because it hasn't created the content_type table
# but needs it to ge these types
try:
    status_content_type = ContentType.objects.get(
        app_label="loki", model="status")
    step_content_type = ContentType.objects.get(
        app_label="loki", model="step")
    scheduler_content_type = ContentType.objects.get(
        app_label="loki", model="scheduler")
except:
    # before the first syncdb these lookups fail; fall back to dummy values
    status_content_type = step_content_type = scheduler_content_type = 0
class Host(models.Model):
    """A machine that buildbot masters and slaves run on."""
    hostname = models.CharField(max_length=200, unique=True)

    def __unicode__(self):
        return self.hostname
class Bot(models.Model):
    """Abstract base for buildbot daemons, shared by Master and Slave."""

    class Meta(object):
        """
        Meta attributes for Bot: abstract, so no table is created.
        """
        abstract = True

    def buildbot_run(self, action):
        # delegate an action (e.g. start/stop) to the buildbot command line
        build_bot_run([action, self.path])

    def pid(self):
        """Return the daemon pid from twistd.pid, or 0 when no pid file exists."""
        pid = 0
        pid_file = os.path.join(self.path, 'twistd.pid')
        if os.path.exists(pid_file):
            pid_fd = open(pid_file, 'r')
            pid = pid_fd.read()
            pid_fd.close()
        return int(pid)

    # True when the recorded pid has a live entry under /proc
    alive = property(lambda self: self.pid() and \
        os.path.exists(os.path.join('/proc', str(self.pid()))))
class Master(Bot):
    """A buildbot master daemon and its generated master.cfg."""
    host = models.ForeignKey(Host, related_name='masters')
    name = models.SlugField(max_length=25)
    slave_port = models.IntegerField(max_length=5)
    web_port = models.IntegerField(max_length=5)
    # on-disk directory of this master's buildbot instance
    path = property(lambda self: os.path.join(BUILDBOT_MASTERS, self.name))
    # arguments for 'buildbot create-master'
    buildbot_create = property(lambda self: ['create-master', self.path])

    def __unicode__(self):
        return self.name

    def generate_cfg(self):
        """Render this master's slaves, steps, statuses and schedulers into
        master.cfg via the master.cfg.tpl template."""
        buildslaves = ''
        factories = ''
        statuses = ''
        schedulers = ''
        imports = ''
        builders = []
        modules = []
        ct = 1
        for slave in self.slaves.all():
            #generate the BuildSlave objects
            buildslaves += "\n BuildSlave('%s', '%s')," % \
                (slave.name, slave.passwd)
            #create buildfactory
            # '-' is not valid in a python identifier, so encode it
            b = '%s_%s' % (self.name, slave.name)
            b = b.replace('-', '__dash__')
            factories += '%s = factory.BuildFactory()\n' % b
            for step in slave.steps.all():
                if step.type not in modules:
                    modules.append(step.type)
                factories += "%s.addStep(%s)\n" % (b,
                    _generate_class(step))
            #create builder from factory
            factories += "b%s = {'name': '%s',\n" % (ct, slave.name)
            factories += " 'slavename': '%s',\n" % slave.name
            factories += " 'builddir': '%s',\n" % slave.name
            factories += " 'factory': %s, }\n\n" % b
            # remember the builders
            builders.append('b%s' % ct)
            ct += 1
        #generate status
        for status in self.status.all():
            statuses += "c['status'].append(%s)" % _generate_class(status)
            modules.append(status.type)
        #generate schedulers
        for scheduler in self.schedulers.all():
            schedulers += "c['scheduler'].append(%s)" % _generate_class(scheduler)
            modules.append(scheduler.type)
        #restructure the imports
        # each Config.module is a dotted path; emit 'from pkg import name'
        for x in modules:
            imports += 'from %s import %s\n' % (
                '.'.join(x.module.split('.')[:-1]),
                x.module.split('.')[-1])
        #generate the template
        t = _template('%smaster.cfg.tpl' % BUILDBOT_TMPLS,
                      botname=self.name,
                      webhost=self.host,
                      webport=self.web_port,
                      slaveport=self.slave_port,
                      buildslaves=buildslaves,
                      imports=imports,
                      factories=factories,
                      builders=','.join(builders),
                      statuses=statuses,
                      schedulers=schedulers)
        cfg = open(os.path.join(self.path, 'master.cfg'), 'w')
        cfg.write(t)
        cfg.close()
class Slave(Bot):
    """A buildbot slave daemon attached to one Master."""
    host = models.ForeignKey(Host, related_name='slaves')
    master = models.ForeignKey(Master, related_name='slaves')
    name = models.SlugField(max_length=25)
    passwd = models.SlugField(max_length=25)
    # on-disk directory of this slave's buildbot instance
    path = property(lambda self: os.path.join(BUILDBOT_SLAVES, self.name))
    # arguments for 'buildbot create-slave'
    buildbot_create = property(lambda self: ['create-slave', self.path,
        '%s:%s' % (self.master.host, self.master.slave_port),
        self.name, self.passwd])

    def __unicode__(self):
        return self.name

    def generate_cfg(self):
        """Render this slave's buildbot.tac from the slave.cfg.tpl template."""
        t = _template('%sslave.cfg.tpl' % BUILDBOT_TMPLS,
                      basedir=os.path.abspath(self.path),
                      masterhost=self.master.host,
                      slavename=self.name,
                      slaveport=self.master.slave_port,
                      slavepasswd=self.passwd)
        cfg = open(os.path.join(self.path, 'buildbot.tac'), 'w')
        cfg.write(t)
        cfg.close()
class Config(models.Model):
    """
    A definition of what configs are available
    """
    name = models.CharField(max_length=25)
    # dotted import path of the buildbot class this config renders to
    module = models.CharField(max_length=200, unique=True)
    # which kind of config this is (status, step or scheduler)
    content_type = models.ForeignKey(ContentType)

    def __unicode__(self):
        return self.name
class ConfigParam(models.Model):
    """A parameter accepted by a Config definition."""
    name = models.CharField(max_length=25)
    type = models.ForeignKey(Config, related_name='params')
    default = models.CharField(max_length=200, blank=True, null=True)
    required = models.BooleanField(default=False)

    def __unicode__(self):
        # flag required parameters with a trailing ' *'
        req = ''
        if self.required:
            req = ' *'
        return '%s :: %s%s' % (self.type, self.name, req)
class Status(models.Model):
    """A status target configured on a master (rendered into c['status'])."""
    master = models.ForeignKey(Master, related_name='status')
    # restricted to Config rows whose content type is 'status'
    type = models.ForeignKey(Config, related_name='status_type',
                             limit_choices_to={
                                 'content_type': status_content_type})

    def __unicode__(self):
        return '%s :: %s' % (self.master, self.type)
class StatusParam(models.Model):
    """A concrete value for one parameter of a Status instance."""
    status = models.ForeignKey(Status, related_name='params')
    type = models.ForeignKey(ConfigParam)
    val = models.CharField(max_length=200)

    def __unicode__(self):
        return '%s :: %s' % (self.status, self.val)
class Step(models.Model):
    """An ordered build step configured on a slave."""
    slave = models.ForeignKey(Slave, related_name='steps')
    # restricted to Config rows whose content type is 'step'
    type = models.ForeignKey(Config, related_name='step_type',
                             limit_choices_to={
                                 'content_type': step_content_type})
    # position of this step in the slave's build sequence
    num = models.IntegerField()

    class Meta:
        ordering = ('num', )

    def __unicode__(self):
        return '%s :: %s' % (self.slave, self.type)
class StepParam(models.Model):
    """A concrete value for one parameter of a Step instance."""
    step = models.ForeignKey(Step, related_name='params')
    type = models.ForeignKey(ConfigParam)
    val = models.CharField(max_length=200)

    def __unicode__(self):
        return '%s :: %s' % (self.step, self.val)
class Scheduler(models.Model):
master = models.Foreign |
yershalom/python-jenkins | tests/test_jenkins.py | Python | bsd-3-clause | 75,211 | 0.000186 | import json
import io
import socket
import sys

if sys.version_info < (2, 7):
    import unittest2 as unittest
else:
    import unittest

from mock import patch, Mock
import six
from six.moves.urllib.error import HTTPError
from six.moves.urllib.request import build_opener

import jenkins
def get_mock_urlopen_return_value(a_dict=None):
    """Return a file-like object whose content is *a_dict* serialized as JSON.

    Mimics what urlopen() would hand back for a JSON API response; an empty
    dict is used when no payload is given.
    """
    if a_dict is None:
        a_dict = {}
    # six.BytesIO is just an alias for io.BytesIO, so use the stdlib directly
    return io.BytesIO(json.dumps(a_dict).encode('utf-8'))
class JenkinsTest(unittest.TestCase):
# canned plugin-manager API payload used by plugin-related tests
plugin_info_json = {
    u"plugins":
    [
        {
            u"active": u'true',
            u"backupVersion": u'null',
            u"bundled": u'true',
            u"deleted": u'false',
            u"dependencies": [],
            u"downgradable": u'false',
            u"enabled": u'true',
            u"hasUpdate": u'true',
            u"longName": u"Jenkins Mailer Plugin",
            u"pinned": u'false',
            u"shortName": u"mailer",
            u"supportsDynamicLoad": u"MAYBE",
            u"url": u"http://wiki.jenkins-ci.org/display/JENKINS/Mailer",
            u"version": u"1.5"
        }
    ]
}
def setUp(self):
    """Create a urllib opener used to sanity-check built requests."""
    super(JenkinsTest, self).setUp()
    self.opener = build_opener()
def _check_requests(self, requests):
    # each recorded mock call holds the Request as its first positional arg
    for recorded_call in requests:
        self._check_request(recorded_call[0][0])
def _check_request(self, request):
    """Run *request* through the opener's preprocessors to validate it."""
    # taken from opener.open() in request
    # attribute request.type is only set automatically for python 3
    # requests, must use request.get_type() for python 2.7
    protocol = request.type or request.get_type()
    # check that building the request doesn't throw any exception
    meth_name = protocol + "_request"
    for processor in self.opener.process_request.get(protocol, []):
        meth = getattr(processor, meth_name)
        request = meth(request)
def test_constructor_url_with_trailing_slash(self):
    """A server URL already ending in '/' is kept as-is."""
    j = jenkins.Jenkins('http://example.com/', 'test', 'test')
    self.assertEqual(j.server, 'http://example.com/')
    self.assertEqual(j.auth, b'Basic dGVzdDp0ZXN0')
    self.assertEqual(j.crumb, None)
def test_constructor_url_without_trailing_slash(self):
    """A trailing '/' is appended to the server URL when missing."""
    j = jenkins.Jenkins('http://example.com', 'test', 'test')
    self.assertEqual(j.server, 'http://example.com/')
    self.assertEqual(j.auth, b'Basic dGVzdDp0ZXN0')
    self.assertEqual(j.crumb, None)
def test_constructor_without_user_or_password(self):
    """No credentials means no Authorization header is prepared."""
    j = jenkins.Jenkins('http://example.com')
    self.assertEqual(j.server, 'http://example.com/')
    self.assertEqual(j.auth, None)
    self.assertEqual(j.crumb, None)
def test_constructor_unicode_password(self):
    """Non-ASCII credentials are UTF-8 encoded before base64-ing."""
    j = jenkins.Jenkins('http://example.com',
                        six.u('nonascii'),
                        six.u('\xe9\u20ac'))
    self.assertEqual(j.server, 'http://example.com/')
    self.assertEqual(j.auth, b'Basic bm9uYXNjaWk6w6nigqw=')
    self.assertEqual(j.crumb, None)
def test_constructor_long_user_or_password(self):
    """The base64 auth value must not contain embedded newlines."""
    long_str = 'a' * 60
    long_str_b64 = 'YWFh' * 20
    j = jenkins.Jenkins('http://example.com', long_str, long_str)
    self.assertNotIn(b"\n", j.auth)
    self.assertEqual(j.auth.decode('utf-8'), 'Basic %s' % (
        long_str_b64 + 'Om' + long_str_b64[2:] + 'YQ=='))
def test_constructor_default_timeout(self):
    """The socket module's global default timeout is used when unset."""
    j = jenkins.Jenkins('http://example.com')
    self.assertEqual(j.timeout, socket._GLOBAL_DEFAULT_TIMEOUT)
def test_constructor_custom_timeout(self):
    """An explicit timeout argument is stored verbatim."""
    j = jenkins.Jenkins('http://example.com', timeout=300)
    self.assertEqual(j.timeout, 300)
@patch.object(jenkins.Jenkins, 'jenkins_open')
def test_get_job_config_encodes_job_name(self, jenkins_mock):
    """Spaces in a job name are percent-encoded in the request URL."""
    j = jenkins.Jenkins('http://example.com/', 'test', 'test')
    j.get_job_config(u'Test Job')
    self.assertEqual(
        jenkins_mock.call_args[0][0].get_full_url(),
        u'http://example.com/job/Test%20Job/config.xml')
    self._check_requests(jenkins_mock.call_args_list)
@patch('jenkins.urlopen')
def test_maybe_add_crumb(self, jenkins_mock):
    """No crumb header is added when the crumb endpoint is not found."""
    jenkins_mock.side_effect = jenkins.NotFoundException()
    j = jenkins.Jenkins('http://example.com/', 'test', 'test')
    request = jenkins.Request('http://example.com/job/TestJob')
    j.maybe_add_crumb(request)
    self.assertEqual(
        jenkins_mock.call_args[0][0].get_full_url(),
        'http://example.com/crumbIssuer/api/json')
    self.assertFalse(j.crumb)
    self.assertFalse('.crumb' in request.headers)
    self._check_requests(jenkins_mock.call_args_list)
@patch('jenkins.urlopen')
def test_maybe_add_crumb__with_data(self, jenkins_mock):
    """A crumb returned by the server is cached and set as a header."""
    crumb_data = {
        "crumb": "dab177f483b3dd93483ef6716d8e792d",
        "crumbRequestField": ".crumb",
    }
    jenkins_mock.return_value = get_mock_urlopen_return_value(crumb_data)
    j = jenkins.Jenkins('http://example.com/', 'test', 'test')
    request = jenkins.Request('http://example.com/job/TestJob')
    j.maybe_add_crumb(request)
    self.assertEqual(
        jenkins_mock.call_args[0][0].get_full_url(),
        'http://example.com/crumbIssuer/api/json')
    self.assertEqual(j.crumb, crumb_data)
    self.assertEqual(request.headers['.crumb'], crumb_data['crumb'])
    self._check_requests(jenkins_mock.call_args_list)
@patch.object(jenkins.Jenkins, 'jenkins_open')
def test_maybe_add_crumb__empty_response(self, jenkins_mock):
    "Don't try to create crumb header from an empty response"
    jenkins_mock.side_effect = jenkins.EmptyResponseException("empty response")
    j = jenkins.Jenkins('http://example.com/', 'test', 'test')
    request = jenkins.Request('http://example.com/job/TestJob')
    j.maybe_add_crumb(request)
    self.assertEqual(
        jenkins_mock.call_args[0][0].get_full_url(),
        'http://example.com/crumbIssuer/api/json')
    self.assertFalse(j.crumb)
    self.assertFalse('.crumb' in request.headers)
    self._check_requests(jenkins_mock.call_args_list)
@patch('jenkins.urlopen')
def test_jenkins_open(self, jenkins_mock):
crumb_data = {
"crumb": "dab177f483b3dd93483ef6716d8e792d",
"crumbRequestField": ".crumb",
}
data = {'foo': 'bar'}
jenkins_mock.side_effect = [
get_mock_urlopen_return_value(crumb_data),
get_mock_urlopen_return_value(data),
]
j = jenkins.Jenkins('http://example.com/', 'test', 'test')
request = jenkins.Request('http://example.com/job/TestJob')
response = j.jenkins_open(request)
self.assertEqual(
jenkins_mock.call_args[0][0].get_full_url(),
'http://example.com/job/TestJob')
self.assertEqual(response, json.dumps(data))
self.assertEqual(j.crumb, crumb_data)
self.assertEqual(request.headers['.crumb'], crumb_data['crumb'])
self._check_requests(jenkins_mock.call_args_list)
@patch('jenkins.urlopen')
def test_jenkins_open__403(self, jenkins_mock):
jenkins_mock.side_effect = jenkins.HTTPError(
'http://example.com/job/TestJob',
code=401,
msg="basic auth failed",
hdrs=[],
fp=None)
j = jenkins.Jenkins('http://example.com/', 'test', 'test')
request = jenkins.Request('http://example.com/job/TestJob')
with self.assertRaises(jenkins.JenkinsException) as context_manager:
j.jenkins_open(request, add_crumb=False)
self.assertEqual(
str(context_manager.exception),
'Error in request. Possibly authentication failed [401]: '
'basic auth failed')
self.assertEqual(
jenkins_mock.call_args[0][0].get_full_url(),
'http://example.com/job/TestJob')
self._check_requests(jenkins_mock.call_args_list)
@patch('jenkins.urlo |
repotvsupertuga/tvsupertuga.repository | script.module.openscrapers/lib/openscrapers/sources_openscrapers/scrapertest-yoda/to_be_fixed/__init__.py | Python | gpl-2.0 | 77 | 0 | # M | akes this into a package, so it's | not loaded by the 'sources()' function.
|
ArcherSys/ArcherSys | Lib/distutils/command/install_headers.py | Python | mit | 4,034 | 0.005949 | <<<<<<< HEAD
<<<<<<< HEAD
"""distutils.command.install_headers
Implements the Distutils 'install_headers' command, to install C/C++ header
files to the Python include directory."""
from distutils.core import Command
# XXX force is never used
class install_headers(Command):
description = "install C/C++ header files"
user_options = [('install-dir=', 'd',
"directory to install header files to"),
('force', 'f',
"force installation (overwrite existing files)"),
]
boolean_options = ['force']
def initialize_options(self):
self.install_dir = None
self.force = 0
self.outfiles = []
def finalize_options(self):
self.set_undefined_options('install',
('install_headers', 'install_dir'),
('force', 'force'))
def run(self):
headers = self.distribution.headers
if not headers:
return
self.mkpath(self.install_dir)
for header in headers:
(out, _) = self.copy_file(header, self.install_dir)
self.outfiles.append(out)
def get_inputs(self):
return self.distribution.headers or []
def get_outputs(self):
return self.outfiles
=======
"""distutils.command.install_headers
Implements the Distutils 'install_headers' command, to install C/C++ header
files to the Python include directory."""
from distutils.core import Command
# XXX force is never used
class install_headers(Command):
description = "install C/C++ header files"
user_options = [('install-dir=', 'd',
"directory to install header files to"),
('force', 'f',
"force installation (overwrite existing files)"),
]
boolean_options = ['force']
def initialize_options(self):
self.install_dir = None
self.force = 0
self.outfiles = []
def finalize_options(self):
self.set_undefined_options('install',
('install_headers', 'install_ | dir'),
('force', 'force'))
def run(self):
headers = self.distribution.headers
if not headers:
return
self.mkpath(self.install_dir)
for header in headers:
(out, _) = self.copy_file(header, self.install_dir)
self.outfiles.append(out)
def get_inputs(self):
return self.distribution.headers or []
def get_outputs(self):
retu | rn self.outfiles
>>>>>>> b875702c9c06ab5012e52ff4337439b03918f453
=======
"""distutils.command.install_headers
Implements the Distutils 'install_headers' command, to install C/C++ header
files to the Python include directory."""
from distutils.core import Command
# XXX force is never used
class install_headers(Command):
description = "install C/C++ header files"
user_options = [('install-dir=', 'd',
"directory to install header files to"),
('force', 'f',
"force installation (overwrite existing files)"),
]
boolean_options = ['force']
def initialize_options(self):
self.install_dir = None
self.force = 0
self.outfiles = []
def finalize_options(self):
self.set_undefined_options('install',
('install_headers', 'install_dir'),
('force', 'force'))
def run(self):
headers = self.distribution.headers
if not headers:
return
self.mkpath(self.install_dir)
for header in headers:
(out, _) = self.copy_file(header, self.install_dir)
self.outfiles.append(out)
def get_inputs(self):
return self.distribution.headers or []
def get_outputs(self):
return self.outfiles
>>>>>>> b875702c9c06ab5012e52ff4337439b03918f453
|
Suwmlee/XX-Net | gae_proxy/server/lib/google/appengine/ext/db/polymodel.py | Python | bsd-2-clause | 12,820 | 0.00507 | #!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Support for polymorphic models and queries.
The Model class on its own is only able to support functional polymorphism.
It is possible to create a subclass of Model and then subclass that one as
many generations as necessary and those classes will share all the same
properties and behaviors. The problem is that subclassing Model in this way
places each subclass in their own Kind. This means that it is not possible
to do polymorphic queries. Building a query on a base class will only return
instances of that class from the Datastore, while queries on a subclass will
only return those instances.
This module allows applications to specify class hierarchies that support
polymorphic queries.
"""
from google.appengine.ext import db
_class_map = {}
_CLASS_KEY_PROPERTY = 'class'
class _ClassKeyProperty(db.ListProperty):
"""Property representing class-key property of a polymorphic class.
The class key is a list of strings describing an polymorphic instances
place within its class hierarchy. This property is automatically calculated.
For example:
class Foo(PolyModel): ...
class Bar(Foo): ...
class Baz(Bar): ...
Foo.class_key() == ['Foo']
Bar.class_key() == ['Foo', 'Bar']
Baz.class_key() == ['Foo', 'Bar', 'Baz']
"""
def __init__(self, name):
super(_ClassKeyProperty, self).__init__(name=name,
item_type=str,
default=None)
def __set__(self, *args):
raise db.DerivedPropertyError(
'Class-key is a derived property and cannot be set.')
def __get__(self, model_instance, model_class):
if model_instance is None:
return self
return [cls.__name__ for cls in model_class.__class_hierarchy__]
class PolymorphicClass(db.PropertiedClass):
"""Meta-class for initializing PolymorphicClasses.
This class extends PropertiedClass to add a few static attributes to
new polymorphic classes necessary for their correct functioning.
"""
def __init__(cls, name, bases, dct):
"""Initializes a class that belongs to a polymorphic hierarchy.
This method configures a few built-in attributes of polymorphic
models:
__root_class__: If the new class is a root class, __root_class__ is set to
itself so that it subclasses can quickly know what the root of
their hierarchy is and what kind they are stored in.
__class_hierarchy__: List of classes describing the new model's place
in the class hierarchy in reverse MRO order. The first element is
always the root class while the last element is always the new class.
MRO documentation: http://www.python.org/download/releases/2.3/mro/
For example:
class Foo(PolymorphicClass): ...
class Bar(Foo): ...
class Baz(Bar): ...
Foo.__class_hierarchy__ == [Foo]
Bar.__class_hierarchy__ == [Foo, Bar]
Baz._ | _class | _hierarchy__ == [Foo, Bar, Baz]
Unless the class is a root class or PolyModel itself, it is not
inserted in to the kind-map like other models. However, all polymorphic
classes, are inserted in to the class-map which maps the class-key to
implementation. This class key is consulted using the polymorphic instances
discriminator (the 'class' property of the entity) when loading from the
datastore.
"""
if name == 'PolyModel':
super(PolymorphicClass, cls).__init__(name, bases, dct, map_kind=False)
return
elif PolyModel in bases:
if getattr(cls, '__class_hierarchy__', None):
raise db.ConfigurationError(('%s cannot derive from PolyModel as '
'__class_hierarchy__ is already defined.') % cls.__name__)
cls.__class_hierarchy__ = [cls]
cls.__root_class__ = cls
super(PolymorphicClass, cls).__init__(name, bases, dct)
else:
super(PolymorphicClass, cls).__init__(name, bases, dct, map_kind=False)
cls.__class_hierarchy__ = [c for c in reversed(cls.mro())
if issubclass(c, PolyModel) and c != PolyModel]
if cls.__class_hierarchy__[0] != cls.__root_class__:
raise db.ConfigurationError(
'%s cannot be derived from both root classes %s and %s' %
(cls.__name__,
cls.__class_hierarchy__[0].__name__,
cls.__root_class__.__name__))
_class_map[cls.class_key()] = cls
class PolyModel(db.Model, metaclass=PolymorphicClass):
"""Base-class for models that supports polymorphic queries.
Use this class to build hierarchies that can be queried based
on their types.
Example:
consider the following model hierarchy:
+------+
|Animal|
+------+
|
+-----------------+
| |
+------+ +------+
|Canine| |Feline|
+------+ +------+
| |
+-------+ +-------+
| | | |
+---+ +----+ +---+ +-------+
|Dog| |Wolf| |Cat| |Panther|
+---+ +----+ +---+ +-------+
This class hierarchy has three levels. The first is the "root class".
All models in a single class hierarchy must inherit from this root. All
models in the hierarchy are stored as the same kind as the root class.
For example, Panther entities when stored to the datastore are of the kind
'Animal'. Querying against the Animal kind will retrieve Cats, Dogs and
Canines, for example, that match your query. Different classes stored
in the root class' kind are identified by their class-key. When loaded
from the datastore, it is mapped to the appropriate implementation class.
Polymorphic properties:
Properties that are defined in a given base-class within a hierarchy are
stored in the datastore for all sub-casses only. So, if the Feline class
had a property called 'whiskers', the Cat and Panther enties would also
have whiskers, but not Animal, Canine, Dog or Wolf.
Polymorphic queries:
When written to the datastore, all polymorphic objects automatically have
a property called 'class' that you can query against. Using this property
it is possible to easily write a GQL query against any sub-hierarchy. For
example, to fetch only Canine objects, including all Dogs and Wolves:
db.GqlQuery("SELECT * FROM Animal WHERE class='Canine'")
And alternate method is to use the 'all' or 'gql' methods of the Canine
class:
Canine.all()
Canine.gql('')
The 'class' property is not meant to be used by your code other than
for queries. Since it is supposed to represents the real Python class
it is intended to be hidden from view.
Root class:
The root class is the class from which all other classes of the hierarchy
inherits from. Each hierarchy has a single root class. A class is a
root class if it is an immediate child of PolyModel. The subclasses of
the root class are all the same kind as the root class. In other words:
Animal.kind() == Feline.kind() == Panther.kind() == 'Animal'
"""
_class = _ClassKeyProperty(name=_CLASS_KEY_PROPERTY)
def __new__(*args, **kwds):
"""Prevents direct instantiation of PolyModel.
Allow subclasses to call __new__() with arguments.
Do NOT list 'cls' as the first argument, or in the case when
the 'kwds' dictionary contains the key 'cls', the function
will complain about multiple argument values for 'cls'.
Raises:
Type |
ratoaq2/Flexget | flexget/plugins/input/listdir.py | Python | mit | 1,908 | 0.001048 | """Plugin for filesystem tasks."""
from __future__ import unicode_literals, division, absolute_import
import os
import logging
from path impo | rt Path
from flexget import plugin
from flexget.entry import Entry
from flexget.event import event
from flexget.config_schema import one_or_more
log = logging.getLogger('listdir')
class Listdir(object):
"""
Uses local path content as an input.
| Example::
listdir: /storage/movies/
"""
schema = one_or_more({'type': 'string', 'format': 'path'})
def on_task_input(self, task, config):
# If only a single path is passed turn it into a 1 element list
if isinstance(config, basestring):
config = [config]
entries = []
for folder in config:
folder = Path(folder).expanduser()
try:
dir_files = folder.listdir()
except OSError as e:
log.error('Path %s could not be accessed: %s' % (folder, e.strerror))
continue
for filepath in dir_files:
try:
filepath.exists()
except UnicodeError:
log.error('file %s not decodable with filesystem encoding' % filepath)
continue
e = Entry()
if filepath.isfile():
e['title'] = filepath.namebase
else:
e['title'] = filepath.name
e['location'] = filepath
# Windows paths need an extra / preceded to them
if not filepath.startswith('/'):
filepath = '/' + filepath
e['url'] = 'file://%s' % filepath
e['filename'] = filepath.name
entries.append(e)
return entries
@event('plugin.register')
def register_plugin():
plugin.register(Listdir, 'listdir', api_ver=2)
|
amdouglas/OpenPNM | test/unit/Utilities/TransformationsTest.py | Python | mit | 100 | 0 | class TransformationsTest:
| class ArcBall | Test:
def test_set_axes(self):
pass
|
antonygc/liblightbase | liblightbase/lbsearch/search.py | Python | gpl-2.0 | 5,160 | 0.003682 | from liblightbase import lbutils
from liblightbase.lbutils.conv import dict2document
class OrderBy(object):
"""
The ORDER BY keyword sorts the records in ascending order by default.
To sort the records in a descending order, you can use the DESC keyword.
To sort the records in a ascending order, you can use the ASC keyword.
"""
def __init__(self, asc = [], desc = []):
""" OrderBy constructor
"""
# @property asc: list with structure names
self.asc = asc
# @property desc: list with structure names
self.desc = desc
@property
def asc(self):
""" @property asc getter
"""
return self._asc
@asc.setter
def asc(self, value):
""" @property asc setter
"""
msg = 'asc property must be list object.'
assert isinstance(value, list), msg
self._asc = value
@property
def desc(self):
""" @prope | rty asc getter
"""
return self._desc
@desc.setter
| def desc(self, value):
""" @property asc setter
"""
msg = 'desc property must be list object.'
assert isinstance(value, list), msg
self._desc = value
def _asjson(self, **kw):
dict_orderby = {}
for key in dir(self):
if not key[0] == "_":
dict_orderby[key] = getattr(self, key)
return lbutils.object2json(dict_orderby)
def _asdict(self, **kw):
dict_orderby = {}
for key in dir(self):
if not key[0] == "_":
dict_orderby[key] = getattr(self, key)
return dict_orderby
class Search(object):
"""
"""
def __init__(self, select=['*'], order_by=OrderBy(),
literal='', limit=10, offset=0):
"""
"""
# @property select:
self.select = select
# @property order_by:
self.order_by = order_by
# @property literal:
self.literal = literal
# @property limit:
self.limit = limit
# @property offset:
self.offset = offset
def _asjson(self, **kw):
dict_search = {}
for key in dir(self):
if not key[0] == "_":
if isinstance(getattr(self, key), OrderBy):
value = getattr(self, key)._asdict()
else:
value = getattr(self, key)
dict_search[key] = value
return lbutils.object2json(dict_search)
def _asdict(self, **kw):
dict_search = {}
for key in dir(self):
if not key[0] == "_":
if isinstance(getattr(self, key), OrderBy):
value = getattr(self, key)._asdict()
else:
value = getattr(self, key)
dict_search[key] = value
return dict_search
@property
def select(self):
"""@property select getter
"""
return self._select
@select.setter
def select(self, value):
"""@property select setter
"""
msg = 'select property must be list object.'
assert isinstance(value, list), msg
self._select = value
@property
def order_by(self):
"""@property order_by getter
"""
return self._order_by
@order_by.setter
def order_by(self, value):
"""@property order_by setter
"""
msg = "order_by propert mut be a OrderBy instance"
assert isinstance(value, OrderBy), msg
self._order_by = value
@property
def literal(self):
"""@property literal getter
"""
return self._literal
@literal.setter
def literal(self, value):
"""@property literal setter
"""
msg = "literal property must be a string"
assert isinstance(value, str), msg
self._literal = value
@property
def limit(self):
"""@property limit getter
"""
return self._limit
@limit.setter
def limit(self, value):
"""@property limit setter
"""
if not value == None:
msg = "limit property must be a int"
assert isinstance(value, int), msg
self._limit = value
@property
def offset(self):
"""@property offset getter
"""
return self._offset
@offset.setter
def offset(self, value):
"""@property offset setter
"""
msg = "offset property must be a int"
assert isinstance(value, int), msg
self._offset = value
class Results(list):
def __init__(self, base, results):
results_object = [dict2document(base, dictobj) for dictobj in results]
super(Results, self).__init__(results_object)
class Collection(object):
def __init__(self, base, results, result_count, limit, offset):
# @property results:
self.results = Results(base, results)
# @property result_count:
self.result_count = result_count
# @property limit:
self.limit = limit
# @property offset:
self.offset = offset
|
ballouche/navitia | source/jormungandr/jormungandr/interfaces/v1/Ptobjects.py | Python | agpl-3.0 | 5,265 | 0.003039 | # coding=utf-8
# Copyright (c) 2001-2014, Canal TP and/or its affiliates. All rights reserved.
#
# This file is part of Navitia,
# the software to build cool stuff with public transport.
#
# Hope you'll enjoy and contribute to this project,
# powered by Canal TP (www.canaltp.fr).
# Help us simplify mobility and open public transport:
# a non ending quest to the responsive locomotion way of traveling!
#
# LICENCE: This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Stay tuned using
# twitter @navitia
# IRC #navitia on freenode
# https://groups.google.com/d/forum/navitia
# www.navitia.io
from __future__ import absolute_import, print_function, unicode_literals, division
from flask import Flask, request
from flask.ext.restful import Resource, fields, marshal_with, reqparse, abort
from flask.globals import g
from jormungandr import i_manager, timezone
from jormungandr.interfaces.v1.fields import disruption_marshaller
from jormungandr.interfaces.v1.make_links import add_id_links
from jormungandr.interfaces.v1.fields import NonNullList, NonNullNested, PbField, error, pt_object, feed_publisher
from jormungandr.interfaces.v1.ResourceUri import ResourceUri
from jormungandr.interfaces.argument import ArgumentDoc
from jormungandr.interfaces.parsers import depth_argument, option_value, default_count_arg_type, date_time_format
from copy import deepcopy
import datetime
pt_objects = {
"pt_objects": NonNullList(NonNullNested(pt_object), attribute='places'),
"disruptions": fields.List(NonNullNested(disruption_marshaller), attribute="impacts"),
"error": PbField(error, attribute='error'),
"feed_publishers": fields.List(NonNullNested(feed_publisher))
}
pt_object_type_values = ["network", "commercial_mode", "line", "line_group", "route", "stop_area"]
class Ptobjects(ResourceUri):
def __init__(self, *args, **kwargs):
ResourceUri.__init__(self, *args, **kwargs)
self.parsers = {}
self.parsers["get"] = reqparse.RequestParser(
argument_class=ArgumentDoc)
self.parsers["get"].add_argument("q", type=unicode, required=True,
description="The data to search")
self.parsers["get"].add_argument("type[]", type=option_value(pt_object_type_values),
action="append",default=pt_object_type_values,
description="The type of data to\
search")
self.parsers["get"].add_argume | nt("count", type=default_count_arg_type, default=10,
description="The maximum number of\
ptobjects returned")
self.parsers["get"].add_argument("search_type", type=int, default=0,
d | escription="Type of search:\
firstletter or type error")
self.parsers["get"].add_argument("admin_uri[]", type=unicode,
action="append",
description="If filled, will\
restrained the search within the\
given admin uris")
self.parsers["get"].add_argument("depth", type=depth_argument,
default=1,
description="The depth of objects")
self.parsers["get"].add_argument("_current_datetime", type=date_time_format, default=datetime.datetime.utcnow(),
description="The datetime used to consider the state of the pt object"
" Default is the current date and it is used for debug."
" Note: it will mainly change the disruptions that concern "
"the object The timezone should be specified in the format,"
" else we consider it as UTC")
@marshal_with(pt_objects)
def get(self, region=None, lon=None, lat=None):
self.region = i_manager.get_region(region, lon, lat)
timezone.set_request_timezone(self.region)
args = self.parsers["get"].parse_args()
self._register_interpreted_parameters(args)
if len(args['q']) == 0:
abort(400, message="Search word absent")
response = i_manager.dispatch(args, "pt_objects",
instance_name=self.region)
return response, 200
|
giliam/turbo-songwriter | backend/songwriter/tests.py | Python | mit | 75 | 0 | # coding:utf-8
from dja | ngo.test import TestCase
# Create your te | sts here.
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.